161 changes: 47 additions & 114 deletions fuzz/src/chanmon_consistency.rs
@@ -18,6 +18,7 @@
//! send-side handling is correct, other peers. We consider it a failure if any action results in a
//! channel being force-closed.

use bitcoin::FeeRate;
use bitcoin::amount::Amount;
use bitcoin::constants::genesis_block;
use bitcoin::locktime::absolute::LockTime;
@@ -43,6 +44,7 @@ use lightning::chain::{
chainmonitor, channelmonitor, BestBlock, ChannelMonitorUpdateStatus, Confirm, Watch,
};
use lightning::events;
use lightning::events::bump_transaction::sync::{WalletSourceSync, WalletSync};
use lightning::ln::channel::{
FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE, MAX_STD_OUTPUT_DUST_LIMIT_SATOSHIS,
};
@@ -52,7 +54,7 @@ use lightning::ln::channelmanager::{
RecipientOnionFields,
};
use lightning::ln::functional_test_utils::*;
use lightning::ln::funding::{FundingTxInput, SpliceContribution};
use lightning::ln::funding::SpliceContribution;
use lightning::ln::inbound_payment::ExpandedKey;
use lightning::ln::msgs::{
BaseMessageHandler, ChannelMessageHandler, CommitmentUpdate, Init, MessageSendEvent,
@@ -70,12 +72,14 @@ use lightning::sign::{
SignerProvider,
};
use lightning::types::payment::{PaymentHash, PaymentPreimage, PaymentSecret};
use lightning::util::async_poll::{MaybeSend, MaybeSync};
use lightning::util::config::UserConfig;
use lightning::util::errors::APIError;
use lightning::util::hash_tables::*;
use lightning::util::logger::Logger;
use lightning::util::ser::{LengthReadable, ReadableArgs, Writeable, Writer};
use lightning::util::test_channel_signer::{EnforcementState, TestChannelSigner};
use lightning::util::test_utils::TestWalletSource;

use lightning_invoice::RawBolt11Invoice;

@@ -168,63 +172,6 @@ impl Writer for VecWriter {
}
}

pub struct TestWallet {
secret_key: SecretKey,
utxos: Mutex<Vec<lightning::events::bump_transaction::Utxo>>,
secp: Secp256k1<bitcoin::secp256k1::All>,
}

impl TestWallet {
pub fn new(secret_key: SecretKey) -> Self {
Self { secret_key, utxos: Mutex::new(Vec::new()), secp: Secp256k1::new() }
}

fn get_change_script(&self) -> Result<ScriptBuf, ()> {
let public_key = bitcoin::PublicKey::new(self.secret_key.public_key(&self.secp));
Ok(ScriptBuf::new_p2wpkh(&public_key.wpubkey_hash().unwrap()))
}

pub fn add_utxo(&self, outpoint: bitcoin::OutPoint, value: Amount) -> TxOut {
let public_key = bitcoin::PublicKey::new(self.secret_key.public_key(&self.secp));
let utxo = lightning::events::bump_transaction::Utxo::new_v0_p2wpkh(
outpoint,
value,
&public_key.wpubkey_hash().unwrap(),
);
self.utxos.lock().unwrap().push(utxo.clone());
utxo.output
}

pub fn sign_tx(
&self, mut tx: Transaction,
) -> Result<Transaction, bitcoin::sighash::P2wpkhError> {
let utxos = self.utxos.lock().unwrap();
for i in 0..tx.input.len() {
if let Some(utxo) =
utxos.iter().find(|utxo| utxo.outpoint == tx.input[i].previous_output)
{
let sighash = bitcoin::sighash::SighashCache::new(&tx).p2wpkh_signature_hash(
i,
&utxo.output.script_pubkey,
utxo.output.value,
bitcoin::EcdsaSighashType::All,
)?;
let signature = self.secp.sign_ecdsa(
&secp256k1::Message::from_digest(sighash.to_byte_array()),
&self.secret_key,
);
let bitcoin_sig = bitcoin::ecdsa::Signature {
signature,
sighash_type: bitcoin::EcdsaSighashType::All,
};
tx.input[i].witness =
bitcoin::Witness::p2wpkh(&bitcoin_sig, &self.secret_key.public_key(&self.secp));
}
}
Ok(tx)
}
}

/// The LDK API requires that any time we tell it we're done persisting a `ChannelMonitor[Update]`
/// we never pass it in as the "latest" `ChannelMonitor` on startup. However, we can pass
/// out-of-date monitors as long as we never told LDK we finished persisting them, which we do by
@@ -532,7 +479,7 @@ type ChanMan<'a> = ChannelManager<
Arc<FuzzEstimator>,
&'a FuzzRouter,
&'a FuzzRouter,
Arc<dyn Logger>,
Arc<dyn Logger + MaybeSend + MaybeSync>,
>;

#[inline]
@@ -701,14 +648,14 @@ fn send_hop_payment(
}

#[inline]
pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
pub fn do_test<Out: Output + MaybeSend + MaybeSync>(data: &[u8], underlying_out: Out, anchors: bool) {
let out = SearchingOutput::new(underlying_out);
let broadcast = Arc::new(TestBroadcaster {});
let router = FuzzRouter {};

macro_rules! make_node {
($node_id: expr, $fee_estimator: expr) => {{
let logger: Arc<dyn Logger> =
let logger: Arc<dyn Logger + MaybeSend + MaybeSync> =
Arc::new(test_logger::TestLogger::new($node_id.to_string(), out.clone()));
let node_secret = SecretKey::from_slice(&[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
@@ -758,6 +705,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
),
monitor,
keys_manager,
logger,
)
}};
}
@@ -772,7 +720,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
keys,
fee_estimator| {
let keys_manager = Arc::clone(keys);
let logger: Arc<dyn Logger> =
let logger: Arc<dyn Logger + MaybeSend + MaybeSync> =
Arc::new(test_logger::TestLogger::new(node_id.to_string(), out.clone()));
let chain_monitor = Arc::new(TestChainMonitor::new(
broadcast.clone(),
@@ -1048,9 +996,9 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
}};
}

let wallet_a = TestWallet::new(SecretKey::from_slice(&[1; 32]).unwrap());
let wallet_b = TestWallet::new(SecretKey::from_slice(&[2; 32]).unwrap());
let wallet_c = TestWallet::new(SecretKey::from_slice(&[3; 32]).unwrap());
let wallet_a = TestWalletSource::new(SecretKey::from_slice(&[1; 32]).unwrap());
let wallet_b = TestWalletSource::new(SecretKey::from_slice(&[2; 32]).unwrap());
let wallet_c = TestWalletSource::new(SecretKey::from_slice(&[3; 32]).unwrap());
let wallets = vec![wallet_a, wallet_b, wallet_c];
let coinbase_tx = bitcoin::Transaction {
version: bitcoin::transaction::Version::TWO,
@@ -1064,12 +1012,8 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
})
.collect(),
};
let coinbase_txid = coinbase_tx.compute_txid();
wallets.iter().enumerate().for_each(|(i, w)| {
w.add_utxo(
bitcoin::OutPoint { txid: coinbase_txid, vout: i as u32 },
Amount::from_sat(100_000),
);
w.add_utxo(coinbase_tx.clone(), i as u32);
});

let fee_est_a = Arc::new(FuzzEstimator { ret_val: atomic::AtomicU32::new(253) });
@@ -1081,11 +1025,13 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {

// 3 nodes is enough to hit all the possible cases, notably unknown-source-unknown-dest
// forwarding.
let (node_a, mut monitor_a, keys_manager_a) = make_node!(0, fee_est_a);
let (node_b, mut monitor_b, keys_manager_b) = make_node!(1, fee_est_b);
let (node_c, mut monitor_c, keys_manager_c) = make_node!(2, fee_est_c);
let (node_a, mut monitor_a, keys_manager_a, logger_a) = make_node!(0, fee_est_a);
let (node_b, mut monitor_b, keys_manager_b, logger_b) = make_node!(1, fee_est_b);
let (node_c, mut monitor_c, keys_manager_c, logger_c) = make_node!(2, fee_est_c);

let mut nodes = [node_a, node_b, node_c];
let loggers = [logger_a, logger_b, logger_c];
let fee_estimators = [Arc::clone(&fee_est_a), Arc::clone(&fee_est_b), Arc::clone(&fee_est_c)];

let chan_1_id = make_channel!(nodes[0], nodes[1], keys_manager_b, 0);
let chan_2_id = make_channel!(nodes[1], nodes[2], keys_manager_c, 1);
@@ -1536,6 +1482,29 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
events::Event::ChannelReady { .. } => {},
events::Event::HTLCHandlingFailed { .. } => {},

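// Funding is needed (e.g. for a splice): build a contribution from this node's fuzz
// wallet at the estimator's current feerate and hand it back to the channel manager.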
events::Event::FundingNeeded {
channel_id,
counterparty_node_id,
funding_template,
..
} => {
let wallet =
WalletSync::new(&wallets[$node], Arc::clone(&loggers[$node]));
let feerate_sat_per_kw =
fee_estimators[$node].ret_val.load(atomic::Ordering::Acquire);
let feerate = FeeRate::from_sat_per_kwu(feerate_sat_per_kw as u64);
let contribution =
funding_template.build_sync(&wallet, feerate).unwrap();
let locktime = None;
nodes[$node]
.funding_contributed(
&channel_id,
&counterparty_node_id,
contribution,
locktime,
)
.unwrap();
},
events::Event::FundingTransactionReadyForSigning {
channel_id,
counterparty_node_id,
@@ -1859,16 +1828,11 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
},

0xa0 => {
let input = FundingTxInput::new_p2wpkh(coinbase_tx.clone(), 0).unwrap();
let contribution =
SpliceContribution::splice_in(Amount::from_sat(10_000), vec![input], None);
let funding_feerate_sat_per_kw = fee_est_a.ret_val.load(atomic::Ordering::Acquire);
let contribution = SpliceContribution::splice_in(Amount::from_sat(10_000));
if let Err(e) = nodes[0].splice_channel(
&chan_a_id,
&nodes[1].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -1878,16 +1842,11 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
}
},
0xa1 => {
let input = FundingTxInput::new_p2wpkh(coinbase_tx.clone(), 1).unwrap();
let contribution =
SpliceContribution::splice_in(Amount::from_sat(10_000), vec![input], None);
let funding_feerate_sat_per_kw = fee_est_b.ret_val.load(atomic::Ordering::Acquire);
let contribution = SpliceContribution::splice_in(Amount::from_sat(10_000));
if let Err(e) = nodes[1].splice_channel(
&chan_a_id,
&nodes[0].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -1897,16 +1856,11 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
}
},
0xa2 => {
let input = FundingTxInput::new_p2wpkh(coinbase_tx.clone(), 0).unwrap();
let contribution =
SpliceContribution::splice_in(Amount::from_sat(10_000), vec![input], None);
let funding_feerate_sat_per_kw = fee_est_b.ret_val.load(atomic::Ordering::Acquire);
let contribution = SpliceContribution::splice_in(Amount::from_sat(10_000));
if let Err(e) = nodes[1].splice_channel(
&chan_b_id,
&nodes[2].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -1916,16 +1870,11 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
}
},
0xa3 => {
let input = FundingTxInput::new_p2wpkh(coinbase_tx.clone(), 1).unwrap();
let contribution =
SpliceContribution::splice_in(Amount::from_sat(10_000), vec![input], None);
let funding_feerate_sat_per_kw = fee_est_c.ret_val.load(atomic::Ordering::Acquire);
let contribution = SpliceContribution::splice_in(Amount::from_sat(10_000));
if let Err(e) = nodes[2].splice_channel(
&chan_b_id,
&nodes[1].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
Expand All @@ -1950,14 +1899,10 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
value: Amount::from_sat(MAX_STD_OUTPUT_DUST_LIMIT_SATOSHIS),
script_pubkey: coinbase_tx.output[0].script_pubkey.clone(),
}]);
let funding_feerate_sat_per_kw =
fee_est_a.ret_val.load(atomic::Ordering::Acquire);
if let Err(e) = nodes[0].splice_channel(
&chan_a_id,
&nodes[1].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -1979,14 +1924,10 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
value: Amount::from_sat(MAX_STD_OUTPUT_DUST_LIMIT_SATOSHIS),
script_pubkey: coinbase_tx.output[1].script_pubkey.clone(),
}]);
let funding_feerate_sat_per_kw =
fee_est_b.ret_val.load(atomic::Ordering::Acquire);
if let Err(e) = nodes[1].splice_channel(
&chan_a_id,
&nodes[0].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -2008,14 +1949,10 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
value: Amount::from_sat(MAX_STD_OUTPUT_DUST_LIMIT_SATOSHIS),
script_pubkey: coinbase_tx.output[1].script_pubkey.clone(),
}]);
let funding_feerate_sat_per_kw =
fee_est_b.ret_val.load(atomic::Ordering::Acquire);
if let Err(e) = nodes[1].splice_channel(
&chan_b_id,
&nodes[2].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -2037,14 +1974,10 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
value: Amount::from_sat(MAX_STD_OUTPUT_DUST_LIMIT_SATOSHIS),
script_pubkey: coinbase_tx.output[2].script_pubkey.clone(),
}]);
let funding_feerate_sat_per_kw =
fee_est_c.ret_val.load(atomic::Ordering::Acquire);
if let Err(e) = nodes[2].splice_channel(
&chan_b_id,
&nodes[1].get_our_node_id(),
contribution,
funding_feerate_sat_per_kw,
None,
) {
assert!(
matches!(e, APIError::APIMisuseError { ref err } if err.contains("splice pending")),
@@ -2276,7 +2209,7 @@ impl<O: Output> SearchingOutput<O> {
}
}

pub fn chanmon_consistency_test<Out: Output>(data: &[u8], out: Out) {
pub fn chanmon_consistency_test<Out: Output + MaybeSend + MaybeSync>(data: &[u8], out: Out) {
do_test(data, out.clone(), false);
do_test(data, out, true);
}
3 changes: 2 additions & 1 deletion lightning-tests/src/upgrade_downgrade_tests.rs
@@ -455,7 +455,8 @@ fn do_test_0_1_htlc_forward_after_splice(fail_htlc: bool) {
value: Amount::from_sat(1_000),
script_pubkey: nodes[0].wallet_source.get_change_script().unwrap(),
}]);
let splice_tx = splice_channel(&nodes[0], &nodes[1], ChannelId(chan_id_bytes_a), contribution);
let (splice_tx, _) =
splice_channel(&nodes[0], &nodes[1], ChannelId(chan_id_bytes_a), contribution);
for node in nodes.iter() {
mine_transaction(node, &splice_tx);
connect_blocks(node, ANTI_REORG_DELAY - 1);