Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 5a65e04

Browse files
Committed on Feb 19, 2025
Code review
1 parent 95e010b commit 5a65e04

File tree

2 files changed

+70
-60
lines changed

2 files changed

+70
-60
lines changed
 

‎src/builder.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -973,7 +973,7 @@ fn build_with_store_internal(
973973
let persister = Arc::new(Persister::new(
974974
Arc::clone(&kv_store),
975975
Arc::clone(&logger),
976-
10, // (?)
976+
100,
977977
Arc::clone(&keys_manager),
978978
Arc::clone(&keys_manager),
979979
Arc::clone(&tx_broadcaster),

‎src/io/test_utils.rs

+69-59
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@
66
// accordance with one or both of these licenses.
77

88
use lightning::ln::functional_test_utils::{
9-
check_closed_event, connect_block, create_announced_chan_between_nodes, create_chanmon_cfgs,
10-
create_dummy_block, create_network, create_node_cfgs, create_node_chanmgrs, send_payment,
9+
connect_block, create_announced_chan_between_nodes, create_chanmon_cfgs, create_dummy_block,
10+
create_network, create_node_cfgs, create_node_chanmgrs, send_payment, TestChanMonCfg,
1111
};
1212
use lightning::util::persist::{
1313
KVStore, MonitorName, MonitorUpdatingPersister,
@@ -16,14 +16,23 @@ use lightning::util::persist::{
1616

1717
use lightning::events::ClosureReason;
1818
use lightning::util::test_utils;
19-
use lightning::{check_added_monitors, check_closed_broadcast};
19+
use lightning::{check_added_monitors, check_closed_broadcast, check_closed_event};
2020

2121
use rand::distributions::Alphanumeric;
2222
use rand::{thread_rng, Rng};
2323

2424
use std::panic::RefUnwindSafe;
2525
use std::path::PathBuf;
2626

27+
type TestMonitorUpdatePersister<'a, K> = MonitorUpdatingPersister<
28+
&'a K,
29+
&'a test_utils::TestLogger,
30+
&'a test_utils::TestKeysInterface,
31+
&'a test_utils::TestKeysInterface,
32+
&'a test_utils::TestBroadcaster,
33+
&'a test_utils::TestFeeEstimator,
34+
>;
35+
2736
const EXPECTED_UPDATES_PER_PAYMENT: u64 = 5;
2837

2938
pub(crate) fn random_storage_path() -> PathBuf {
@@ -83,6 +92,35 @@ pub(crate) fn do_read_write_remove_list_persist<K: KVStore + RefUnwindSafe>(kv_s
8392
assert_eq!(listed_keys.len(), 0);
8493
}
8594

95+
pub(crate) fn create_persister<'a, K: KVStore>(
96+
store: &'a K, chanmon_cfg: &'a TestChanMonCfg, max_pending_updates: u64,
97+
) -> TestMonitorUpdatePersister<'a, K> {
98+
let persister: TestMonitorUpdatePersister<'a, K> = MonitorUpdatingPersister::new(
99+
store,
100+
&chanmon_cfg.logger,
101+
max_pending_updates,
102+
&chanmon_cfg.keys_manager,
103+
&chanmon_cfg.keys_manager,
104+
&chanmon_cfg.tx_broadcaster,
105+
&chanmon_cfg.fee_estimator,
106+
);
107+
return persister;
108+
}
109+
110+
pub(crate) fn create_chain_monitor<'a, K: KVStore>(
111+
chanmon_cfg: &'a TestChanMonCfg, persister: &'a TestMonitorUpdatePersister<'a, K>,
112+
) -> test_utils::TestChainMonitor<'a> {
113+
let chain_mon = test_utils::TestChainMonitor::new(
114+
Some(&chanmon_cfg.chain_source),
115+
&chanmon_cfg.tx_broadcaster,
116+
&chanmon_cfg.logger,
117+
&chanmon_cfg.fee_estimator,
118+
persister,
119+
&chanmon_cfg.keys_manager,
120+
);
121+
return chain_mon;
122+
}
123+
86124
// Integration-test the given KVStore implementation. Test relaying a few payments and check that
87125
// the persisted data is updated the appropriate number of times.
88126
pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
@@ -93,46 +131,13 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
93131

94132
let chanmon_cfgs = create_chanmon_cfgs(2);
95133

96-
let persister_0 = MonitorUpdatingPersister::new(
97-
store_0,
98-
&chanmon_cfgs[0].logger,
99-
persister_0_max_pending_updates,
100-
&chanmon_cfgs[0].keys_manager,
101-
&chanmon_cfgs[0].keys_manager,
102-
&chanmon_cfgs[0].tx_broadcaster,
103-
&chanmon_cfgs[0].fee_estimator,
104-
);
134+
let persister_0 = create_persister(store_0, &chanmon_cfgs[0], persister_0_max_pending_updates);
135+
let persister_1 = create_persister(store_1, &chanmon_cfgs[1], persister_1_max_pending_updates);
105136

106-
let persister_1 = MonitorUpdatingPersister::new(
107-
store_1,
108-
&chanmon_cfgs[1].logger,
109-
persister_1_max_pending_updates,
110-
&chanmon_cfgs[1].keys_manager,
111-
&chanmon_cfgs[1].keys_manager,
112-
&chanmon_cfgs[1].tx_broadcaster,
113-
&chanmon_cfgs[1].fee_estimator,
114-
);
137+
let chain_mon_0 = create_chain_monitor(&chanmon_cfgs[0], &persister_0);
138+
let chain_mon_1 = create_chain_monitor(&chanmon_cfgs[1], &persister_1);
115139

116140
let mut node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
117-
118-
let chain_mon_0 = test_utils::TestChainMonitor::new(
119-
Some(&chanmon_cfgs[0].chain_source),
120-
&chanmon_cfgs[0].tx_broadcaster,
121-
&chanmon_cfgs[0].logger,
122-
&chanmon_cfgs[0].fee_estimator,
123-
&persister_0,
124-
&chanmon_cfgs[0].keys_manager,
125-
);
126-
127-
let chain_mon_1 = test_utils::TestChainMonitor::new(
128-
Some(&chanmon_cfgs[1].chain_source),
129-
&chanmon_cfgs[1].tx_broadcaster,
130-
&chanmon_cfgs[1].logger,
131-
&chanmon_cfgs[1].fee_estimator,
132-
&persister_1,
133-
&chanmon_cfgs[1].keys_manager,
134-
);
135-
136141
node_cfgs[0].chain_monitor = chain_mon_0;
137142
node_cfgs[1].chain_monitor = chain_mon_1;
138143
let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
@@ -155,14 +160,15 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
155160
assert_eq!(mon.get_latest_update_id(), $expected_update_id);
156161

157162
let monitor_name = MonitorName::from(mon.get_funding_txo().0);
163+
let store_0_updates = store_0
164+
.list(
165+
CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
166+
monitor_name.as_str(),
167+
)
168+
.unwrap()
169+
.len() as u64;
158170
assert_eq!(
159-
store_0
160-
.list(
161-
CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
162-
monitor_name.as_str()
163-
)
164-
.unwrap()
165-
.len() as u64,
171+
store_0_updates,
166172
mon.get_latest_update_id() % persister_0_max_pending_updates,
167173
"Wrong number of updates stored in persister 0",
168174
);
@@ -173,14 +179,15 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
173179
assert_eq!(mon.get_latest_update_id(), $expected_update_id);
174180

175181
let monitor_name = MonitorName::from(mon.get_funding_txo().0);
182+
let store_1_updates = store_1
183+
.list(
184+
CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
185+
monitor_name.as_str(),
186+
)
187+
.unwrap()
188+
.len() as u64;
176189
assert_eq!(
177-
store_1
178-
.list(
179-
CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
180-
monitor_name.as_str()
181-
)
182-
.unwrap()
183-
.len() as u64,
190+
store_1_updates,
184191
mon.get_latest_update_id() % persister_1_max_pending_updates,
185192
"Wrong number of updates stored in persister 1",
186193
);
@@ -193,9 +200,11 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
193200
check_persisted_data!(0);
194201

195202
// Send a few payments and make sure the monitors are updated to the latest.
196-
send_payment(&nodes[0], &vec![&nodes[1]][..], 8_000_000);
203+
let expected_route = &[&nodes[1]][..];
204+
send_payment(&nodes[0], expected_route, 8_000_000);
197205
check_persisted_data!(EXPECTED_UPDATES_PER_PAYMENT);
198-
send_payment(&nodes[1], &vec![&nodes[0]][..], 4_000_000);
206+
let expected_route = &[&nodes[0]][..];
207+
send_payment(&nodes[1], expected_route, 4_000_000);
199208
check_persisted_data!(2 * EXPECTED_UPDATES_PER_PAYMENT);
200209

201210
// Send a few more payments to try all the alignments of max pending updates with
@@ -210,7 +219,8 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
210219
sender = 0;
211220
receiver = 1;
212221
}
213-
send_payment(&nodes[sender], &vec![&nodes[receiver]][..], 21_000);
222+
let expected_route = &[&nodes[receiver]][..];
223+
send_payment(&nodes[sender], expected_route, 21_000);
214224
check_persisted_data!(i * EXPECTED_UPDATES_PER_PAYMENT);
215225
}
216226

@@ -223,7 +233,7 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
223233
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &node_id_1, err_msg).unwrap();
224234

225235
let reason = ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) };
226-
check_closed_event(&nodes[0], 1, reason, false, &[node_id_1], 100000);
236+
check_closed_event!(nodes[0], 1, reason, false, [node_id_1], 100000);
227237
check_closed_broadcast!(nodes[0], true);
228238
check_added_monitors!(nodes[0], 1);
229239

@@ -236,7 +246,7 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
236246
check_closed_broadcast!(nodes[1], true);
237247
let reason = ClosureReason::CommitmentTxConfirmed;
238248
let node_id_0 = nodes[0].node.get_our_node_id();
239-
check_closed_event(&nodes[1], 1, reason, false, &[node_id_0], 100000);
249+
check_closed_event!(nodes[1], 1, reason, false, [node_id_0], 100000);
240250
check_added_monitors!(nodes[1], 1);
241251

242252
// Make sure everything is persisted as expected after close.

0 commit comments

Comments (0)
Please sign in to comment.