Skip to content
136 changes: 136 additions & 0 deletions lightning-tests/src/upgrade_downgrade_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -701,3 +701,139 @@ fn do_upgrade_mid_htlc_forward(test: MidHtlcForwardCase) {
expect_payment_claimable!(nodes[2], pay_hash, pay_secret, 1_000_000);
claim_payment(&nodes[0], &[&nodes[1], &nodes[2]], pay_preimage);
}

#[test]
fn test_0_0_125_max_update_id_upgrade() {
	// Tests that a `ChannelMonitor` written by LDK 0.0.125 with the legacy "closed" update ID of
	// `u64::MAX` can be read back through a `MonitorUpdatingPersister`, and that re-persisting it
	// writes a full (sentinel-prefixed) monitor rather than an incremental update.
	use lightning::chain::chainmonitor::Persist;
	use lightning::util::persist::{
		KVStoreSync, MonitorUpdatingPersister, CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
		CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
		CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
		MONITOR_UPDATING_PERSISTER_PREPEND_SENTINEL,
	};
	use lightning::util::ser::ReadableArgs;
	use lightning::util::test_utils::TestStore;

	// TODO(review): rather than storing the monitor, loading it, and inspecting it, exercise the
	// async monitor-updating flow (return `InProgress` from the first write), then do a normal
	// `ChannelManager` load and check that it keeps operating and that the pending monitor update
	// is still surfaced and written through a `MonitorUpdatingPersister`.

	// Phase 1: Create old LDK state with u64::MAX update IDs via force-close.
	let mon_b_ser;
	{
		let chanmon_cfgs = lightning_0_0_125_utils::create_chanmon_cfgs(2);
		let node_cfgs = lightning_0_0_125_utils::create_node_cfgs(2, &chanmon_cfgs);
		let node_chanmgrs =
			lightning_0_0_125_utils::create_node_chanmgrs(2, &node_cfgs, &[None, None]);
		let nodes = lightning_0_0_125_utils::create_network(2, &node_cfgs, &node_chanmgrs);

		let node_a_id = nodes[0].node.get_our_node_id();
		let chan_id = lightning_0_0_125_utils::create_announced_chan_between_nodes(&nodes, 0, 1).2;

		lightning_0_0_125_utils::route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

		let err = "".to_owned();
		nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &node_a_id, err).unwrap();

		lightning_0_0_125_utils::check_added_monitors(&nodes[1], 1);
		let reason =
			ClosureReason_0_0_125::HolderForceClosed { broadcasted_latest_txn: Some(true) };
		lightning_0_0_125_utils::check_closed_event(
			&nodes[1],
			1,
			reason,
			false,
			&[node_a_id],
			100000,
		);
		lightning_0_0_125_utils::check_closed_broadcast(&nodes[1], 1, true);

		mon_b_ser = get_monitor_0_0_125!(nodes[1], chan_id).encode();
	}

	// Phase 2: Pre-seed a TestStore with old monitor data (simulating an existing KV store
	// from a pre-0.1 LDK install), then verify MonitorUpdatingPersister handles it correctly.
	let chanmon_cfgs = create_chanmon_cfgs(2);

	let kv_store = TestStore::new(false);
	let max_pending_updates = 5;
	let persister = MonitorUpdatingPersister::new(
		&kv_store,
		&chanmon_cfgs[1].logger,
		max_pending_updates,
		&chanmon_cfgs[1].keys_manager,
		&chanmon_cfgs[1].keys_manager,
		&chanmon_cfgs[1].tx_broadcaster,
		&chanmon_cfgs[1].fee_estimator,
	);

	// Deserialize node_b's monitor to get its persistence key, then write the raw bytes
	// into the store (without the sentinel prefix, as old KVStoreSync-based persist would).
	let (_, mon_b) = <(
		bitcoin::BlockHash,
		lightning::chain::channelmonitor::ChannelMonitor<
			lightning::util::test_channel_signer::TestChannelSigner,
		>,
	)>::read(
		&mut &mon_b_ser[..],
		(&chanmon_cfgs[1].keys_manager, &chanmon_cfgs[1].keys_manager),
	)
	.unwrap();
	let monitor_key = mon_b.persistence_key().to_string();
	assert_eq!(mon_b.get_latest_update_id(), u64::MAX);

	KVStoreSync::write(
		&kv_store,
		CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
		CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
		&monitor_key,
		mon_b_ser.clone(),
	)
	.unwrap();

	// Phase 3: Verify MonitorUpdatingPersister can read the old monitor with u64::MAX update ID.
	let mons = persister.read_all_channel_monitors_with_updates().unwrap();
	assert_eq!(mons.len(), 1);
	assert_eq!(mons[0].1.get_latest_update_id(), u64::MAX);

	// Verify no incremental update files exist yet.
	let updates = KVStoreSync::list(
		&kv_store,
		CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
		&monitor_key,
	)
	.unwrap();
	assert!(updates.is_empty());

	// Phase 4: Verify that persisting a u64::MAX monitor through MonitorUpdatingPersister
	// writes a full monitor (not an incremental update).
	let persist_res = persister.persist_new_channel(mon_b.persistence_key(), &mons[0].1);
	assert_eq!(persist_res, lightning::chain::ChannelMonitorUpdateStatus::Completed);

	// The full monitor should now be stored with the sentinel prefix.
	let stored_bytes = KVStoreSync::read(
		&kv_store,
		CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
		CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
		&monitor_key,
	)
	.unwrap();
	assert!(
		stored_bytes.starts_with(MONITOR_UPDATING_PERSISTER_PREPEND_SENTINEL),
		"Expected sentinel prefix on re-persisted monitor"
	);

	// Re-read after persist to confirm round-trip works with sentinel prefix.
	let mons_after = persister.read_all_channel_monitors_with_updates().unwrap();
	assert_eq!(mons_after.len(), 1);
	assert_eq!(mons_after[0].1.get_latest_update_id(), u64::MAX);

	// Still no incremental updates should exist for a u64::MAX monitor.
	let updates_after = KVStoreSync::list(
		&kv_store,
		CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE,
		&monitor_key,
	)
	.unwrap();
	assert!(
		updates_after.is_empty(),
		"Expected no incremental updates for u64::MAX monitor, found {}",
		updates_after.len()
	);
}
30 changes: 27 additions & 3 deletions lightning/src/ln/funding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,15 @@ impl FundingTemplate {
return Err(());
}
let FundingTemplate { shared_input, min_feerate, max_feerate } = self;
build_funding_contribution!(value_added, vec![], shared_input, min_feerate, max_feerate, wallet, await)
build_funding_contribution!(
value_added,
vec![],
shared_input,
min_feerate,
max_feerate,
wallet,
await
)
}

/// Creates a [`FundingContribution`] for adding funds to a channel using `wallet` to perform
Expand Down Expand Up @@ -251,7 +259,15 @@ impl FundingTemplate {
return Err(());
}
let FundingTemplate { shared_input, min_feerate, max_feerate } = self;
build_funding_contribution!(Amount::ZERO, outputs, shared_input, min_feerate, max_feerate, wallet, await)
build_funding_contribution!(
Amount::ZERO,
outputs,
shared_input,
min_feerate,
max_feerate,
wallet,
await
)
}

/// Creates a [`FundingContribution`] for removing funds from a channel using `wallet` to
Expand Down Expand Up @@ -282,7 +298,15 @@ impl FundingTemplate {
return Err(());
}
let FundingTemplate { shared_input, min_feerate, max_feerate } = self;
build_funding_contribution!(value_added, outputs, shared_input, min_feerate, max_feerate, wallet, await)
build_funding_contribution!(
value_added,
outputs,
shared_input,
min_feerate,
max_feerate,
wallet,
await
)
}

/// Creates a [`FundingContribution`] for both adding and removing funds from a channel using
Expand Down
Loading