Skip to content

Commit 4d316b2

Browse files
Avoid redundant closed channel preimage updates
Previously, when we restart with the ChannelManager behind a ChannelMonitor, and that monitor has already claimed an HTLC but the manager thinks the HTLC is yet-to-be-claimed, we will generate a redundant preimage monitor update. That was fine since the manager being behind the monitor is rare, but in upcoming commits we are moving towards not persisting the ChannelManager's pending_claiming_payments map, and instead rebuilding its contents from the monitors. As a consequence of these changes, our existing safeguard that prevents generating redundant preimage monitor updates in the case that the manager is *not* behind the monitor will no longer work, because we can no longer check the pending_claiming_payments map to short-circuit the logic that generates the (possibly redundant) preimage monitor updates. So here we implement more rigorous checks to determine whether a preimage update for a closed channel is redundant before generating it, which handles the current manager-behind cases as well as the upcoming cases when we start reconstructing the pending_claiming_payments map.
1 parent 2503327 commit 4d316b2

3 files changed

Lines changed: 78 additions & 35 deletions

File tree

lightning/src/ln/channelmanager.rs

Lines changed: 61 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -9506,7 +9506,43 @@ impl<
95069506
}
95079507
}
95089508

9509+
// Below, we always queue up the monitor update completion action because we don't have any
9510+
// idea if it's duplicative. This may result in a duplicate `Event`, but note that `Event`s are
9511+
// generally always allowed to be duplicative (and it's specifically noted in
9512+
// `PaymentForwarded`).
9513+
let (action_opt, raa_blocker_opt) = completion_action(None, false);
9514+
9515+
let needs_post_close_monitor_update =
9516+
raa_blocker_opt.as_ref().map_or(true, |raa_blocker| match raa_blocker {
9517+
RAAMonitorUpdateBlockingAction::ClaimedMPPPayment { pending_claim } => {
9518+
// If this monitor already has the preimage, there's no need to generate a redundant update.
9519+
let claim = pending_claim.0.lock().unwrap();
9520+
claim
9521+
.channels_without_preimage
9522+
.contains(&(prev_hop.counterparty_node_id, chan_id))
9523+
},
9524+
RAAMonitorUpdateBlockingAction::ForwardedPaymentInboundClaim { .. } => true,
9525+
});
9526+
95099527
let peer_state = &mut *peer_state_lock;
9528+
if let Some(raa_blocker) = raa_blocker_opt {
9529+
peer_state
9530+
.actions_blocking_raa_monitor_updates
9531+
.entry(prev_hop.channel_id)
9532+
.or_default()
9533+
.push(raa_blocker);
9534+
}
9535+
9536+
if !needs_post_close_monitor_update {
9537+
// If there's no need for a monitor update, just run the (possibly duplicative) completion
9538+
// action.
9539+
if let Some(action) = action_opt {
9540+
mem::drop(peer_state_lock);
9541+
mem::drop(per_peer_state);
9542+
self.handle_monitor_update_completion_actions(core::iter::once(action));
9543+
return;
9544+
}
9545+
}
95109546

95119547
let update_id = if let Some(latest_update_id) =
95129548
peer_state.closed_channel_monitor_update_ids.get_mut(&chan_id)
@@ -9530,21 +9566,6 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
95309566
channel_id: Some(prev_hop.channel_id),
95319567
};
95329568

9533-
// We don't have any idea if this is a duplicate claim without interrogating the
9534-
// `ChannelMonitor`, so we just always queue up the completion action after the
9535-
// `ChannelMonitorUpdate` we're about to generate. This may result in a duplicate `Event`,
9536-
// but note that `Event`s are generally always allowed to be duplicative (and it's
9537-
// specifically noted in `PaymentForwarded`).
9538-
let (action_opt, raa_blocker_opt) = completion_action(None, false);
9539-
9540-
if let Some(raa_blocker) = raa_blocker_opt {
9541-
peer_state
9542-
.actions_blocking_raa_monitor_updates
9543-
.entry(prev_hop.channel_id)
9544-
.or_default()
9545-
.push(raa_blocker);
9546-
}
9547-
95489569
// Given the fact that we're in a bit of a weird edge case, its worth hashing the preimage
95499570
// to include the `payment_hash` in the log metadata here.
95509571
let payment_hash = payment_preimage.into();
@@ -19820,6 +19841,15 @@ impl<
1982019841
)
1982119842
});
1982219843

19844+
// Build the set of channels where the preimage is durably persisted, for use below
19845+
let mut channels_with_durable_preimage: HashSet<(ChannelId, PaymentHash)> = new_hash_set();
19846+
for (channel_id, monitor) in args.channel_monitors.iter() {
19847+
for (payment_hash, (_, claims)) in monitor.get_stored_preimages() {
19848+
if !claims.is_empty() {
19849+
channels_with_durable_preimage.insert((*channel_id, payment_hash));
19850+
}
19851+
}
19852+
}
1982319853
for (channel_id, counterparty_node_id, payment_hash, payment_preimage, payment_claims) in
1982419854
monitor_preimages
1982519855
{
@@ -19890,20 +19920,27 @@ impl<
1989019920
}
1989119921
}
1989219922

19893-
let mut channels_without_preimage = payment_claim
19894-
.mpp_parts
19895-
.iter()
19896-
.map(|htlc_info| (htlc_info.counterparty_node_id, htlc_info.channel_id))
19897-
.collect::<Vec<_>>();
19923+
let mut channels_with_preimage = Vec::new();
19924+
let mut channels_without_preimage = Vec::new();
19925+
for htlc_info in payment_claim.mpp_parts.iter() {
19926+
if channels_with_durable_preimage
19927+
.contains(&(htlc_info.channel_id, payment_hash))
19928+
{
19929+
channels_with_preimage
19930+
.push((htlc_info.counterparty_node_id, htlc_info.channel_id));
19931+
} else {
19932+
channels_without_preimage
19933+
.push((htlc_info.counterparty_node_id, htlc_info.channel_id));
19934+
}
19935+
}
19936+
1989819937
// If we have multiple MPP parts which were received over the same channel,
1989919938
// we only track it once as once we get a preimage durably in the
1990019939
// `ChannelMonitor` it will be used for all HTLCs with a matching hash.
1990119940
channels_without_preimage.sort_unstable();
1990219941
channels_without_preimage.dedup();
19903-
let pending_claims = PendingMPPClaim {
19904-
channels_without_preimage,
19905-
channels_with_preimage: Vec::new(),
19906-
};
19942+
let pending_claims =
19943+
PendingMPPClaim { channels_without_preimage, channels_with_preimage };
1990719944
let pending_claim_ptr_opt = Some(Arc::new(Mutex::new(pending_claims)));
1990819945

1990919946
// While it may be duplicative to generate a PaymentClaimed here, trying to

lightning/src/ln/monitor_tests.rs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3397,10 +3397,9 @@ fn test_claim_event_never_handled() {
33973397
check_added_monitors(&nodes[1], 0);
33983398

33993399
expect_payment_claimed!(nodes[1], payment_hash_a, 1_000_000);
3400-
// The reload logic spuriously generates 2 redundant payment preimage-containing
3401-
// `ChannelMonitorUpdate`s, plus we get a monitor update once the PaymentClaimed event is
3402-
// processed.
3403-
check_added_monitors(&nodes[1], 3);
3400+
// One monitor update for the outdated channel force-closure, one for the PaymentClaimed event
3401+
// being handled
3402+
check_added_monitors(&nodes[1], 2);
34043403
}
34053404

34063405
fn do_test_lost_preimage_monitor_events(on_counterparty_tx: bool, p2a_anchor: bool) {

lightning/src/ln/reload_tests.rs

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -853,14 +853,12 @@ fn do_test_partial_claim_before_restart(persist_both_monitors: bool, double_rest
853853
if persist_both_monitors {
854854
if let Event::ChannelClosed { reason: ClosureReason::OutdatedChannelManager, .. } = events[2] { } else { panic!(); }
855855
if let Event::PaymentClaimed { amount_msat: 15_000_000, .. } = events[3] { } else { panic!(); }
856-
// 4 monitors for preimage updates + 1 for InboundPaymentClaimed marking the payment as
857-
// claimed in the closed channel's monitor.
858-
check_added_monitors(&nodes[3], 5);
856+
// One update per channel closure + an update for PaymentClaimed being processed
857+
check_added_monitors(&nodes[3], 3);
859858
} else {
860859
if let Event::PaymentClaimed { amount_msat: 15_000_000, .. } = events[2] { } else { panic!(); }
861-
// Only one channel closed; the durable_preimage_channel is the live one, so no extra
862-
// InboundPaymentClaimed update is generated.
863-
check_added_monitors(&nodes[3], 3);
860+
// One update for channel closure, one for preimage replay to non-persisted monitor
861+
check_added_monitors(&nodes[3], 2);
864862
}
865863

866864
// Now that we've processed background events, the preimage should have been copied into the
@@ -929,10 +927,19 @@ fn do_test_partial_claim_before_restart(persist_both_monitors: bool, double_rest
929927
}
930928

931929
#[test]
932-
fn test_partial_claim_before_restart() {
930+
fn test_partial_claim_before_restart_a() {
933931
do_test_partial_claim_before_restart(false, false);
932+
}
933+
#[test]
934+
fn test_partial_claim_before_restart_b() {
934935
do_test_partial_claim_before_restart(false, true);
936+
}
937+
#[test]
938+
fn test_partial_claim_before_restart_c() {
935939
do_test_partial_claim_before_restart(true, false);
940+
}
941+
#[test]
942+
fn test_partial_claim_before_restart_d() {
936943
do_test_partial_claim_before_restart(true, true);
937944
}
938945

0 commit comments

Comments
 (0)