
Commit 6960cb8

Refactor ShutdownResult type and construction
1 parent 4e3d2d1 commit 6960cb8

File tree

lightning/src/ln/channel.rs
lightning/src/ln/channelmanager.rs

2 files changed: +72 -52 lines

lightning/src/ln/channel.rs (+51 -27)
@@ -542,18 +542,16 @@ pub(super) struct ReestablishResponses {
 	pub shutdown_msg: Option<msgs::Shutdown>,
 }

-/// The return type of `force_shutdown`
-///
-/// Contains a tuple with the following:
-/// - An optional (counterparty_node_id, funding_txo, [`ChannelMonitorUpdate`]) tuple
-/// - A list of HTLCs to fail back in the form of the (source, payment hash, and this channel's
-///   counterparty_node_id and channel_id).
-/// - An optional transaction id identifying a corresponding batch funding transaction.
-pub(crate) type ShutdownResult = (
-	Option<(PublicKey, OutPoint, ChannelMonitorUpdate)>,
-	Vec<(HTLCSource, PaymentHash, PublicKey, ChannelId)>,
-	Option<Txid>
-);
+/// The result of a shutdown that should be handled.
+pub(crate) struct ShutdownResult {
+	/// A channel monitor update to apply.
+	pub(crate) monitor_update: Option<(PublicKey, OutPoint, ChannelMonitorUpdate)>,
+	/// A list of dropped outbound HTLCs that can safely be failed backwards immediately.
+	pub(crate) dropped_outbound_htlcs: Vec<(HTLCSource, PaymentHash, PublicKey, ChannelId)>,
+	/// An unbroadcasted batch funding transaction id. The closure of this channel should be
+	/// propagated to the remainder of the batch.
+	pub(crate) unbroadcasted_batch_funding_txid: Option<Txid>,
+}

 /// If the majority of the channels funds are to the fundee and the initiator holds only just
 /// enough funds to cover their reserve value, channels are at risk of getting "stuck". Because the
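The switch above from a positional three-tuple alias to a named struct means call sites can no longer confuse fields that happen to share a type (the two Options, for instance), and every construction site documents itself. A minimal, self-contained sketch of the before/after ergonomics; the element types here are simplified placeholders, not the real lightning types:

// Placeholder aliases standing in for the real lightning types.
type MonitorUpdate = (u8, u8); // stands in for (PublicKey, OutPoint, ChannelMonitorUpdate)
type DroppedHtlc = (u8, u8);   // stands in for (HTLCSource, PaymentHash, PublicKey, ChannelId)
type Txid = [u8; 32];

// Before: a positional tuple alias; position carries all the meaning.
type ShutdownResultTuple = (Option<MonitorUpdate>, Vec<DroppedHtlc>, Option<Txid>);

// After: named fields, as in the commit.
struct ShutdownResult {
	monitor_update: Option<MonitorUpdate>,
	dropped_outbound_htlcs: Vec<DroppedHtlc>,
	unbroadcasted_batch_funding_txid: Option<Txid>,
}

fn main() {
	let _old: ShutdownResultTuple = (None, Vec::new(), Some([0u8; 32]));

	// Adding a fourth field later is a compile error at every struct
	// literal, instead of a silent reshuffle of .0/.1/.2 accesses.
	let new = ShutdownResult {
		monitor_update: None,
		dropped_outbound_htlcs: Vec::new(),
		unbroadcasted_batch_funding_txid: Some([0u8; 32]),
	};
	assert!(new.monitor_update.is_none());
	assert!(new.dropped_outbound_htlcs.is_empty());
}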
@@ -2063,7 +2061,11 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {

 		self.channel_state = ChannelState::ShutdownComplete as u32;
 		self.update_time_counter += 1;
-		(monitor_update, dropped_outbound_htlcs, unbroadcasted_batch_funding_txid)
+		ShutdownResult {
+			monitor_update,
+			dropped_outbound_htlcs,
+			unbroadcasted_batch_funding_txid,
+		}
 	}
 }

@@ -4218,18 +4220,18 @@ impl<SP: Deref> Channel<SP> where

 	pub fn maybe_propose_closing_signed<F: Deref, L: Deref>(
 		&mut self, fee_estimator: &LowerBoundedFeeEstimator<F>, logger: &L)
-		-> Result<(Option<msgs::ClosingSigned>, Option<Transaction>), ChannelError>
+		-> Result<(Option<msgs::ClosingSigned>, Option<Transaction>, Option<ShutdownResult>), ChannelError>
 		where F::Target: FeeEstimator, L::Target: Logger
 	{
 		if self.context.last_sent_closing_fee.is_some() || !self.closing_negotiation_ready() {
-			return Ok((None, None));
+			return Ok((None, None, None));
 		}

 		if !self.context.is_outbound() {
 			if let Some(msg) = &self.context.pending_counterparty_closing_signed.take() {
 				return self.closing_signed(fee_estimator, &msg);
 			}
-			return Ok((None, None));
+			return Ok((None, None, None));
 		}

 		let (our_min_fee, our_max_fee) = self.calculate_closing_fee_limits(fee_estimator);
@@ -4254,7 +4256,7 @@ impl<SP: Deref> Channel<SP> where
 					min_fee_satoshis: our_min_fee,
 					max_fee_satoshis: our_max_fee,
 				}),
-			}), None))
+			}), None, None))
 			}
 		}
 	}
@@ -4403,7 +4405,7 @@ impl<SP: Deref> Channel<SP> where

 	pub fn closing_signed<F: Deref>(
 		&mut self, fee_estimator: &LowerBoundedFeeEstimator<F>, msg: &msgs::ClosingSigned)
-		-> Result<(Option<msgs::ClosingSigned>, Option<Transaction>), ChannelError>
+		-> Result<(Option<msgs::ClosingSigned>, Option<Transaction>, Option<ShutdownResult>), ChannelError>
 		where F::Target: FeeEstimator
 	{
 		if self.context.channel_state & BOTH_SIDES_SHUTDOWN_MASK != BOTH_SIDES_SHUTDOWN_MASK {
@@ -4425,7 +4427,7 @@ impl<SP: Deref> Channel<SP> where

 		if self.context.channel_state & ChannelState::MonitorUpdateInProgress as u32 != 0 {
 			self.context.pending_counterparty_closing_signed = Some(msg.clone());
-			return Ok((None, None));
+			return Ok((None, None, None));
 		}

 		let funding_redeemscript = self.context.get_funding_redeemscript();
@@ -4455,10 +4457,15 @@ impl<SP: Deref> Channel<SP> where
 		assert!(self.context.shutdown_scriptpubkey.is_some());
 		if let Some((last_fee, sig)) = self.context.last_sent_closing_fee {
 			if last_fee == msg.fee_satoshis {
+				let shutdown_result = ShutdownResult {
+					monitor_update: None,
+					dropped_outbound_htlcs: Vec::new(),
+					unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
+				};
 				let tx = self.build_signed_closing_transaction(&mut closing_tx, &msg.signature, &sig);
 				self.context.channel_state = ChannelState::ShutdownComplete as u32;
 				self.context.update_time_counter += 1;
-				return Ok((None, Some(tx)));
+				return Ok((None, Some(tx), Some(shutdown_result)));
 			}
 		}

@@ -4477,13 +4484,22 @@ impl<SP: Deref> Channel<SP> where
 				let sig = ecdsa
 					.sign_closing_transaction(&closing_tx, &self.context.secp_ctx)
 					.map_err(|_| ChannelError::Close("External signer refused to sign closing transaction".to_owned()))?;
-
-				let signed_tx = if $new_fee == msg.fee_satoshis {
+				let signed_tx;
+				let shutdown_result;
+				if $new_fee == msg.fee_satoshis {
+					shutdown_result = Some(ShutdownResult {
+						monitor_update: None,
+						dropped_outbound_htlcs: Vec::new(),
+						unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
+					});
 					self.context.channel_state = ChannelState::ShutdownComplete as u32;
 					self.context.update_time_counter += 1;
 					let tx = self.build_signed_closing_transaction(&closing_tx, &msg.signature, &sig);
-					Some(tx)
-				} else { None };
+					signed_tx = Some(tx);
+				} else {
+					shutdown_result = None;
+					signed_tx = None;
+				};

 				self.context.last_sent_closing_fee = Some((used_fee, sig.clone()));
 				Ok((Some(msgs::ClosingSigned {
@@ -4494,7 +4510,7 @@ impl<SP: Deref> Channel<SP> where
 						min_fee_satoshis: our_min_fee,
 						max_fee_satoshis: our_max_fee,
 					}),
-				}), signed_tx))
+				}), signed_tx, shutdown_result))
 				}
 			}
 		}
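Each cooperative-close site in this commit builds the same literal: no monitor update, no dropped HTLCs, only the unbroadcasted batch funding txid. A purely hypothetical constructor, not part of the commit, sketching how that repetition could be factored out (field payload types simplified to placeholders):

type Txid = [u8; 32]; // placeholder for bitcoin::Txid

struct ShutdownResult {
	monitor_update: Option<u8>,      // payload simplified for the sketch
	dropped_outbound_htlcs: Vec<u8>, // element type simplified
	unbroadcasted_batch_funding_txid: Option<Txid>,
}

impl ShutdownResult {
	// Hypothetical helper, NOT in the commit: a cooperative close carries
	// no monitor update and no dropped HTLCs, only the batch txid.
	fn coop_close(unbroadcasted_batch_funding_txid: Option<Txid>) -> Self {
		ShutdownResult {
			monitor_update: None,
			dropped_outbound_htlcs: Vec::new(),
			unbroadcasted_batch_funding_txid,
		}
	}
}

fn main() {
	let res = ShutdownResult::coop_close(None);
	assert!(res.monitor_update.is_none() && res.dropped_outbound_htlcs.is_empty());
}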
@@ -5572,7 +5588,7 @@ impl<SP: Deref> Channel<SP> where
 	/// [`ChannelMonitorUpdate`] will be returned).
 	pub fn get_shutdown(&mut self, signer_provider: &SP, their_features: &InitFeatures,
 		target_feerate_sats_per_kw: Option<u32>, override_shutdown_script: Option<ShutdownScript>)
-		-> Result<(msgs::Shutdown, Option<ChannelMonitorUpdate>, Vec<(HTLCSource, PaymentHash)>), APIError>
+		-> Result<(msgs::Shutdown, Option<ChannelMonitorUpdate>, Vec<(HTLCSource, PaymentHash)>, Option<ShutdownResult>), APIError>
 	{
 		for htlc in self.context.pending_outbound_htlcs.iter() {
 			if let OutboundHTLCState::LocalAnnounced(_) = htlc.state {
@@ -5626,11 +5642,19 @@ impl<SP: Deref> Channel<SP> where
 		};

 		// From here on out, we may not fail!
+		let shutdown_result;
+		let unbroadcasted_batch_funding_txid = self.context.unbroadcasted_batch_funding_txid();
 		self.context.target_closing_feerate_sats_per_kw = target_feerate_sats_per_kw;
 		if self.context.channel_state & !STATE_FLAGS < ChannelState::FundingSent as u32 {
 			self.context.channel_state = ChannelState::ShutdownComplete as u32;
+			shutdown_result = Some(ShutdownResult {
+				monitor_update: None,
+				dropped_outbound_htlcs: Vec::new(),
+				unbroadcasted_batch_funding_txid,
+			});
 		} else {
 			self.context.channel_state |= ChannelState::LocalShutdownSent as u32;
+			shutdown_result = None;
 		}
 		self.context.update_time_counter += 1;

@@ -5667,7 +5691,7 @@ impl<SP: Deref> Channel<SP> where
 		debug_assert!(!self.is_shutdown() || monitor_update.is_none(),
 			"we can't both complete shutdown and return a monitor update");

-		Ok((shutdown, monitor_update, dropped_outbound_htlcs))
+		Ok((shutdown, monitor_update, dropped_outbound_htlcs, shutdown_result))
 	}

 	pub fn inflight_htlc_sources(&self) -> impl Iterator<Item=(&HTLCSource, &PaymentHash)> {
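Taken together, the channel.rs changes widen each closing-path return value so the channel itself reports when a shutdown has completed, rather than leaving ChannelManager to rebuild that state from context. A sketch of the resulting call-site shape; the names and signatures below are simplified stand-ins, not the real API:

// Simplified stand-ins; not the real lightning-rs types or signatures.
struct ClosingSigned;
struct Transaction;
struct ShutdownResult;

// Mirrors the shape of maybe_propose_closing_signed after this commit:
// a message to send, a transaction to broadcast, and an optional completed
// shutdown, any of which may be absent.
fn maybe_propose_close() -> Result<(Option<ClosingSigned>, Option<Transaction>, Option<ShutdownResult>), ()> {
	Ok((None, None, None)) // early-return paths now carry a third None
}

fn handle() -> Result<(), ()> {
	let (msg_opt, tx_opt, shutdown_result_opt) = maybe_propose_close()?;
	if let Some(_msg) = msg_opt {
		// queue a SendClosingSigned message event
	}
	if let Some(_shutdown_result) = shutdown_result_opt {
		// hand it to finish_close_channel-style processing
	}
	if let Some(_tx) = tx_opt {
		// broadcast the signed closing transaction
	}
	Ok(())
}

fn main() {
	handle().unwrap();
}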

lightning/src/ln/channelmanager.rs (+21 -25)
@@ -2556,7 +2556,7 @@ where
 		let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);

 		let mut failed_htlcs: Vec<(HTLCSource, PaymentHash)>;
-		let mut shutdown_result = None;
+		let shutdown_result;
 		loop {
 			let per_peer_state = self.per_peer_state.read().unwrap();

@@ -2571,10 +2571,10 @@ where
 				if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
 					let funding_txo_opt = chan.context.get_funding_txo();
 					let their_features = &peer_state.latest_features;
-					let unbroadcasted_batch_funding_txid = chan.context.unbroadcasted_batch_funding_txid();
-					let (shutdown_msg, mut monitor_update_opt, htlcs) =
+					let (shutdown_msg, mut monitor_update_opt, htlcs, local_shutdown_result) =
 						chan.get_shutdown(&self.signer_provider, their_features, target_feerate_sats_per_1000_weight, override_shutdown_script)?;
 					failed_htlcs = htlcs;
+					shutdown_result = local_shutdown_result;

 					// We can send the `shutdown` message before updating the `ChannelMonitor`
 					// here as we don't need the monitor update to complete until we send a
@@ -2602,7 +2602,6 @@ where
 						});
 					}
 					self.issue_channel_close_events(&chan.context, ClosureReason::HolderForceClosed);
-					shutdown_result = Some((None, Vec::new(), unbroadcasted_batch_funding_txid));
 				}
 			}
 			break;
@@ -2692,30 +2691,29 @@ where
 		self.close_channel_internal(channel_id, counterparty_node_id, target_feerate_sats_per_1000_weight, shutdown_script)
 	}

-	fn finish_close_channel(&self, shutdown_res: ShutdownResult) {
+	fn finish_close_channel(&self, mut shutdown_res: ShutdownResult) {
 		debug_assert_ne!(self.per_peer_state.held_by_thread(), LockHeldState::HeldByThread);
 		#[cfg(debug_assertions)]
 		for (_, peer) in self.per_peer_state.read().unwrap().iter() {
 			debug_assert_ne!(peer.held_by_thread(), LockHeldState::HeldByThread);
 		}

-		let (monitor_update_option, mut failed_htlcs, unbroadcasted_batch_funding_txid) = shutdown_res;
-		log_debug!(self.logger, "Finishing closure of channel with {} HTLCs to fail", failed_htlcs.len());
-		for htlc_source in failed_htlcs.drain(..) {
+		log_debug!(self.logger, "Finishing closure of channel with {} HTLCs to fail", shutdown_res.dropped_outbound_htlcs.len());
+		for htlc_source in shutdown_res.dropped_outbound_htlcs.drain(..) {
 			let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
 			let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
 			let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
 			self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
 		}
-		if let Some((_, funding_txo, monitor_update)) = monitor_update_option {
+		if let Some((_, funding_txo, monitor_update)) = shutdown_res.monitor_update {
 			// There isn't anything we can do if we get an update failure - we're already
 			// force-closing. The monitor update on the required in-memory copy should broadcast
 			// the latest local state, which is the best we can do anyway. Thus, it is safe to
 			// ignore the result here.
 			let _ = self.chain_monitor.update_channel(funding_txo, &monitor_update);
 		}
 		let mut shutdown_results = Vec::new();
-		if let Some(txid) = unbroadcasted_batch_funding_txid {
+		if let Some(txid) = shutdown_res.unbroadcasted_batch_funding_txid {
 			let mut funding_batch_states = self.funding_batch_states.lock().unwrap();
 			let affected_channels = funding_batch_states.remove(&txid).into_iter().flatten();
 			let per_peer_state = self.per_peer_state.read().unwrap();
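One detail in finish_close_channel above: the parameter becomes `mut shutdown_res` because `Vec::drain(..)` (and `Option::take()`, used later in the commit) mutate the struct's fields in place, where the old code destructured the tuple into its own mutable bindings. A minimal sketch with placeholder field types:

struct ShutdownResult {
	dropped_outbound_htlcs: Vec<u32>, // placeholder element type
	monitor_update: Option<u64>,      // placeholder payload type
}

fn finish_close(mut res: ShutdownResult) {
	// drain(..) removes elements from the field in place, so the owning
	// binding must be mutable once the Vec lives inside the struct.
	for htlc in res.dropped_outbound_htlcs.drain(..) {
		let _ = htlc; // the real code fails each HTLC backwards here
	}
	// take() likewise swaps the Option field to None in place.
	if let Some(update) = res.monitor_update.take() {
		let _ = update; // the real code applies the monitor update here
	}
}

fn main() {
	finish_close(ShutdownResult {
		dropped_outbound_htlcs: vec![1, 2, 3],
		monitor_update: Some(7),
	});
}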
@@ -6240,22 +6238,19 @@ where
 	}

 	fn internal_closing_signed(&self, counterparty_node_id: &PublicKey, msg: &msgs::ClosingSigned) -> Result<(), MsgHandleErrInternal> {
-		let mut shutdown_result = None;
-		let unbroadcasted_batch_funding_txid;
 		let per_peer_state = self.per_peer_state.read().unwrap();
 		let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 			.ok_or_else(|| {
 				debug_assert!(false);
 				MsgHandleErrInternal::send_err_msg_no_close(format!("Can't find a peer matching the passed counterparty node_id {}", counterparty_node_id), msg.channel_id)
 			})?;
-		let (tx, chan_option) = {
+		let (tx, chan_option, shutdown_result) = {
 			let mut peer_state_lock = peer_state_mutex.lock().unwrap();
 			let peer_state = &mut *peer_state_lock;
 			match peer_state.channel_by_id.entry(msg.channel_id.clone()) {
 				hash_map::Entry::Occupied(mut chan_phase_entry) => {
 					if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
-						unbroadcasted_batch_funding_txid = chan.context.unbroadcasted_batch_funding_txid();
-						let (closing_signed, tx) = try_chan_phase_entry!(self, chan.closing_signed(&self.fee_estimator, &msg), chan_phase_entry);
+						let (closing_signed, tx, shutdown_result) = try_chan_phase_entry!(self, chan.closing_signed(&self.fee_estimator, &msg), chan_phase_entry);
 						if let Some(msg) = closing_signed {
 							peer_state.pending_msg_events.push(events::MessageSendEvent::SendClosingSigned {
 								node_id: counterparty_node_id.clone(),
@@ -6268,8 +6263,8 @@ where
 						// also implies there are no pending HTLCs left on the channel, so we can
 						// fully delete it from tracking (the channel monitor is still around to
 						// watch for old state broadcasts)!
-						(tx, Some(remove_channel_phase!(self, chan_phase_entry)))
-					} else { (tx, None) }
+						(tx, Some(remove_channel_phase!(self, chan_phase_entry)), shutdown_result)
+					} else { (tx, None, shutdown_result) }
 					} else {
 						return try_chan_phase_entry!(self, Err(ChannelError::Close(
 							"Got a closing_signed message for an unfunded channel!".into())), chan_phase_entry);
@@ -6291,7 +6286,6 @@ where
 				});
 			}
 			self.issue_channel_close_events(&chan.context, ClosureReason::CooperativeClosure);
-			shutdown_result = Some((None, Vec::new(), unbroadcasted_batch_funding_txid));
 		}
 		mem::drop(per_peer_state);
 		if let Some(shutdown_result) = shutdown_result {
@@ -6988,13 +6982,16 @@ where
 				ChannelPhase::Funded(chan) => {
 					let unbroadcasted_batch_funding_txid = chan.context.unbroadcasted_batch_funding_txid();
 					match chan.maybe_propose_closing_signed(&self.fee_estimator, &self.logger) {
-						Ok((msg_opt, tx_opt)) => {
+						Ok((msg_opt, tx_opt, shutdown_result_opt)) => {
 							if let Some(msg) = msg_opt {
 								has_update = true;
 								pending_msg_events.push(events::MessageSendEvent::SendClosingSigned {
 									node_id: chan.context.get_counterparty_node_id(), msg,
 								});
 							}
+							if let Some(shutdown_result) = shutdown_result_opt {
+								shutdown_results.push(shutdown_result);
+							}
 							if let Some(tx) = tx_opt {
 								// We're done with this channel. We got a closing_signed and sent back
 								// a closing_signed with a closing transaction to broadcast.
@@ -7009,7 +7006,6 @@ where
 								log_info!(self.logger, "Broadcasting {}", log_tx!(tx));
 								self.tx_broadcaster.broadcast_transactions(&[&tx]);
 								update_maps_on_chan_removal!(self, &chan.context);
-								shutdown_results.push((None, Vec::new(), unbroadcasted_batch_funding_txid));
 								false
 							} else { true }
 						},
@@ -7050,7 +7046,7 @@ where
 			// Channel::force_shutdown tries to make us do) as we may still be in initialization,
 			// so we track the update internally and handle it when the user next calls
 			// timer_tick_occurred, guaranteeing we're running normally.
-			if let Some((counterparty_node_id, funding_txo, update)) = failure.0.take() {
+			if let Some((counterparty_node_id, funding_txo, update)) = failure.monitor_update.take() {
 				assert_eq!(update.updates.len(), 1);
 				if let ChannelMonitorUpdateStep::ChannelForceClosed { should_broadcast } = update.updates[0] {
 					assert!(should_broadcast);
@@ -9267,16 +9263,16 @@ where
 					log_error!(args.logger, " The ChannelMonitor for channel {} is at counterparty commitment transaction number {} but the ChannelManager is at counterparty commitment transaction number {}.",
 						&channel.context.channel_id(), monitor.get_cur_counterparty_commitment_number(), channel.get_cur_counterparty_commitment_transaction_number());
 				}
-				let (monitor_update, mut new_failed_htlcs, batch_funding_txid) = channel.context.force_shutdown(true);
-				if batch_funding_txid.is_some() {
+				let mut shutdown_result = channel.context.force_shutdown(true);
+				if shutdown_result.unbroadcasted_batch_funding_txid.is_some() {
 					return Err(DecodeError::InvalidValue);
 				}
-				if let Some((counterparty_node_id, funding_txo, update)) = monitor_update {
+				if let Some((counterparty_node_id, funding_txo, update)) = shutdown_result.monitor_update {
 					close_background_events.push(BackgroundEvent::MonitorUpdateRegeneratedOnStartup {
 						counterparty_node_id, funding_txo, update
 					});
 				}
-				failed_htlcs.append(&mut new_failed_htlcs);
+				failed_htlcs.append(&mut shutdown_result.dropped_outbound_htlcs);
 				channel_closures.push_back((events::Event::ChannelClosed {
 					channel_id: channel.context.channel_id(),
 					user_channel_id: channel.context.get_user_id(),
