Commit 07b5355

Set counterparty_node_id on ChannelMonitors as they're updated
Historically, `ChannelMonitor`s had no idea who their counterparty was. This was fine, until `ChannelManager` started indexing by peer, at which point it needed to know the counterparty when it saw a `ChannelMonitorUpdate` complete. To address this, a "temporary" map from channel ID to peer was added, but no upgrade path was created to let existing `ChannelMonitor`s stop relying on that map. This commit adds such an upgrade path, setting the `counterparty_node_id` on all `ChannelMonitor`s as they're updated, allowing us to eventually break backwards compatibility and remove `ChannelManager::outpoint_to_peer`.
1 parent 9488a1c commit 07b5355
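
In miniature, the upgrade path works as follows: every `ChannelMonitorUpdate` built from a live channel now carries the peer's node id, and a monitor serialized before the field existed adopts it the first time such an update is applied. A minimal sketch of that adoption logic, using stand-in types rather than LDK's real ones:

// Stand-in types only; the real PublicKey comes from rust-secp256k1.
#[derive(Clone, Copy, PartialEq, Debug)]
struct PublicKey([u8; 33]);

struct Monitor {
    // None for monitors serialized before this commit.
    counterparty_node_id: Option<PublicKey>,
}

struct Update {
    counterparty_node_id: Option<PublicKey>,
}

impl Monitor {
    // Mirrors the check this commit adds to ChannelMonitorImpl::update_monitor:
    // adopt the id if we don't have one yet, otherwise sanity-check consistency.
    fn apply(&mut self, update: &Update) {
        if update.counterparty_node_id.is_some() {
            if self.counterparty_node_id.is_none() {
                self.counterparty_node_id = update.counterparty_node_id;
            } else {
                debug_assert_eq!(self.counterparty_node_id, update.counterparty_node_id);
            }
        }
    }
}

fn main() {
    let peer = PublicKey([2; 33]);
    let mut pre_upgrade_monitor = Monitor { counterparty_node_id: None };
    pre_upgrade_monitor.apply(&Update { counterparty_node_id: Some(peer) });
    assert_eq!(pre_upgrade_monitor.counterparty_node_id, Some(peer));
}

Updates generated without a live channel (see the `channelmanager.rs` hunks below) leave the field as `None`, which this check simply ignores.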

File tree

3 files changed, +36 -3 lines changed


lightning/src/chain/channelmonitor.rs

Lines changed: 26 additions & 3 deletions
@@ -71,6 +71,15 @@ use crate::sync::{Mutex, LockTestExt};
 #[must_use]
 pub struct ChannelMonitorUpdate {
 	pub(crate) updates: Vec<ChannelMonitorUpdateStep>,
+	/// Historically, [`ChannelMonitor`]s didn't know their counterparty node id. However,
+	/// `ChannelManager` really wants to know it so that it can easily look up the corresponding
+	/// channel. For now, this results in a temporary map in `ChannelManager` to look up channels
+	/// by only the funding outpoint.
+	///
+	/// To eventually remove that, we repeat the counterparty node id here so that we can upgrade
+	/// `ChannelMonitor`s to become aware of the counterparty node id if they were generated prior
+	/// to when it was stored directly in them.
+	pub(crate) counterparty_node_id: Option<PublicKey>,
 	/// The sequence number of this update. Updates *must* be replayed in-order according to this
 	/// sequence number (and updates may panic if they are not). The update_id values are strictly
 	/// increasing and increase by one for each new update, with two exceptions specified below.

@@ -107,7 +116,9 @@ impl Writeable for ChannelMonitorUpdate {
 		for update_step in self.updates.iter() {
 			update_step.write(w)?;
 		}
-		write_tlv_fields!(w, {});
+		write_tlv_fields!(w, {
+			(1, self.counterparty_node_id, option),
+		});
 		Ok(())
 	}
 }

@@ -122,8 +133,11 @@ impl Readable for ChannelMonitorUpdate {
 				updates.push(upd);
 			}
 		}
-		read_tlv_fields!(r, {});
-		Ok(Self { update_id, updates })
+		let mut counterparty_node_id = None;
+		read_tlv_fields!(r, {
+			(1, counterparty_node_id, option),
+		});
+		Ok(Self { update_id, counterparty_node_id, updates })
 	}
 }

@@ -2697,6 +2711,15 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 			log_info!(logger, "Applying update to monitor {}, bringing update_id from {} to {} with {} change(s).",
 				log_funding_info!(self), self.latest_update_id, updates.update_id, updates.updates.len());
 		}
+
+		if updates.counterparty_node_id.is_some() {
+			if self.counterparty_node_id.is_none() {
+				self.counterparty_node_id = updates.counterparty_node_id;
+			} else {
+				debug_assert_eq!(self.counterparty_node_id, updates.counterparty_node_id);
+			}
+		}
+
 		// ChannelMonitor updates may be applied after force close if we receive a preimage for a
 		// broadcasted commitment transaction HTLC output that we'd like to claim on-chain. If this
 		// is the case, we no longer have guaranteed access to the monitor's update ID, so we use a
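
The `Writeable`/`Readable` hunks above lean on the even/odd TLV convention LDK's serialization follows: type 1 is odd, so older readers skip the unknown record rather than failing, and streams written by older versions decode the new field as `None`. A self-contained sketch of that rule, with hypothetical helpers and single-byte types and lengths in place of the real format's varint encoding:

// Hypothetical, simplified TLV records: one-byte type, one-byte length.
fn write_tlv(out: &mut Vec<u8>, typ: u8, value: &[u8]) {
    out.push(typ);
    out.push(value.len() as u8);
    out.extend_from_slice(value);
}

// Returns the optional type-1 record (the counterparty node id in this commit).
fn read_tlv_stream(mut data: &[u8]) -> Result<Option<Vec<u8>>, &'static str> {
    let mut counterparty_node_id = None; // stays None for old serializations
    while let [typ, len, rest @ ..] = data {
        if (*len as usize) > rest.len() {
            return Err("truncated record");
        }
        let (value, tail) = rest.split_at(*len as usize);
        match *typ {
            1 => counterparty_node_id = Some(value.to_vec()),
            t if t % 2 == 0 => return Err("unknown even type: reader must understand it"),
            _ => {} // unknown odd type: safe for old readers to ignore
        }
        data = tail;
    }
    Ok(counterparty_node_id)
}

fn main() {
    // A stream written before this commit has no type-1 record: reads back as None.
    assert_eq!(read_tlv_stream(&[]), Ok(None));
    // A new stream carries the 33-byte public key under odd type 1.
    let mut buf = Vec::new();
    write_tlv(&mut buf, 1, &[2u8; 33]);
    assert_eq!(read_tlv_stream(&buf), Ok(Some(vec![2u8; 33])));
}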

lightning/src/ln/channel.rs

Lines changed: 8 additions & 0 deletions
@@ -2389,6 +2389,7 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {
 			self.latest_monitor_update_id = CLOSED_CHANNEL_UPDATE_ID;
 			Some((self.get_counterparty_node_id(), funding_txo, ChannelMonitorUpdate {
 				update_id: self.latest_monitor_update_id,
+				counterparty_node_id: Some(self.counterparty_node_id),
 				updates: vec![ChannelMonitorUpdateStep::ChannelForceClosed { should_broadcast }],
 			}))
 		} else { None }

@@ -2698,6 +2699,7 @@ impl<SP: Deref> Channel<SP> where
 		self.context.latest_monitor_update_id += 1;
 		let monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id,
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: vec![ChannelMonitorUpdateStep::PaymentPreimage {
 				payment_preimage: payment_preimage_arg.clone(),
 			}],

@@ -3408,6 +3410,7 @@ impl<SP: Deref> Channel<SP> where
 		self.context.latest_monitor_update_id += 1;
 		let mut monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id,
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: vec![ChannelMonitorUpdateStep::LatestHolderCommitmentTXInfo {
 				commitment_tx: holder_commitment_tx,
 				htlc_outputs: htlcs_and_sigs,

@@ -3487,6 +3490,7 @@ impl<SP: Deref> Channel<SP> where
 
 		let mut monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id + 1, // We don't increment this yet!
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: Vec::new(),
 		};
 
@@ -3653,6 +3657,7 @@ impl<SP: Deref> Channel<SP> where
 		self.context.latest_monitor_update_id += 1;
 		let mut monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id,
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: vec![ChannelMonitorUpdateStep::CommitmentSecret {
 				idx: self.context.cur_counterparty_commitment_transaction_number + 1,
 				secret: msg.per_commitment_secret,

@@ -4705,6 +4710,7 @@ impl<SP: Deref> Channel<SP> where
 		self.context.latest_monitor_update_id += 1;
 		let monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id,
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: vec![ChannelMonitorUpdateStep::ShutdownScript {
 				scriptpubkey: self.get_closing_scriptpubkey(),
 			}],

@@ -5828,6 +5834,7 @@ impl<SP: Deref> Channel<SP> where
 		self.context.latest_monitor_update_id += 1;
 		let monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id,
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: vec![ChannelMonitorUpdateStep::LatestCounterpartyCommitmentTXInfo {
 				commitment_txid: counterparty_commitment_txid,
 				htlc_outputs: htlcs.clone(),

@@ -6026,6 +6033,7 @@ impl<SP: Deref> Channel<SP> where
 		self.context.latest_monitor_update_id += 1;
 		let monitor_update = ChannelMonitorUpdate {
 			update_id: self.context.latest_monitor_update_id,
+			counterparty_node_id: Some(self.context.counterparty_node_id),
 			updates: vec![ChannelMonitorUpdateStep::ShutdownScript {
 				scriptpubkey: self.get_closing_scriptpubkey(),
 			}],

lightning/src/ln/channelmanager.rs

Lines changed: 2 additions & 0 deletions
@@ -5549,6 +5549,7 @@ where
 		}
 		let preimage_update = ChannelMonitorUpdate {
 			update_id: CLOSED_CHANNEL_UPDATE_ID,
+			counterparty_node_id: None,
 			updates: vec![ChannelMonitorUpdateStep::PaymentPreimage {
 				payment_preimage,
 			}],

@@ -10202,6 +10203,7 @@ where
 				&funding_txo.to_channel_id());
 			let monitor_update = ChannelMonitorUpdate {
 				update_id: CLOSED_CHANNEL_UPDATE_ID,
+				counterparty_node_id: None,
 				updates: vec![ChannelMonitorUpdateStep::ChannelForceClosed { should_broadcast: true }],
 			};
 			close_background_events.push(BackgroundEvent::ClosedMonitorUpdateRegeneratedOnStartup((*funding_txo, monitor_update)));
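
Both `ChannelManager` call sites above pass `counterparty_node_id: None`: each builds an update against an already-closed channel (note `CLOSED_CHANNEL_UPDATE_ID`), where no live `Channel` with a known peer is at hand, which is why the monitor-side adoption logic is gated on `is_some()`. Once all monitors have learned their peer via the live-channel paths, the lookup described in the commit message could reduce to something like the following sketch (hypothetical names; `outpoint_to_peer` stands in for the temporary map this commit aims to retire):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct OutPoint(u64); // stand-in for the funding outpoint
#[derive(Clone, Copy, PartialEq, Debug)]
struct PublicKey([u8; 33]);

struct Monitor {
    counterparty_node_id: Option<PublicKey>,
}

// Prefer the id now persisted on the monitor itself; fall back to the legacy
// outpoint-indexed map only for monitors that predate this commit.
fn peer_for_monitor(
    monitor: &Monitor,
    funding: OutPoint,
    outpoint_to_peer: &HashMap<OutPoint, PublicKey>,
) -> Option<PublicKey> {
    monitor
        .counterparty_node_id
        .or_else(|| outpoint_to_peer.get(&funding).copied())
}

fn main() {
    let map = HashMap::from([(OutPoint(7), PublicKey([2; 33]))]);
    let legacy = Monitor { counterparty_node_id: None };
    assert_eq!(peer_for_monitor(&legacy, OutPoint(7), &map), Some(PublicKey([2; 33])));
}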
