Skip to content

Trivial Followups to #1825 #1904

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
Dec 12, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions lightning/src/chain/channelmonitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3032,10 +3032,10 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
if let Some(new_outputs) = new_outputs_option {
watch_outputs.push(new_outputs);
}
// Since there may be multiple HTLCs (all from the same commitment) being
// claimed by the counterparty within the same transaction, and
// `check_spend_counterparty_htlc` already checks for all of them, we can
// safely break from our loop.
// Since there may be multiple HTLCs for this channel (all spending the
// same commitment tx) being claimed by the counterparty within the same
// transaction, and `check_spend_counterparty_htlc` already checks all the
// ones relevant to this channel, we can safely break from our loop.
break;
}
}
Expand Down
46 changes: 14 additions & 32 deletions lightning/src/chain/onchaintx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ enum OnchainEvent {
/// Outpoint under claim process by our own tx, once this one gets enough confirmations, we remove it from
/// bump-txn candidate buffer.
Claim {
claim_request: Txid,
package_id: PackageID,
},
/// A claim tx aggregates multiple claimable outpoints. One of the outpoints may be claimed by a counterparty tx.
/// In this case, we need to drop the outpoint and regenerate a new claim tx. For safety, we keep tracking
Expand Down Expand Up @@ -123,7 +123,7 @@ impl MaybeReadable for OnchainEventEntry {

impl_writeable_tlv_based_enum_upgradable!(OnchainEvent,
(0, Claim) => {
(0, claim_request, required),
(0, package_id, required),
},
(1, ContentiousOutpoint) => {
(0, package, required),
Expand Down Expand Up @@ -480,8 +480,8 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
// We check for outpoint spends within claims individually rather than as a set
// since requests can have outpoints split off.
if !self.onchain_events_awaiting_threshold_conf.iter()
.any(|event_entry| if let OnchainEvent::Claim { claim_request } = event_entry.event {
first_claim_txid_height.0 == claim_request.into_inner()
.any(|event_entry| if let OnchainEvent::Claim { package_id } = event_entry.event {
first_claim_txid_height.0 == package_id
} else {
// The onchain event is not a claim, keep seeking until we find one.
false
Expand Down Expand Up @@ -733,31 +733,13 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
// outpoints to know if the transaction is the original claim or a bumped one issued
// by us.
let mut are_sets_equal = true;
if !request.requires_external_funding() || !request.is_malleable() {
// If the claim does not require external funds to be allocated through
// additional inputs we can simply check the inputs in order as they
// cannot change under us.
if request.outpoints().len() != tx.input.len() {
let mut tx_inputs = tx.input.iter().map(|input| &input.previous_output).collect::<Vec<_>>();
tx_inputs.sort_unstable();
for request_input in request.outpoints() {
if tx_inputs.binary_search(&request_input).is_err() {
Comment on lines -737 to +739
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Took a little while but convinced myself this works. Squashes it down nicely too.

are_sets_equal = false;
} else {
for (claim_inp, tx_inp) in request.outpoints().iter().zip(tx.input.iter()) {
if **claim_inp != tx_inp.previous_output {
are_sets_equal = false;
}
}
}
} else {
// Otherwise, we'll do a linear search for each input (we don't expect
// large input sets to exist) to ensure the request's input set is fully
// spent to be resilient against the external claim reordering inputs.
let mut spends_all_inputs = true;
for request_input in request.outpoints() {
if tx.input.iter().find(|input| input.previous_output == *request_input).is_none() {
spends_all_inputs = false;
break;
}
break;
}
are_sets_equal = spends_all_inputs;
}

macro_rules! clean_claim_request_after_safety_delay {
Expand All @@ -766,7 +748,7 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
txid: tx.txid(),
height: conf_height,
block_hash: Some(conf_hash),
event: OnchainEvent::Claim { claim_request: Txid::from_inner(first_claim_txid_height.0) }
event: OnchainEvent::Claim { package_id: first_claim_txid_height.0 }
};
if !self.onchain_events_awaiting_threshold_conf.contains(&entry) {
self.onchain_events_awaiting_threshold_conf.push(entry);
Expand Down Expand Up @@ -821,13 +803,13 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
for entry in onchain_events_awaiting_threshold_conf {
if entry.has_reached_confirmation_threshold(cur_height) {
match entry.event {
OnchainEvent::Claim { claim_request } => {
let package_id = claim_request.into_inner();
OnchainEvent::Claim { package_id } => {
// We may remove a whole set of claim outpoints here, as these may have
// been aggregated in a single tx and claimed atomically
if let Some(request) = self.pending_claim_requests.remove(&package_id) {
for outpoint in request.outpoints() {
log_debug!(logger, "Removing claim tracking for {} due to maturation of claim tx {}.", outpoint, claim_request);
log_debug!(logger, "Removing claim tracking for {} due to maturation of claim package {}.",
outpoint, log_bytes!(package_id));
self.claimable_outpoints.remove(&outpoint);
#[cfg(anchors)]
self.pending_claim_events.remove(&package_id);
Expand Down Expand Up @@ -1065,7 +1047,7 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {

#[cfg(anchors)]
pub(crate) fn generate_external_htlc_claim(
&mut self, outp: &::bitcoin::OutPoint, preimage: &Option<PaymentPreimage>
&self, outp: &::bitcoin::OutPoint, preimage: &Option<PaymentPreimage>
) -> Option<ExternalHTLCClaim> {
let find_htlc = |holder_commitment: &HolderCommitmentTransaction| -> Option<ExternalHTLCClaim> {
let trusted_tx = holder_commitment.trust();
Expand Down
28 changes: 10 additions & 18 deletions lightning/src/ln/chan_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -739,17 +739,12 @@ pub fn build_htlc_input_witness(
} else {
EcdsaSighashType::All
};
let mut remote_sig = remote_sig.serialize_der().to_vec();
remote_sig.push(remote_sighash_type as u8);

let mut local_sig = local_sig.serialize_der().to_vec();
local_sig.push(EcdsaSighashType::All as u8);

let mut witness = Witness::new();
// First push the multisig dummy, note that due to BIP147 (NULLDUMMY) it must be a zero-length element.
witness.push(vec![]);
witness.push(remote_sig);
witness.push(local_sig);
witness.push_bitcoin_signature(&remote_sig.serialize_der(), remote_sighash_type);
witness.push_bitcoin_signature(&local_sig.serialize_der(), EcdsaSighashType::All);
if let Some(preimage) = preimage {
witness.push(preimage.0.to_vec());
} else {
Expand Down Expand Up @@ -801,9 +796,10 @@ pub(crate) fn get_anchor_output<'a>(commitment_tx: &'a Transaction, funding_pubk
/// Returns the witness required to satisfy and spend an anchor input.
pub fn build_anchor_input_witness(funding_key: &PublicKey, funding_sig: &Signature) -> Witness {
let anchor_redeem_script = chan_utils::get_anchor_redeemscript(funding_key);
let mut funding_sig = funding_sig.serialize_der().to_vec();
funding_sig.push(EcdsaSighashType::All as u8);
Witness::from_vec(vec![funding_sig, anchor_redeem_script.to_bytes()])
let mut ret = Witness::new();
ret.push_bitcoin_signature(&funding_sig.serialize_der(), EcdsaSighashType::All);
ret.push(anchor_redeem_script.as_bytes());
ret
}

/// Per-channel data used to build transactions in conjunction with the per-commitment data (CommitmentTransaction).
Expand Down Expand Up @@ -1037,17 +1033,13 @@ impl HolderCommitmentTransaction {
// First push the multisig dummy, note that due to BIP147 (NULLDUMMY) it must be a zero-length element.
let mut tx = self.inner.built.transaction.clone();
tx.input[0].witness.push(Vec::new());
let mut ser_holder_sig = holder_sig.serialize_der().to_vec();
ser_holder_sig.push(EcdsaSighashType::All as u8);
let mut ser_cp_sig = self.counterparty_sig.serialize_der().to_vec();
ser_cp_sig.push(EcdsaSighashType::All as u8);

if self.holder_sig_first {
tx.input[0].witness.push(ser_holder_sig);
tx.input[0].witness.push(ser_cp_sig);
tx.input[0].witness.push_bitcoin_signature(&holder_sig.serialize_der(), EcdsaSighashType::All);
tx.input[0].witness.push_bitcoin_signature(&self.counterparty_sig.serialize_der(), EcdsaSighashType::All);
} else {
tx.input[0].witness.push(ser_cp_sig);
tx.input[0].witness.push(ser_holder_sig);
tx.input[0].witness.push_bitcoin_signature(&self.counterparty_sig.serialize_der(), EcdsaSighashType::All);
tx.input[0].witness.push_bitcoin_signature(&holder_sig.serialize_der(), EcdsaSighashType::All);
}

tx.input[0].witness.push(funding_redeemscript.as_bytes().to_vec());
Expand Down
1 change: 1 addition & 0 deletions lightning/src/util/events.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1125,6 +1125,7 @@ impl Writeable for Event {
BumpTransactionEvent::ChannelClose { .. } => {}
BumpTransactionEvent::HTLCResolution { .. } => {}
}
write_tlv_fields!(writer, {}); // Write a length field for forwards compat
}
&Event::ChannelReady { ref channel_id, ref user_channel_id, ref counterparty_node_id, ref channel_type } => {
29u8.write(writer)?;
Expand Down