[0.0.122-bindings] Update 0.0.121 bindings branch to 0.0.122 #3013

Merged
Changes from all commits
23 commits
d7e3cd5
[bindings] Drop the lifetime bound on `Record` for bindings builds
TheBlueMatt Dec 20, 2023
54ee8b4
[bindings] Mark `WithContext` log wrapper with no-export
TheBlueMatt Jan 18, 2024
1043498
[bindings] No-export `RouteHopCandidate` lifetime'd fields
TheBlueMatt Jan 23, 2024
da62028
Store `EntropySource` in `DefaultRouter` instead of passing it
TheBlueMatt Jan 23, 2024
b136783
Move RGS `GraphSyncError` into the top-level module
TheBlueMatt Jan 18, 2024
2855c74
Drop manual `Debug` impl on RGS' `GraphSyncError`
TheBlueMatt Jan 23, 2024
76bdd00
[bindings] Move additional score params from `&()` to `Default`
TheBlueMatt Jan 23, 2024
a9db4db
Stop relying on a `Clone`able `NetworkGraph` ref in `DefaultRouter`
TheBlueMatt Jan 23, 2024
e79d53a
Make `as_directed_to` non-public
TheBlueMatt Mar 1, 2022
a55935c
Restrict ChannelInfo::as_directed_from visibility
jkczyz Mar 29, 2022
6096a4f
Use an explicit `Sign` type on the `ChannelMonitor` read tuple
TheBlueMatt Dec 24, 2022
cb8cd4e
Export `outbound_payment` structs in their respective modules
TheBlueMatt Feb 28, 2023
b64dc67
Avoid enums containing references with lifetimes
TheBlueMatt Mar 5, 2023
b7a63e3
Add some no-exporting of more offers code
TheBlueMatt Apr 25, 2023
eee4138
Hard-code scorer parameters to `ProbabilisticScoringFeeParameters`
TheBlueMatt Oct 21, 2023
efc148b
Mark several types no-export which should be exported eventually
TheBlueMatt Jul 19, 2023
77aafd0
`crate`-only several BOLT12 methods that require unbounded generics
TheBlueMatt Sep 28, 2023
b1236d2
Make ChannelMonitor always clonable
TheBlueMatt Feb 1, 2021
a19a6d9
Make the custom message traits cloneable as they're deep in nested st…
TheBlueMatt Sep 24, 2021
0606644
Replace `EventsProvider` on `OnionMessageHandler` with a single fn
TheBlueMatt Dec 17, 2023
5d0a963
Avoid slices without inner references
TheBlueMatt Sep 28, 2023
6d26a0c
Mark `io_extras` module as no-export (matching its `doc(hidden)`)
TheBlueMatt Apr 23, 2024
6b45237
Drop completed blocked `ChannelMonitorUpdate`s on startup
TheBlueMatt Apr 25, 2024
3 changes: 2 additions & 1 deletion fuzz/src/chanmon_consistency.rs
@@ -41,7 +41,8 @@ use lightning::sign::{KeyMaterial, InMemorySigner, Recipient, EntropySource, Nod
use lightning::events;
use lightning::events::MessageSendEventsProvider;
use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId, RecipientOnionFields};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, ChannelManagerReadArgs, PaymentId};
use lightning::ln::outbound_payment::{RecipientOnionFields, PaymentSendFailure};
use lightning::ln::channel::FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE;
use lightning::ln::msgs::{self, CommitmentUpdate, ChannelMessageHandler, DecodeError, UpdateAddHTLC, Init};
use lightning::ln::script::ShutdownScript;
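For downstream code, the change above (and the matching ones in the other fuzz and invoice files below) is just an import-path update: `RecipientOnionFields`, `Retry`, and `PaymentSendFailure` now come from `lightning::ln::outbound_payment` rather than the `channelmanager` re-exports. A minimal sketch of the adjusted imports; the helper functions are purely illustrative:

```rust
// Illustrative helpers only; the point is the `outbound_payment` import path.
use lightning::ln::PaymentSecret;
use lightning::ln::outbound_payment::{PaymentSendFailure, RecipientOnionFields, Retry};

fn onion_fields_for(payment_secret: PaymentSecret) -> RecipientOnionFields {
    RecipientOnionFields::secret_only(payment_secret)
}

fn default_retry() -> Retry {
    Retry::Attempts(3)
}

fn describe_failure(err: &PaymentSendFailure) -> String {
    format!("payment failed: {:?}", err)
}
```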
3 changes: 2 additions & 1 deletion fuzz/src/full_stack.rs
@@ -38,7 +38,8 @@ use lightning::chain::transaction::OutPoint;
use lightning::sign::{InMemorySigner, Recipient, KeyMaterial, EntropySource, NodeSigner, SignerProvider};
use lightning::events::Event;
use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentId, RecipientOnionFields, Retry};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentId};
use lightning::ln::outbound_payment::{RecipientOnionFields, Retry};
use lightning::ln::peer_handler::{MessageHandler,PeerManager,SocketDescriptor,IgnoringMessageHandler};
use lightning::ln::msgs::{self, DecodeError};
use lightning::ln::script::ShutdownScript;
45 changes: 19 additions & 26 deletions lightning-background-processor/src/lib.rs
@@ -694,7 +694,10 @@ where
persister, chain_monitor,
chain_monitor.process_pending_events_async(async_event_handler).await,
channel_manager, channel_manager.process_pending_events_async(async_event_handler).await,
peer_manager, process_onion_message_handler_events_async(&peer_manager, async_event_handler).await,
peer_manager,
for event in onion_message_handler_events(peer_manager) {
handler(event).await
},
gossip_sync, logger, scorer, should_break, {
let fut = Selector {
a: channel_manager.get_event_or_persistence_needed_future(),
@@ -719,23 +722,11 @@
)
}

#[cfg(feature = "futures")]
async fn process_onion_message_handler_events_async<
EventHandlerFuture: core::future::Future<Output = ()>,
EventHandler: Fn(Event) -> EventHandlerFuture,
PM: 'static + Deref + Send + Sync,
>(
peer_manager: &PM, handler: EventHandler
)
where
PM::Target: APeerManager + Send + Sync,
{
let events = core::cell::RefCell::new(Vec::new());
peer_manager.onion_message_handler().process_pending_events(&|e| events.borrow_mut().push(e));

for event in events.into_inner() {
handler(event).await
}
fn onion_message_handler_events<PM: 'static + Deref + Send + Sync>(
peer_manager: &PM
) -> impl Iterator<Item=Event> where PM::Target: APeerManager + Send + Sync {
peer_manager.onion_message_handler().get_and_clear_connections_needed()
.into_iter().map(|(node_id, addresses)| Event::ConnectionNeeded { node_id, addresses })
}

#[cfg(feature = "std")]
@@ -851,7 +842,9 @@ impl BackgroundProcessor {
persister, chain_monitor, chain_monitor.process_pending_events(&event_handler),
channel_manager, channel_manager.process_pending_events(&event_handler),
peer_manager,
peer_manager.onion_message_handler().process_pending_events(&event_handler),
for event in onion_message_handler_events(&peer_manager) {
event_handler.handle_event(event);
},
gossip_sync, logger, scorer, stop_thread.load(Ordering::Acquire),
{ Sleeper::from_two_futures(
channel_manager.get_event_or_persistence_needed_future(),
@@ -984,9 +977,8 @@ mod tests {
Arc<DefaultRouter<
Arc<NetworkGraph<Arc<test_utils::TestLogger>>>,
Arc<test_utils::TestLogger>,
Arc<LockingWrapper<TestScorer>>,
(),
TestScorer>
Arc<KeysManager>,
Arc<LockingWrapper<TestScorer>>>
>,
Arc<test_utils::TestLogger>>;

@@ -1143,9 +1135,10 @@ mod tests {
}

impl ScoreLookUp for TestScorer {
#[cfg(not(c_bindings))]
type ScoreParams = ();
fn channel_penalty_msat(
&self, _candidate: &CandidateRouteHop, _usage: ChannelUsage, _score_params: &Self::ScoreParams
&self, _candidate: &CandidateRouteHop, _usage: ChannelUsage, _score_params: &lightning::routing::scoring::ProbabilisticScoringFeeParameters
) -> u64 { unimplemented!(); }
}

@@ -1263,12 +1256,12 @@ mod tests {
let genesis_block = genesis_block(network);
let network_graph = Arc::new(NetworkGraph::new(network, logger.clone()));
let scorer = Arc::new(LockingWrapper::new(TestScorer::new()));
let now = Duration::from_secs(genesis_block.header.time as u64);
let seed = [i as u8; 32];
let router = Arc::new(DefaultRouter::new(network_graph.clone(), logger.clone(), seed, scorer.clone(), Default::default()));
let keys_manager = Arc::new(KeysManager::new(&seed, now.as_secs(), now.subsec_nanos()));
let router = Arc::new(DefaultRouter::new(network_graph.clone(), logger.clone(), Arc::clone(&keys_manager), scorer.clone(), Default::default()));
let chain_source = Arc::new(test_utils::TestChainSource::new(Network::Bitcoin));
let kv_store = Arc::new(FilesystemStore::new(format!("{}_persister_{}", &persist_dir, i).into()));
let now = Duration::from_secs(genesis_block.header.time as u64);
let keys_manager = Arc::new(KeysManager::new(&seed, now.as_secs(), now.subsec_nanos()));
let chain_monitor = Arc::new(chainmonitor::ChainMonitor::new(Some(chain_source.clone()), tx_broadcaster.clone(), logger.clone(), fee_estimator.clone(), kv_store.clone()));
let best_block = BestBlock::from_network(network);
let params = ChainParameters { network, best_block };
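The hunks above replace the `EventsProvider`-based onion-message event flow with a helper that drains `get_and_clear_connections_needed()` and surfaces the results as `Event::ConnectionNeeded`, which then reaches the application's normal event handler. A minimal sketch of handling that event; the actual connection logic is application-specific and only hinted at:

```rust
use lightning::events::Event;

// Sketch of an application event handler; how the node actually dials peers
// (e.g. via lightning-net-tokio) is outside the scope of this diff.
fn handle_event(event: Event) {
    match event {
        Event::ConnectionNeeded { node_id, addresses } => {
            for address in addresses {
                // Attempt an outbound connection to `node_id` at `address` here.
                println!("connection needed to peer {:?} at {:?}", node_id, address);
            }
        },
        _ => { /* every other event is handled exactly as before */ },
    }
}
```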
5 changes: 3 additions & 2 deletions lightning-invoice/src/payment.rs
@@ -13,7 +13,7 @@ use crate::Bolt11Invoice
use bitcoin::hashes::Hash;

use lightning::ln::PaymentHash;
use lightning::ln::channelmanager::RecipientOnionFields;
use lightning::ln::outbound_payment::RecipientOnionFields;
use lightning::routing::router::{PaymentParameters, RouteParameters};

/// Builds the necessary parameters to pay or pre-flight probe the given zero-amount
@@ -170,7 +170,8 @@ mod tests {
#[cfg(feature = "std")]
fn payment_metadata_end_to_end() {
use lightning::events::Event;
use lightning::ln::channelmanager::{Retry, PaymentId};
use lightning::ln::channelmanager::PaymentId;
use lightning::ln::outbound_payment::Retry;
use lightning::ln::msgs::ChannelMessageHandler;
use lightning::ln::functional_test_utils::*;
// Test that a payment metadata read from an invoice passed to `pay_invoice` makes it all
3 changes: 2 additions & 1 deletion lightning-invoice/src/utils.rs
@@ -825,7 +825,8 @@ mod test {
use lightning::ln::PaymentHash;
#[cfg(feature = "std")]
use lightning::ln::PaymentPreimage;
use lightning::ln::channelmanager::{PhantomRouteHints, MIN_FINAL_CLTV_EXPIRY_DELTA, PaymentId, RecipientOnionFields, Retry};
use lightning::ln::channelmanager::{PhantomRouteHints, MIN_FINAL_CLTV_EXPIRY_DELTA, PaymentId};
use lightning::ln::outbound_payment::{RecipientOnionFields, Retry};
use lightning::ln::functional_test_utils::*;
use lightning::ln::msgs::ChannelMessageHandler;
use lightning::routing::router::{PaymentParameters, RouteParameters};
40 changes: 0 additions & 40 deletions lightning-rapid-gossip-sync/src/error.rs

This file was deleted.

39 changes: 32 additions & 7 deletions lightning-rapid-gossip-sync/src/lib.rs
@@ -74,17 +74,42 @@ use core::ops::Deref;
use core::sync::atomic::{AtomicBool, Ordering};

use lightning::io;
use lightning::ln::msgs::{DecodeError, LightningError};
use lightning::routing::gossip::NetworkGraph;
use lightning::util::logger::Logger;

pub use crate::error::GraphSyncError;

/// Error types that these functions can return
mod error;

/// Core functionality of this crate
mod processing;

/// All-encompassing standard error type that processing can return
#[derive(Debug)]
pub enum GraphSyncError {
/// Error trying to read the update data, typically due to an erroneous data length indication
/// that is greater than the actual amount of data provided
DecodeError(DecodeError),
/// Error applying the patch to the network graph, usually the result of updates that are too
/// old or missing prerequisite data to the application of updates out of order
LightningError(LightningError),
}

impl From<lightning::io::Error> for GraphSyncError {
fn from(error: lightning::io::Error) -> Self {
Self::DecodeError(DecodeError::Io(error.kind()))
}
}

impl From<DecodeError> for GraphSyncError {
fn from(error: DecodeError) -> Self {
Self::DecodeError(error)
}
}

impl From<LightningError> for GraphSyncError {
fn from(error: LightningError) -> Self {
Self::LightningError(error)
}
}

/// The main Rapid Gossip Sync object.
///
/// See [crate-level documentation] for usage.
@@ -167,7 +192,7 @@ mod tests {
use lightning::ln::msgs::DecodeError;
use lightning::routing::gossip::NetworkGraph;
use lightning::util::test_utils::TestLogger;
use crate::RapidGossipSync;
use crate::{GraphSyncError, RapidGossipSync};

#[test]
fn test_sync_from_file() {
Expand Down Expand Up @@ -265,7 +290,7 @@ mod tests {
let start = std::time::Instant::now();
let sync_result = rapid_sync
.sync_network_graph_with_file_path("./res/full_graph.lngossip");
if let Err(crate::error::GraphSyncError::DecodeError(DecodeError::Io(io_error))) = &sync_result {
if let Err(GraphSyncError::DecodeError(DecodeError::Io(io_error))) = &sync_result {
let error_string = format!("Input file lightning-rapid-gossip-sync/res/full_graph.lngossip is missing! Download it from https://bitcoin.ninja/ldk-compressed_graph-285cb27df79-2022-07-21.bin\n\n{:?}", io_error);
#[cfg(not(require_route_graph_test))]
{
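With `GraphSyncError` moved to the crate root (and the `error` module gone), downstream code only needs its import path updated. A minimal sketch, assuming a std build; the snapshot path is a placeholder:

```rust
use core::ops::Deref;

use lightning::routing::gossip::NetworkGraph;
use lightning::util::logger::Logger;
use lightning_rapid_gossip_sync::{GraphSyncError, RapidGossipSync};

// Illustrative only; `L` is whatever logger the application already uses and
// "snapshot.bin" is a placeholder path.
fn sync_from_file<L: Deref>(rgs: &RapidGossipSync<&NetworkGraph<L>, L>)
where
    L::Target: Logger,
{
    match rgs.sync_network_graph_with_file_path("snapshot.bin") {
        Ok(latest_seen) => println!("synced, latest seen timestamp: {}", latest_seen),
        Err(GraphSyncError::DecodeError(e)) => eprintln!("could not decode snapshot: {:?}", e),
        Err(GraphSyncError::LightningError(e)) => eprintln!("could not apply update: {:?}", e),
    }
}
```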
6 changes: 2 additions & 4 deletions lightning-rapid-gossip-sync/src/processing.rs
@@ -14,8 +14,7 @@ use lightning::{log_debug, log_warn, log_trace, log_given_level, log_gossip};
use lightning::util::ser::{BigSize, Readable};
use lightning::io;

use crate::error::GraphSyncError;
use crate::RapidGossipSync;
use crate::{GraphSyncError, RapidGossipSync};

#[cfg(all(feature = "std", not(test)))]
use std::time::{SystemTime, UNIX_EPOCH};
@@ -269,9 +268,8 @@ mod tests {
use lightning::routing::gossip::NetworkGraph;
use lightning::util::test_utils::TestLogger;

use crate::error::GraphSyncError;
use crate::processing::STALE_RGS_UPDATE_AGE_LIMIT_SECS;
use crate::RapidGossipSync;
use crate::{GraphSyncError, RapidGossipSync};

const VALID_RGS_BINARY: [u8; 300] = [
76, 68, 75, 1, 111, 226, 140, 10, 182, 241, 179, 114, 193, 166, 162, 70, 174, 99, 247,
8 changes: 4 additions & 4 deletions lightning/src/blinded_path/mod.rs
@@ -92,7 +92,7 @@ impl BlindedPath {
// be in relation to a specific channel.
let htlc_maximum_msat = u64::max_value();
Self::new_for_payment(
&[], payee_node_id, payee_tlvs, htlc_maximum_msat, entropy_source, secp_ctx
Vec::new(), payee_node_id, payee_tlvs, htlc_maximum_msat, entropy_source, secp_ctx
)
}

@@ -106,19 +106,19 @@
/// [`ForwardTlvs`]: crate::blinded_path::payment::ForwardTlvs
// TODO: make all payloads the same size with padding + add dummy hops
pub fn new_for_payment<ES: EntropySource + ?Sized, T: secp256k1::Signing + secp256k1::Verification>(
intermediate_nodes: &[payment::ForwardNode], payee_node_id: PublicKey,
intermediate_nodes: Vec<payment::ForwardNode>, payee_node_id: PublicKey,
payee_tlvs: payment::ReceiveTlvs, htlc_maximum_msat: u64, entropy_source: &ES,
secp_ctx: &Secp256k1<T>
) -> Result<(BlindedPayInfo, Self), ()> {
let blinding_secret_bytes = entropy_source.get_secure_random_bytes();
let blinding_secret = SecretKey::from_slice(&blinding_secret_bytes[..]).expect("RNG is busted");

let blinded_payinfo = payment::compute_payinfo(intermediate_nodes, &payee_tlvs, htlc_maximum_msat)?;
let blinded_payinfo = payment::compute_payinfo(&intermediate_nodes, &payee_tlvs, htlc_maximum_msat)?;
Ok((blinded_payinfo, BlindedPath {
introduction_node_id: intermediate_nodes.first().map_or(payee_node_id, |n| n.node_id),
blinding_point: PublicKey::from_secret_key(secp_ctx, &blinding_secret),
blinded_hops: payment::blinded_hops(
secp_ctx, intermediate_nodes, payee_node_id, payee_tlvs, &blinding_secret
secp_ctx, &intermediate_nodes, payee_node_id, payee_tlvs, &blinding_secret
).map_err(|_| ())?,
}))
}
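For callers, the signature change above means passing the intermediate hops to `new_for_payment` by value. A minimal sketch, assuming the application already depends on the `bitcoin` crate and builds its `ReceiveTlvs` elsewhere:

```rust
use bitcoin::secp256k1::{PublicKey, Secp256k1};
use lightning::blinded_path::BlindedPath;
use lightning::blinded_path::payment::{ForwardNode, ReceiveTlvs};
use lightning::offers::invoice::BlindedPayInfo;
use lightning::sign::EntropySource;

// Illustrative wrapper; the intermediate hops are now moved into the call rather
// than borrowed as a slice.
fn build_payment_path<ES: EntropySource + ?Sized>(
    intermediate_nodes: Vec<ForwardNode>, payee_node_id: PublicKey, payee_tlvs: ReceiveTlvs,
    htlc_maximum_msat: u64, entropy_source: &ES,
) -> Result<(BlindedPayInfo, BlindedPath), ()> {
    let secp_ctx = Secp256k1::new();
    BlindedPath::new_for_payment(
        intermediate_nodes, payee_node_id, payee_tlvs, htlc_maximum_msat, entropy_source, &secp_ctx,
    )
}
```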
6 changes: 3 additions & 3 deletions lightning/src/chain/channelmonitor.rs
@@ -753,7 +753,7 @@ pub struct ChannelMonitor<Signer: WriteableEcdsaChannelSigner> {
pub(super) inner: Mutex<ChannelMonitorImpl<Signer>>,
}

impl<Signer: WriteableEcdsaChannelSigner> Clone for ChannelMonitor<Signer> where Signer: Clone {
impl<Signer: WriteableEcdsaChannelSigner> Clone for ChannelMonitor<Signer> {
fn clone(&self) -> Self {
let inner = self.inner.lock().unwrap().clone();
ChannelMonitor::from_impl(inner)
@@ -4338,8 +4338,8 @@ where

const MAX_ALLOC_SIZE: usize = 64*1024;

impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP)>
for (BlockHash, ChannelMonitor<SP::EcdsaSigner>) {
impl<'a, 'b, ES: EntropySource, Signer: WriteableEcdsaChannelSigner, SP: SignerProvider<EcdsaSigner=Signer>> ReadableArgs<(&'a ES, &'b SP)>
for (BlockHash, ChannelMonitor<Signer>) {
fn read<R: io::Read>(reader: &mut R, args: (&'a ES, &'b SP)) -> Result<Self, DecodeError> {
macro_rules! unwrap_obj {
($key: expr) => {
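Taken together, the hunks above make `ChannelMonitor` unconditionally `Clone` and spell the signer type out on the `(BlockHash, ChannelMonitor<_>)` read impl. A minimal sketch of deserializing a persisted monitor and cloning it, assuming a std build; where the bytes come from is application-specific:

```rust
use bitcoin::BlockHash;
use lightning::chain::channelmonitor::ChannelMonitor;
use lightning::io::Cursor;
use lightning::ln::msgs::DecodeError;
use lightning::sign::{EntropySource, SignerProvider};
use lightning::util::ser::ReadableArgs;

// Illustrative helper: reads a monitor back from persisted bytes and clones it.
// No `Signer: Clone` bound is needed on this branch.
fn read_and_clone_monitor<ES: EntropySource, SP: SignerProvider>(
    bytes: &[u8], entropy_source: &ES, signer_provider: &SP,
) -> Result<(BlockHash, ChannelMonitor<SP::EcdsaSigner>), DecodeError> {
    let mut reader = Cursor::new(bytes);
    let (block_hash, monitor) = <(BlockHash, ChannelMonitor<SP::EcdsaSigner>)>::read(
        &mut reader, (entropy_source, signer_provider),
    )?;
    let _backup = monitor.clone();
    Ok((block_hash, monitor))
}
```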
4 changes: 4 additions & 0 deletions lightning/src/events/bump_transaction.rs
@@ -265,6 +265,8 @@ impl Utxo {
}

/// Returns a `Utxo` with the `satisfaction_weight` estimate for a P2WPKH nested in P2SH output.
///
/// This is not exported to bindings users as WPubkeyHash is not yet exported
pub fn new_nested_p2wpkh(outpoint: OutPoint, value: u64, pubkey_hash: &WPubkeyHash) -> Self {
let script_sig_size = 1 /* script_sig length */ +
1 /* OP_0 */ +
@@ -281,6 +283,8 @@
}

/// Returns a `Utxo` with the `satisfaction_weight` estimate for a SegWit v0 P2WPKH output.
///
/// This is not exported to bindings users as WPubkeyHash is not yet exported
pub fn new_v0_p2wpkh(outpoint: OutPoint, value: u64, pubkey_hash: &WPubkeyHash) -> Self {
Self {
outpoint,
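These constructors remain Rust-only for now (as the added notes say, `WPubkeyHash` is not yet exported), but nothing changes for Rust users. A minimal sketch of describing a wallet-owned P2WPKH coin for the bump-transaction event handler; deriving the pubkey hash for the wallet key is left to the caller:

```rust
use bitcoin::{OutPoint, WPubkeyHash};
use lightning::events::bump_transaction::Utxo;

// Illustrative wrapper; `value_sats` is the output value in satoshis and the returned
// `Utxo` carries the satisfaction-weight estimate used during coin selection.
fn wallet_p2wpkh_utxo(outpoint: OutPoint, value_sats: u64, pubkey_hash: &WPubkeyHash) -> Utxo {
    Utxo::new_v0_p2wpkh(outpoint, value_sats, pubkey_hash)
}
```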
4 changes: 3 additions & 1 deletion lightning/src/lib.rs
@@ -95,7 +95,9 @@ pub use core2::io;

#[cfg(not(feature = "std"))]
#[doc(hidden)]
/// IO utilities public only for use by in-crate macros. These should not be used externally
/// IO utilities public only for use by in-crate macros. These should not be used externally.
///
/// This is not exported to bindings users as its not intended for public consumption.
pub mod io_extras {
use core2::io::{self, Read, Write};

24 changes: 22 additions & 2 deletions lightning/src/ln/channel.rs
@@ -5179,6 +5179,26 @@ impl<SP: Deref> Channel<SP> where
}
}

/// On startup, it's possible we detect some monitor updates have actually completed (and the
/// ChannelManager was simply stale). In that case, we should simply drop them, which we do
/// here after logging them.
pub fn on_startup_drop_completed_blocked_mon_updates_through<L: Logger>(&mut self, logger: &L, loaded_mon_update_id: u64) {
let channel_id = self.context.channel_id();
self.context.blocked_monitor_updates.retain(|update| {
if update.update.update_id <= loaded_mon_update_id {
log_info!(
logger,
"Dropping completed ChannelMonitorUpdate id {} on channel {} due to a stale ChannelManager",
update.update.update_id,
channel_id,
);
false
} else {
true
}
});
}

pub fn blocked_monitor_updates_pending(&self) -> usize {
self.context.blocked_monitor_updates.len()
}
@@ -5604,7 +5624,7 @@ impl<SP: Deref> Channel<SP> where
return None;
}
};
let our_node_sig = match node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(&announcement)) {
let our_node_sig = match node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(announcement.clone())) {
Err(_) => {
log_error!(logger, "Failed to generate node signature for channel_announcement. Channel will not be announced!");
return None;
@@ -5650,7 +5670,7 @@ impl<SP: Deref> Channel<SP> where
.map_err(|_| ChannelError::Ignore("Signer failed to retrieve own public key".to_owned()))?);
let were_node_one = announcement.node_id_1 == our_node_key;

let our_node_sig = node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(&announcement))
let our_node_sig = node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(announcement.clone()))
.map_err(|_| ChannelError::Ignore("Failed to generate node signature for channel_announcement".to_owned()))?;
match &self.context.holder_signer {
ChannelSignerType::Ecdsa(ecdsa) => {