Skip to content

Fix a full_stack_target crash #232

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Oct 31, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 52 additions & 30 deletions fuzz/fuzz_targets/full_stack_target.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ use secp256k1::key::{PublicKey,SecretKey};
use secp256k1::Secp256k1;

use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::{HashMap, hash_map};
use std::cmp;
use std::hash::Hash;
use std::sync::Arc;
Expand Down Expand Up @@ -140,10 +140,11 @@ struct MoneyLossDetector<'a> {

peers: &'a RefCell<[bool; 256]>,
funding_txn: Vec<Transaction>,
txids_confirmed: HashMap<Sha256dHash, usize>,
header_hashes: Vec<Sha256dHash>,
height: usize,
max_height: usize,

blocks_connected: u32,
}
impl<'a> MoneyLossDetector<'a> {
pub fn new(peers: &'a RefCell<[bool; 256]>, manager: Arc<ChannelManager>, monitor: Arc<channelmonitor::SimpleManyChannelMonitor<OutPoint>>, handler: PeerManager<Peer<'a>>) -> Self {
Expand All @@ -154,17 +155,34 @@ impl<'a> MoneyLossDetector<'a> {

peers,
funding_txn: Vec::new(),
txids_confirmed: HashMap::new(),
header_hashes: vec![Default::default()],
height: 0,
max_height: 0,
blocks_connected: 0,
}
}

fn connect_block(&mut self, txn: &[&Transaction], txn_idxs: &[u32]) {
let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height], merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
fn connect_block(&mut self, all_txn: &[Transaction]) {
let mut txn = Vec::with_capacity(all_txn.len());
let mut txn_idxs = Vec::with_capacity(all_txn.len());
for (idx, tx) in all_txn.iter().enumerate() {
let txid = Sha256dHash::from_data(&serialize(tx).unwrap()[..]);
match self.txids_confirmed.entry(txid) {
hash_map::Entry::Vacant(e) => {
e.insert(self.height);
txn.push(tx);
txn_idxs.push(idx as u32 + 1);
},
_ => {},
}
}

let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height], merkle_root: Default::default(), time: self.blocks_connected, bits: 42, nonce: 42 };
self.height += 1;
self.manager.block_connected(&header, self.height as u32, txn, txn_idxs);
(*self.monitor).block_connected(&header, self.height as u32, txn, txn_idxs);
self.blocks_connected += 1;
self.manager.block_connected(&header, self.height as u32, &txn[..], &txn_idxs[..]);
(*self.monitor).block_connected(&header, self.height as u32, &txn[..], &txn_idxs[..]);
if self.header_hashes.len() > self.height {
self.header_hashes[self.height] = header.bitcoin_hash();
} else {
Expand All @@ -180,6 +198,10 @@ impl<'a> MoneyLossDetector<'a> {
let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height], merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
self.manager.block_disconnected(&header);
self.monitor.block_disconnected(&header);
let removal_height = self.height;
self.txids_confirmed.retain(|_, height| {
removal_height != *height
});
}
}
}
Expand Down Expand Up @@ -280,7 +302,7 @@ pub fn do_test(data: &[u8], logger: &Arc<Logger>) {

let watch = Arc::new(ChainWatchInterfaceUtil::new(Network::Bitcoin, Arc::clone(&logger)));
let broadcast = Arc::new(TestBroadcaster{});
let monitor = channelmonitor::SimpleManyChannelMonitor::new(watch.clone(), broadcast.clone());
let monitor = channelmonitor::SimpleManyChannelMonitor::new(watch.clone(), broadcast.clone(), Arc::clone(&logger));

let keys_manager = Arc::new(KeyProvider { node_secret: our_network_key.clone() });
let channelmanager = ChannelManager::new(slice_to_be32(get_slice!(4)), get_slice!(1)[0] != 0, Network::Bitcoin, fee_est.clone(), monitor.clone(), watch.clone(), broadcast.clone(), Arc::clone(&logger), keys_manager.clone()).unwrap();
Expand Down Expand Up @@ -398,36 +420,36 @@ pub fn do_test(data: &[u8], logger: &Arc<Logger>) {
}
},
10 => {
for funding_generation in pending_funding_generation.drain(..) {
'outer_loop: for funding_generation in pending_funding_generation.drain(..) {
let mut tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: vec![TxOut {
value: funding_generation.1, script_pubkey: funding_generation.2,
}] };
let funding_output = OutPoint::new(Sha256dHash::from_data(&serialize(&tx).unwrap()[..]), 0);
let mut found_duplicate_txo = false;
for chan in channelmanager.list_channels() {
if chan.channel_id == funding_output.to_channel_id() {
found_duplicate_txo = true;
let funding_output = 'search_loop: loop {
let funding_txid = Sha256dHash::from_data(&serialize(&tx).unwrap()[..]);
if let None = loss_detector.txids_confirmed.get(&funding_txid) {
let outpoint = OutPoint::new(funding_txid, 0);
for chan in channelmanager.list_channels() {
if chan.channel_id == outpoint.to_channel_id() {
tx.version += 1;
continue 'search_loop;
}
}
break outpoint;
}
}
if !found_duplicate_txo {
channelmanager.funding_transaction_generated(&funding_generation.0, funding_output.clone());
pending_funding_signatures.insert(funding_output, tx);
}
tx.version += 1;
if tx.version > 0xff {
continue 'outer_loop;
}
};
channelmanager.funding_transaction_generated(&funding_generation.0, funding_output.clone());
pending_funding_signatures.insert(funding_output, tx);
}
},
11 => {
if !pending_funding_relay.is_empty() {
let mut txn = Vec::with_capacity(pending_funding_relay.len());
let mut txn_idxs = Vec::with_capacity(pending_funding_relay.len());
for (idx, tx) in pending_funding_relay.iter().enumerate() {
txn.push(tx);
txn_idxs.push(idx as u32 + 1);
}

loss_detector.connect_block(&txn[..], &txn_idxs[..]);
txn_idxs.clear();
loss_detector.connect_block(&pending_funding_relay[..]);
for _ in 2..100 {
loss_detector.connect_block(&txn[..], &txn_idxs[..]);
loss_detector.connect_block(&[]);
}
}
for tx in pending_funding_relay.drain(..) {
Expand All @@ -437,11 +459,11 @@ pub fn do_test(data: &[u8], logger: &Arc<Logger>) {
12 => {
let txlen = slice_to_be16(get_slice!(2));
if txlen == 0 {
loss_detector.connect_block(&[], &[]);
loss_detector.connect_block(&[]);
} else {
let txres: Result<Transaction, _> = deserialize(get_slice!(txlen));
if let Ok(tx) = txres {
loss_detector.connect_block(&[&tx], &[1]);
loss_detector.connect_block(&[tx]);
} else {
return;
}
Expand Down
8 changes: 4 additions & 4 deletions src/ln/channelmanager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4051,7 +4051,7 @@ mod tests {
let mut seed = [0; 32];
rng.fill_bytes(&mut seed);
let keys_manager = Arc::new(keysinterface::KeysManager::new(&seed, Network::Testnet, Arc::clone(&logger)));
let chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(chain_monitor.clone(), tx_broadcaster.clone()));
let chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(chain_monitor.clone(), tx_broadcaster.clone(), logger.clone()));
let node = ChannelManager::new(0, true, Network::Testnet, feeest.clone(), chan_monitor.clone(), chain_monitor.clone(), tx_broadcaster.clone(), Arc::clone(&logger), keys_manager.clone()).unwrap();
let router = Router::new(PublicKey::from_secret_key(&secp_ctx, &keys_manager.get_node_secret()), chain_monitor.clone(), Arc::clone(&logger));
nodes.push(Node { chain_monitor, tx_broadcaster, chan_monitor, node, router, node_seed: seed,
Expand Down Expand Up @@ -6881,7 +6881,7 @@ mod tests {
let mut chan_0_monitor_serialized = VecWriter(Vec::new());
nodes[0].chan_monitor.simple_monitor.monitors.lock().unwrap().iter().next().unwrap().1.write_for_disk(&mut chan_0_monitor_serialized).unwrap();

nodes[0].chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(nodes[0].chain_monitor.clone(), nodes[0].tx_broadcaster.clone()));
nodes[0].chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(nodes[0].chain_monitor.clone(), nodes[0].tx_broadcaster.clone(), Arc::new(test_utils::TestLogger::new())));
let mut chan_0_monitor_read = &chan_0_monitor_serialized.0[..];
let (_, chan_0_monitor) = <(Sha256dHash, ChannelMonitor)>::read(&mut chan_0_monitor_read, Arc::new(test_utils::TestLogger::new())).unwrap();
assert!(chan_0_monitor_read.is_empty());
Expand Down Expand Up @@ -6945,7 +6945,7 @@ mod tests {
let mut chan_0_monitor_serialized = VecWriter(Vec::new());
nodes[0].chan_monitor.simple_monitor.monitors.lock().unwrap().iter().next().unwrap().1.write_for_disk(&mut chan_0_monitor_serialized).unwrap();

nodes[0].chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(nodes[0].chain_monitor.clone(), nodes[0].tx_broadcaster.clone()));
nodes[0].chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(nodes[0].chain_monitor.clone(), nodes[0].tx_broadcaster.clone(), Arc::new(test_utils::TestLogger::new())));
let mut chan_0_monitor_read = &chan_0_monitor_serialized.0[..];
let (_, chan_0_monitor) = <(Sha256dHash, ChannelMonitor)>::read(&mut chan_0_monitor_read, Arc::new(test_utils::TestLogger::new())).unwrap();
assert!(chan_0_monitor_read.is_empty());
Expand Down Expand Up @@ -7004,7 +7004,7 @@ mod tests {
node_0_monitors_serialized.push(writer.0);
}

nodes[0].chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(nodes[0].chain_monitor.clone(), nodes[0].tx_broadcaster.clone()));
nodes[0].chan_monitor = Arc::new(test_utils::TestChannelMonitor::new(nodes[0].chain_monitor.clone(), nodes[0].tx_broadcaster.clone(), Arc::new(test_utils::TestLogger::new())));
let mut node_0_monitors = Vec::new();
for serialized in node_0_monitors_serialized.iter() {
let mut read = &serialized[..];
Expand Down
15 changes: 12 additions & 3 deletions src/ln/channelmonitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ pub struct SimpleManyChannelMonitor<Key> {
chain_monitor: Arc<ChainWatchInterface>,
broadcaster: Arc<BroadcasterInterface>,
pending_events: Mutex<Vec<events::Event>>,
logger: Arc<Logger>,
}

impl<Key : Send + cmp::Eq + hash::Hash> ChainListener for SimpleManyChannelMonitor<Key> {
Expand Down Expand Up @@ -144,12 +145,13 @@ impl<Key : Send + cmp::Eq + hash::Hash> ChainListener for SimpleManyChannelMonit
impl<Key : Send + cmp::Eq + hash::Hash + 'static> SimpleManyChannelMonitor<Key> {
/// Creates a new object which can be used to monitor several channels given the chain
/// interface with which to register to receive notifications.
pub fn new(chain_monitor: Arc<ChainWatchInterface>, broadcaster: Arc<BroadcasterInterface>) -> Arc<SimpleManyChannelMonitor<Key>> {
pub fn new(chain_monitor: Arc<ChainWatchInterface>, broadcaster: Arc<BroadcasterInterface>, logger: Arc<Logger>) -> Arc<SimpleManyChannelMonitor<Key>> {
let res = Arc::new(SimpleManyChannelMonitor {
monitors: Mutex::new(HashMap::new()),
chain_monitor,
broadcaster,
pending_events: Mutex::new(Vec::new()),
logger,
});
let weak_res = Arc::downgrade(&res);
res.chain_monitor.register_listener(weak_res);
Expand All @@ -160,12 +162,19 @@ impl<Key : Send + cmp::Eq + hash::Hash + 'static> SimpleManyChannelMonitor<Key>
pub fn add_update_monitor_by_key(&self, key: Key, monitor: ChannelMonitor) -> Result<(), HandleError> {
let mut monitors = self.monitors.lock().unwrap();
match monitors.get_mut(&key) {
Some(orig_monitor) => return orig_monitor.insert_combine(monitor),
Some(orig_monitor) => {
log_trace!(self, "Updating Channel Monitor for channel {}", log_funding_option!(monitor.funding_txo));
return orig_monitor.insert_combine(monitor);
},
None => {}
};
match &monitor.funding_txo {
&None => self.chain_monitor.watch_all_txn(),
&None => {
log_trace!(self, "Got new Channel Monitor for no-funding-set channel (monitoring all txn!)");
self.chain_monitor.watch_all_txn()
},
&Some((ref outpoint, ref script)) => {
log_trace!(self, "Got new Channel Monitor for channel {}", log_bytes!(outpoint.to_channel_id()[..]));
self.chain_monitor.install_watch_tx(&outpoint.txid, script);
self.chain_monitor.install_watch_outpoint((outpoint.txid, outpoint.index as u32), script);
},
Expand Down
15 changes: 15 additions & 0 deletions src/util/macro_logger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,21 @@ macro_rules! log_funding_channel_id {
}
}

/// Display adaptor for an optional funding outpoint: prints the derived
/// channel id when the funding output is set, or a fixed placeholder when
/// it is not. Used via the `log_funding_option!` logging macro.
pub(crate) struct DebugFundingOption<'a, T: 'a>(pub &'a Option<(OutPoint, T)>);
impl<'a, T> std::fmt::Display for DebugFundingOption<'a, T> {
	fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
		// Delegate to DebugBytes for the channel id when we have a funding
		// output; otherwise emit a human-readable placeholder.
		if let Some(&(ref funding_output, _)) = self.0.as_ref() {
			DebugBytes(&funding_output.to_channel_id()[..]).fmt(f)
		} else {
			write!(f, "without funding output set")
		}
	}
}
// Wraps an `Option<(OutPoint, T)>` in the `DebugFundingOption` Display
// adaptor so it can be interpolated directly into log format strings.
macro_rules! log_funding_option {
($funding_option: expr) => {
::util::macro_logger::DebugFundingOption(&$funding_option)
}
}

pub(crate) struct DebugRoute<'a>(pub &'a Route);
impl<'a> std::fmt::Display for DebugRoute<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
Expand Down
4 changes: 2 additions & 2 deletions src/util/test_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@ pub struct TestChannelMonitor {
pub update_ret: Mutex<Result<(), channelmonitor::ChannelMonitorUpdateErr>>,
}
impl TestChannelMonitor {
pub fn new(chain_monitor: Arc<chaininterface::ChainWatchInterface>, broadcaster: Arc<chaininterface::BroadcasterInterface>) -> Self {
pub fn new(chain_monitor: Arc<chaininterface::ChainWatchInterface>, broadcaster: Arc<chaininterface::BroadcasterInterface>, logger: Arc<Logger>) -> Self {
Self {
added_monitors: Mutex::new(Vec::new()),
simple_monitor: channelmonitor::SimpleManyChannelMonitor::new(chain_monitor, broadcaster),
simple_monitor: channelmonitor::SimpleManyChannelMonitor::new(chain_monitor, broadcaster, logger),
update_ret: Mutex::new(Ok(())),
}
}
Expand Down