Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion lightning/src/chain/chainmonitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1202,6 +1202,7 @@ where

// We may need to start monitoring for any alternative funding transactions.
if let Some(ref chain_source) = self.chain_source {
let current_block_hash = monitor.current_best_block().block_hash;
for (funding_outpoint, funding_script) in
update.internal_renegotiated_funding_data()
{
Expand All @@ -1212,7 +1213,8 @@ where
);
chain_source.register_tx(&funding_outpoint.txid, &funding_script);
chain_source.register_output(WatchedOutput {
block_hash: None,
block_hash: Some(current_block_hash),
// block_hash: None,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please remove the commented-out lines here and below.

outpoint: funding_outpoint,
script_pubkey: funding_script,
});
Expand Down
4 changes: 3 additions & 1 deletion lightning/src/chain/channelmonitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1990,6 +1990,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
{
let lock = self.inner.lock().unwrap();
let logger = WithChannelMonitor::from_impl(logger, &*lock, None);
let current_block_hash = lock.best_block.block_hash;
for funding in core::iter::once(&lock.funding).chain(&lock.pending_funding) {
let funding_outpoint = funding.funding_outpoint();
log_trace!(&logger, "Registering funding outpoint {} with the filter to monitor confirmations", &funding_outpoint);
Expand All @@ -2002,7 +2003,8 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
let outpoint = OutPoint { txid: *txid, index: *index as u16 };
log_trace!(logger, "Registering outpoint {} with the filter to monitor spend", outpoint);
filter.register_output(WatchedOutput {
block_hash: None,
block_hash: Some(current_block_hash),
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Since we're interested in the spend of an output, ideally this block hash is for the block the output was created in, or if it has not been created yet, the current block hash. This means we'll need to store it so we can reuse it here when this gets called on startup.

// block_hash: None,
outpoint,
script_pubkey: script_pubkey.clone(),
});
Expand Down
77 changes: 77 additions & 0 deletions lightning/src/ln/monitor_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3298,3 +3298,80 @@ fn test_claim_event_never_handled() {
// `ChannelMonitorUpdate`.
check_added_monitors(&nodes[1], 2);
}

#[test]
fn test_watched_output_block_hash_persistence_after_restart() {
	// NOTE: these imports are function-local in the original; reviewers asked for them to be
	// moved to module level, but that is outside this block's span.
	use crate::chain::WatchedOutput;
	use crate::sync::Mutex;
	use std::ops::Deref;

	/// Minimal `chain::Filter` implementation that records every `WatchedOutput`
	/// passed to `register_output`, so the test can inspect the `block_hash`
	/// field set by `ChannelMonitor::load_outputs_to_watch`.
	struct TestFilter {
		// All outputs registered with this filter, in registration order.
		watched_outputs: Mutex<Vec<WatchedOutput>>,
	}

	impl TestFilter {
		fn new() -> Self {
			Self { watched_outputs: Mutex::new(Vec::new()) }
		}
	}

	impl crate::chain::Filter for TestFilter {
		fn register_tx(&self, _txid: &bitcoin::Txid, _script_pubkey: &bitcoin::Script) {}

		fn register_output(&self, output: WatchedOutput) {
			self.watched_outputs.lock().unwrap().push(output);
		}
	}

	// `load_outputs_to_watch` takes a `Deref<Target: Filter>`; a self-deref lets us
	// pass `&TestFilter` directly without wrapping it in an `Arc`.
	impl Deref for TestFilter {
		type Target = TestFilter;
		fn deref(&self) -> &Self::Target {
			self
		}
	}

	let chanmon_cfgs = create_chanmon_cfgs(2);
	let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
	let persister;
	let new_chain_mon;
	let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
	let nodes_1_reload;
	let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);

	let chan = create_announced_chan_between_nodes(&nodes, 0, 1);

	// Mine empty blocks to advance the monitor's best block hash past the
	// funding block, so the persisted hash is distinguishable from genesis.
	for _ in 0..10 {
		mine_transactions(&nodes[1], &[]);
	}

	// The implementation registers outputs with the monitor's current best block
	// hash, i.e. the tip after the 10 blocks mined above.
	// TODO(review): per review feedback, this ideally should be the earliest block
	// the output could appear in (the hash *before* the extra mining) — confirm
	// once the implementation stores the output-creation block hash.
	let expected_block_hash = nodes[1].best_block_hash();

	// Serialize the channel manager and monitor so we can reload from scratch.
	let init_node_ser = nodes[1].node.encode();
	let chan_1_monitor_serialized = {
		let monitor = get_monitor!(nodes[1], chan.2);
		monitor.encode()
	};
	let mons = &[&chan_1_monitor_serialized[..]];

	// Reload node to simulate restart.
	reload_node!(nodes[1], &init_node_ser, mons, persister, new_chain_mon, nodes_1_reload);

	let test_filter = TestFilter::new();

	// Re-register outputs from the freshly-deserialized monitor, as a node would
	// do on startup, capturing what gets handed to the filter.
	{
		let monitor = get_monitor!(nodes[1], chan.2);
		monitor.load_outputs_to_watch(&test_filter, &nodes[1].logger);
	}

	let watched_outputs = test_filter.watched_outputs.lock().unwrap();
	assert!(!watched_outputs.is_empty(), "Should have watched_outputs after reload");

	// Every registered output must carry the persisted best block hash rather
	// than `None`, proving the hash survives a monitor round-trip.
	for watched_output in watched_outputs.iter() {
		assert_eq!(watched_output.block_hash, Some(expected_block_hash),
			"WatchedOutput block_hash should match the monitor's best block hash after restart");
	}
}
Loading