6 changes: 3 additions & 3 deletions lightning-dns-resolver/src/lib.rs
@@ -180,7 +180,7 @@ mod test {
use lightning::types::payment::PaymentHash;
use lightning::util::logger::Logger;

-use lightning::{expect_payment_claimed, get_htlc_update_msgs};
+use lightning::expect_payment_claimed;
use lightning_types::string::UntrustedString;

use std::ops::Deref;
@@ -416,7 +416,7 @@ mod test {
nodes[0].onion_messenger.handle_onion_message(payee_id, &inv);

check_added_monitors(&nodes[0], 1);
-let updates = get_htlc_update_msgs!(nodes[0], payee_id);
+let updates = get_htlc_update_msgs(&nodes[0], &payee_id);
nodes[1].node.handle_update_add_htlc(payer_id, &updates.update_add_htlcs[0]);
do_commitment_signed_dance(&nodes[1], &nodes[0], &updates.commitment_signed, false, false);
expect_and_process_pending_htlcs(&nodes[1], false);
@@ -450,7 +450,7 @@ mod test {
}

check_added_monitors(&nodes[1], 1);
-let mut updates = get_htlc_update_msgs!(nodes[1], payer_id);
+let mut updates = get_htlc_update_msgs(&nodes[1], &payer_id);
nodes[0].node.handle_update_fulfill_htlc(payee_id, updates.update_fulfill_htlcs.remove(0));
do_commitment_signed_dance(&nodes[0], &nodes[1], &updates.commitment_signed, false, false);

9 changes: 4 additions & 5 deletions lightning-liquidity/tests/lsps5_integration_tests.rs
@@ -8,12 +8,11 @@ use common::{
};

use lightning::chain::{BestBlock, Filter};
-use lightning::check_closed_event;
use lightning::events::ClosureReason;
use lightning::ln::channelmanager::{ChainParameters, InterceptId};
use lightning::ln::functional_test_utils::{
-close_channel, create_chan_between_nodes, create_chanmon_cfgs, create_network,
-create_node_cfgs, create_node_chanmgrs, Node,
+check_closed_event, close_channel, create_chan_between_nodes, create_chanmon_cfgs,
+create_network, create_node_cfgs, create_node_chanmgrs, Node,
};
use lightning::ln::msgs::Init;
use lightning::ln::peer_handler::CustomMessageHandler;
@@ -1480,9 +1479,9 @@ fn dos_protection() {

close_channel(&service_node.inner, &client_node.inner, &channel_id, funding_tx, true);
let node_a_reason = ClosureReason::CounterpartyInitiatedCooperativeClosure;
-check_closed_event!(service_node.inner, 1, node_a_reason, [client_node_id], 100000);
+check_closed_event(&service_node.inner, 1, node_a_reason, false, &[client_node_id], 100000);
let node_b_reason = ClosureReason::LocallyInitiatedCooperativeClosure;
-check_closed_event!(client_node.inner, 1, node_b_reason, [service_node_id], 100000);
+check_closed_event(&client_node.inner, 1, node_b_reason, false, &[service_node_id], 100000);

// channel is now closed again -> should reject
assert_lsps5_reject(&service_node, &client_node);
5 changes: 2 additions & 3 deletions lightning-persister/src/fs_store.rs
@@ -720,7 +720,6 @@ mod tests {

use lightning::chain::chainmonitor::Persist;
use lightning::chain::ChannelMonitorUpdateStatus;
-use lightning::check_closed_event;
use lightning::events::ClosureReason;
use lightning::ln::functional_test_utils::*;
use lightning::ln::msgs::BaseMessageHandler;
@@ -884,7 +883,7 @@ mod tests {
.unwrap();
let reason =
ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true), message };
-check_closed_event!(nodes[1], 1, reason, [node_a_id], 100000);
+check_closed_event(&nodes[1], 1, reason, false, &[node_a_id], 100000);
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();

// Set the store's directory to read-only, which should result in
@@ -928,7 +927,7 @@ mod tests {
.unwrap();
let reason =
ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true), message };
-check_closed_event!(nodes[1], 1, reason, [node_a_id], 100000);
+check_closed_event(&nodes[1], 1, reason, false, &[node_a_id], 100000);
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
let update_id = update_map.get(&added_monitors[0].1.channel_id()).unwrap();
17 changes: 9 additions & 8 deletions lightning-persister/src/test_utils.rs
@@ -1,14 +1,14 @@
use lightning::events::ClosureReason;
use lightning::ln::functional_test_utils::{
-connect_block, create_announced_chan_between_nodes, create_chanmon_cfgs, create_dummy_block,
-create_network, create_node_cfgs, create_node_chanmgrs, send_payment,
+check_closed_event, connect_block, create_announced_chan_between_nodes, create_chanmon_cfgs,
+create_dummy_block, create_network, create_node_cfgs, create_node_chanmgrs, send_payment,
};
use lightning::util::persist::{
migrate_kv_store_data, read_channel_monitors, KVStoreSync, MigratableKVStore,
KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN,
};
use lightning::util::test_utils;
-use lightning::{check_added_monitors, check_closed_broadcast, check_closed_event};
+use lightning::{check_added_monitors, check_closed_broadcast};

use std::panic::RefUnwindSafe;

@@ -188,7 +188,7 @@ pub(crate) fn do_test_store<K: KVStoreSync + Sync>(store_0: &K, store_1: &K) {
.force_close_broadcasting_latest_txn(&chan_id, &node_b_id, message.clone())
.unwrap();
let reason = ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true), message };
-check_closed_event!(nodes[0], 1, reason, [node_b_id], 100000);
+check_closed_event(&nodes[0], 1, reason, false, &[node_b_id], 100000);
check_closed_broadcast!(nodes[0], true);
check_added_monitors!(nodes[0], 1);

@@ -204,12 +204,13 @@ pub(crate) fn do_test_store<K: KVStoreSync + Sync>(store_0: &K, store_1: &K) {
),
);
check_closed_broadcast!(nodes[1], true);
check_closed_event!(
nodes[1],
check_closed_event(
&nodes[1],
1,
ClosureReason::CommitmentTxConfirmed,
-[nodes[0].node.get_our_node_id()],
-100000
+false,
+&[nodes[0].node.get_our_node_id()],
+100000,
);
check_added_monitors!(nodes[1], 1);

11 changes: 5 additions & 6 deletions lightning/src/chain/chainmonitor.rs
@@ -1596,11 +1596,10 @@ where
mod tests {
use crate::chain::channelmonitor::ANTI_REORG_DELAY;
use crate::chain::{ChannelMonitorUpdateStatus, Watch};
+use crate::check_added_monitors;
use crate::events::{ClosureReason, Event};
-use crate::get_htlc_update_msgs;
use crate::ln::functional_test_utils::*;
use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, MessageSendEvent};
-use crate::{check_added_monitors, check_closed_event};
use crate::{expect_payment_path_successful, get_event_msg};

const CHAINSYNC_MONITOR_PARTITION_FACTOR: u32 = 5;
@@ -1691,7 +1690,7 @@ mod tests {
// Now manually walk the commitment signed dance - because we claimed two payments
// back-to-back it doesn't fit into the neat walk commitment_signed_dance does.

-let mut updates = get_htlc_update_msgs!(nodes[1], node_a_id);
+let mut updates = get_htlc_update_msgs(&nodes[1], &node_a_id);
nodes[0].node.handle_update_fulfill_htlc(node_b_id, updates.update_fulfill_htlcs.remove(0));
expect_payment_sent(&nodes[0], payment_preimage_1, None, false, false);
nodes[0].node.handle_commitment_signed_batch_test(node_b_id, &updates.commitment_signed);
@@ -1700,7 +1699,7 @@

nodes[1].node.handle_revoke_and_ack(node_a_id, &as_first_raa);
check_added_monitors!(nodes[1], 1);
-let mut bs_2nd_updates = get_htlc_update_msgs!(nodes[1], node_a_id);
+let mut bs_2nd_updates = get_htlc_update_msgs(&nodes[1], &node_a_id);
nodes[1].node.handle_commitment_signed_batch_test(node_a_id, &as_first_update);
check_added_monitors!(nodes[1], 1);
let bs_first_raa = get_event_msg!(nodes[1], MessageSendEvent::SendRevokeAndACK, node_a_id);
@@ -1781,7 +1780,7 @@ mod tests {
.unwrap();
let closure_reason =
ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true), message };
-check_closed_event!(&nodes[0], 1, closure_reason, false, [node_c_id], 1000000);
+check_closed_event(&nodes[0], 1, closure_reason, false, &[node_c_id], 1000000);
check_closed_broadcast(&nodes[0], 1, true);
let close_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
assert_eq!(close_tx.len(), 1);
@@ -1790,7 +1789,7 @@
check_closed_broadcast(&nodes[2], 1, true);
check_added_monitors(&nodes[2], 1);
let closure_reason = ClosureReason::CommitmentTxConfirmed;
-check_closed_event!(&nodes[2], 1, closure_reason, false, [node_a_id], 1000000);
+check_closed_event(&nodes[2], 1, closure_reason, false, &[node_a_id], 1000000);

chanmon_cfgs[0].persister.chain_sync_monitor_persistences.lock().unwrap().clear();
chanmon_cfgs[2].persister.chain_sync_monitor_persistences.lock().unwrap().clear();
6 changes: 3 additions & 3 deletions lightning/src/ln/async_payments_tests.rs
@@ -582,7 +582,7 @@ fn lock_in_htlc_for_static_invoice(
// The sender should lock in the held HTLC with their LSP right after receiving the static invoice.
sender.onion_messenger.handle_onion_message(om_peer, &static_invoice_om);
check_added_monitors(sender, 1);
-let commitment_update = get_htlc_update_msgs!(sender, sender_lsp.node.get_our_node_id());
+let commitment_update = get_htlc_update_msgs(&sender, &sender_lsp.node.get_our_node_id());
let update_add = commitment_update.update_add_htlcs[0].clone();
let payment_hash = update_add.payment_hash;
assert!(update_add.hold_htlc.is_some());
@@ -639,7 +639,7 @@ fn invalid_keysend_payment_secret() {
.expect_failure(HTLCHandlingFailureType::Receive { payment_hash });
do_pass_along_path(args);

-let updates_2_1 = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
+let updates_2_1 = get_htlc_update_msgs(&nodes[2], &nodes[1].node.get_our_node_id());
assert_eq!(updates_2_1.update_fail_malformed_htlcs.len(), 1);
let update_malformed = &updates_2_1.update_fail_malformed_htlcs[0];
assert_eq!(update_malformed.sha256_of_onion, [0; 32]);
@@ -652,7 +652,7 @@
.handle_update_fail_malformed_htlc(nodes[2].node.get_our_node_id(), update_malformed);
do_commitment_signed_dance(&nodes[1], &nodes[2], &updates_2_1.commitment_signed, true, false);

-let updates_1_0 = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
+let updates_1_0 = get_htlc_update_msgs(&nodes[1], &nodes[0].node.get_our_node_id());
assert_eq!(updates_1_0.update_fail_htlcs.len(), 1);
nodes[0].node.handle_update_fail_htlc(
nodes[1].node.get_our_node_id(),
12 changes: 6 additions & 6 deletions lightning/src/ln/async_signer_tests.rs
@@ -950,7 +950,7 @@ fn do_test_async_commitment_signature_ordering(monitor_update_failure: bool) {
// The rest of this is boilerplate for resolving the previous state.

nodes[0].node.handle_revoke_and_ack(node_b_id, &bs_revoke_and_ack);
-let as_commitment_signed = get_htlc_update_msgs!(nodes[0], node_b_id);
+let as_commitment_signed = get_htlc_update_msgs(&nodes[0], &node_b_id);
check_added_monitors!(nodes[0], 1);

nodes[0].node.handle_commitment_signed_batch_test(node_b_id, &bs_second_commitment_signed);
@@ -1319,9 +1319,9 @@ fn do_test_closing_signed(extra_closing_signed: bool, reconnect: bool) {
assert!(nodes[0].node.list_channels().is_empty());
assert!(nodes[1].node.list_channels().is_empty());
let reason_a = ClosureReason::LocallyInitiatedCooperativeClosure;
-check_closed_event!(nodes[0], 1, reason_a, [node_b_id], 100000);
+check_closed_event(&nodes[0], 1, reason_a, false, &[node_b_id], 100000);
let reason_b = ClosureReason::CounterpartyInitiatedCooperativeClosure;
-check_closed_event!(nodes[1], 1, reason_b, [node_a_id], 100000);
+check_closed_event(&nodes[1], 1, reason_b, false, &[node_a_id], 100000);
}

#[test]
@@ -1358,12 +1358,12 @@ fn test_no_disconnect_while_async_revoke_and_ack_expecting_remote_commitment_sig
nodes[1].node.send_payment_with_route(route2, payment_hash2, onion2, payment_id2).unwrap();
check_added_monitors(&nodes[1], 1);

-let update = get_htlc_update_msgs!(&nodes[0], node_b_id);
+let update = get_htlc_update_msgs(&nodes[0], &node_b_id);
nodes[1].node.handle_update_add_htlc(node_a_id, &update.update_add_htlcs[0]);
nodes[1].node.handle_commitment_signed_batch_test(node_a_id, &update.commitment_signed);
check_added_monitors(&nodes[1], 1);

-let update = get_htlc_update_msgs!(&nodes[1], node_a_id);
+let update = get_htlc_update_msgs(&nodes[1], &node_a_id);
nodes[0].node.handle_update_add_htlc(node_b_id, &update.update_add_htlcs[0]);
nodes[0].node.handle_commitment_signed_batch_test(node_b_id, &update.commitment_signed);
check_added_monitors(&nodes[0], 1);
@@ -1420,7 +1420,7 @@ fn test_no_disconnect_while_async_commitment_signed_expecting_remote_revoke_and_

// After processing the `update_fulfill`, they'll only be able to send `revoke_and_ack` until
// the `commitment_signed` is no longer pending.
-let mut update = get_htlc_update_msgs!(&nodes[1], node_a_id);
+let mut update = get_htlc_update_msgs(&nodes[1], &node_a_id);
nodes[0].node.handle_update_fulfill_htlc(node_b_id, update.update_fulfill_htlcs.remove(0));
expect_payment_sent(&nodes[0], preimage, None, false, false);
nodes[0].node.handle_commitment_signed_batch_test(node_b_id, &update.commitment_signed);
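The hunks above all make the same mechanical change: the test helpers `get_htlc_update_msgs` and `check_closed_event` are invoked as plain functions rather than macros, so each call site gains explicit `&` borrows, passes the counterparty list as a slice (`&[...]`), and spells out the `false` flag that most of the macro call sites omitted. Below is a minimal, self-contained sketch of that macro-to-function pattern; `Node`, `check_event!`, and `check_event_fn` are hypothetical stand-ins, not LDK APIs:

```rust
// Hypothetical sketch of the refactor pattern applied in this diff: a
// declarative macro that inserted borrows and defaulted a flag is replaced
// by a plain function where every borrow and argument is explicit.

struct Node {
	id: u64,
}

// Old style: the macro hides the `&` borrows and defaults the flag to false.
macro_rules! check_event {
	($node: expr, $count: expr, $peers: expr) => {
		check_event_fn(&$node, $count, false, &$peers)
	};
}

// New style: a plain function; callers write the borrows and flag themselves.
fn check_event_fn(node: &Node, count: usize, strict: bool, peers: &[u64]) {
	// A real helper would assert on the node's pending events; this sketch
	// only demonstrates the call shape.
	println!("node {}: {} event(s), strict={}, peers={:?}", node.id, count, strict, peers);
}

fn main() {
	let node = Node { id: 1 };
	let peers = [2u64];
	check_event!(node, 1, peers); // macro: borrows and flag are implicit
	check_event_fn(&node, 1, false, &peers); // function: everything explicit
}
```

The function form type-checks like ordinary Rust, gives clearer compiler errors at the call site, and makes the previously hidden default visible, at the cost of slightly longer calls, which matches the shape of every updated call site above.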