@@ -2203,7 +2203,7 @@ fn channel_monitor_network_test() {
2203
2203
send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2], &nodes[3], &nodes[4])[..], 8000000);
2204
2204
2205
2205
// Simple case with no pending HTLCs:
2206
- nodes[1].node.force_close_channel(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
2206
+ nodes[1].node.force_close_broadcasting_latest_txn(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
2207
2207
check_added_monitors!(nodes[1], 1);
2208
2208
check_closed_broadcast!(nodes[1], true);
2209
2209
{
@@ -2224,7 +2224,7 @@ fn channel_monitor_network_test() {
2224
2224
2225
2225
// Simple case of one pending HTLC to HTLC-Timeout (note that the HTLC-Timeout is not
2226
2226
// broadcasted until we reach the timelock time).
2227
- nodes[1].node.force_close_channel(&chan_2.2, &nodes[2].node.get_our_node_id()).unwrap();
2227
+ nodes[1].node.force_close_broadcasting_latest_txn(&chan_2.2, &nodes[2].node.get_our_node_id()).unwrap();
2228
2228
check_closed_broadcast!(nodes[1], true);
2229
2229
check_added_monitors!(nodes[1], 1);
2230
2230
{
@@ -2264,7 +2264,7 @@ fn channel_monitor_network_test() {
2264
2264
2265
2265
// nodes[3] gets the preimage, but nodes[2] already disconnected, resulting in a nodes[2]
2266
2266
// HTLC-Timeout and a nodes[3] claim against it (+ its own announces)
2267
- nodes[2].node.force_close_channel(&chan_3.2, &nodes[3].node.get_our_node_id()).unwrap();
2267
+ nodes[2].node.force_close_broadcasting_latest_txn(&chan_3.2, &nodes[3].node.get_our_node_id()).unwrap();
2268
2268
check_added_monitors!(nodes[2], 1);
2269
2269
check_closed_broadcast!(nodes[2], true);
2270
2270
let node2_commitment_txid;
@@ -3403,7 +3403,7 @@ fn test_htlc_ignore_latest_remote_commitment() {
3403
3403
create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
3404
3404
3405
3405
route_payment(&nodes[0], &[&nodes[1]], 10000000);
3406
- nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
3406
+ nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
3407
3407
connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
3408
3408
check_closed_broadcast!(nodes[0], true);
3409
3409
check_added_monitors!(nodes[0], 1);
@@ -3466,7 +3466,7 @@ fn test_force_close_fail_back() {
3466
3466
// state or updated nodes[1]' state. Now force-close and broadcast that commitment/HTLC
3467
3467
// transaction and ensure nodes[1] doesn't fail-backwards (this was originally a bug!).
3468
3468
3469
- nodes[2].node.force_close_channel(&payment_event.commitment_msg.channel_id, &nodes[1].node.get_our_node_id()).unwrap();
3469
+ nodes[2].node.force_close_broadcasting_latest_txn(&payment_event.commitment_msg.channel_id, &nodes[1].node.get_our_node_id()).unwrap();
3470
3470
check_closed_broadcast!(nodes[2], true);
3471
3471
check_added_monitors!(nodes[2], 1);
3472
3472
check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed);
@@ -4793,7 +4793,7 @@ fn test_claim_sizeable_push_msat() {
4793
4793
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
4794
4794
4795
4795
let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 98_000_000, InitFeatures::known(), InitFeatures::known());
4796
- nodes[1].node.force_close_channel(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
4796
+ nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
4797
4797
check_closed_broadcast!(nodes[1], true);
4798
4798
check_added_monitors!(nodes[1], 1);
4799
4799
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
@@ -4822,7 +4822,7 @@ fn test_claim_on_remote_sizeable_push_msat() {
4822
4822
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
4823
4823
4824
4824
let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 98_000_000, InitFeatures::known(), InitFeatures::known());
4825
- nodes[0].node.force_close_channel(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
4825
+ nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
4826
4826
check_closed_broadcast!(nodes[0], true);
4827
4827
check_added_monitors!(nodes[0], 1);
4828
4828
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
@@ -8365,7 +8365,7 @@ fn test_manually_accept_inbound_channel_request() {
8365
8365
_ => panic!("Unexpected event"),
8366
8366
}
8367
8367
8368
- nodes[1].node.force_close_channel(&temp_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
8368
+ nodes[1].node.force_close_broadcasting_latest_txn(&temp_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
8369
8369
8370
8370
let close_msg_ev = nodes[1].node.get_and_clear_pending_msg_events();
8371
8371
assert_eq!(close_msg_ev.len(), 1);
@@ -8400,7 +8400,7 @@ fn test_manually_reject_inbound_channel_request() {
8400
8400
let events = nodes[1].node.get_and_clear_pending_events();
8401
8401
match events[0] {
8402
8402
Event::OpenChannelRequest { temporary_channel_id, .. } => {
8403
- nodes[1].node.force_close_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
8403
+ nodes[1].node.force_close_broadcasting_latest_txn(&temporary_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
8404
8404
}
8405
8405
_ => panic!("Unexpected event"),
8406
8406
}
@@ -9053,7 +9053,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
9053
9053
force_closing_node = 1;
9054
9054
counterparty_node = 0;
9055
9055
}
9056
- nodes[force_closing_node].node.force_close_channel(&chan_ab.2, &nodes[counterparty_node].node.get_our_node_id()).unwrap();
9056
+ nodes[force_closing_node].node.force_close_broadcasting_latest_txn(&chan_ab.2, &nodes[counterparty_node].node.get_our_node_id()).unwrap();
9057
9057
check_closed_broadcast!(nodes[force_closing_node], true);
9058
9058
check_added_monitors!(nodes[force_closing_node], 1);
9059
9059
check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed);
@@ -9489,7 +9489,7 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
9489
9489
nodes[1].node.peer_disconnected(&nodes[2].node.get_our_node_id(), false);
9490
9490
nodes[2].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
9491
9491
9492
- nodes[1].node.force_close_channel(&channel_id, &nodes[2].node.get_our_node_id()).unwrap();
9492
+ nodes[1].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[2].node.get_our_node_id()).unwrap();
9493
9493
check_closed_broadcast!(nodes[1], true);
9494
9494
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
9495
9495
check_added_monitors!(nodes[1], 1);
0 commit comments