@@ -56,8 +56,8 @@ use crate::ln::channel_state::ChannelDetails;
use crate::types::features::{Bolt12InvoiceFeatures, ChannelFeatures, ChannelTypeFeatures, InitFeatures, NodeFeatures};
#[cfg(any(feature = "_test_utils", test))]
use crate::types::features::Bolt11InvoiceFeatures;
- use crate::routing::router::{BlindedTail, InFlightHtlcs, Path, Payee, PaymentParameters, RouteParameters, RouteParametersConfig, Router, FixedRouter, Route };
- use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, NextPacketDetails, invalid_payment_err_data };
+ use crate::routing::router::{BlindedTail, FixedRouter, InFlightHtlcs, Path, Payee, PaymentParameters, Route, RouteHop, RouteParameters, RouteParametersConfig, Router };
+ use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, invalid_payment_err_data, NextPacketDetails };
use crate::ln::msgs;
use crate::ln::onion_utils::{self};
use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
@@ -626,10 +626,17 @@ impl Readable for InterceptId {
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
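+ // Identifies a single inbound HTLC by the channel it arrived over and its id within that
+ // channel. `SentHTLCId::TrampolineForward` below stores one of these per inbound MPP part.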
+ pub(crate) struct PreviousHopIdData {
+ 	short_channel_id: u64,
+ 	htlc_id: u64,
+ }
+
+ #[derive(Clone, Debug, PartialEq, Eq, Hash)]
/// Uniquely describes an HTLC by its source. Just the guaranteed-unique subset of [`HTLCSource`].
pub(crate) enum SentHTLCId {
	PreviousHopData { short_channel_id: u64, htlc_id: u64 },
	OutboundRoute { session_priv: [u8; SECRET_KEY_SIZE] },
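+ 	// Unlike the variants above, this one holds a `Vec`, which is why the enum derives
+ 	// `Clone` but not `Copy`.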
+ 	TrampolineForward { session_priv: [u8; SECRET_KEY_SIZE], previous_hop_data: Vec<PreviousHopIdData> }
}

impl SentHTLCId {
	pub(crate) fn from_source(source: &HTLCSource) -> Self {
@@ -640,9 +647,20 @@ impl SentHTLCId {
			},
			HTLCSource::OutboundRoute { session_priv, .. } =>
				Self::OutboundRoute { session_priv: session_priv.secret_bytes() },
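+ 			// Reduce each `HTLCPreviousHopData` to the (short_channel_id, htlc_id) pair
+ 			// that uniquely identifies it.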
+ 			HTLCSource::TrampolineForward { previous_hop_data, session_priv, .. } => Self::TrampolineForward {
+ 				session_priv: session_priv.secret_bytes(),
+ 				previous_hop_data: previous_hop_data.iter().map(|hop_data| PreviousHopIdData {
+ 					short_channel_id: hop_data.short_channel_id,
+ 					htlc_id: hop_data.htlc_id,
+ 				}).collect(),
+ 			},
		}
	}
}
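+ // `PreviousHopIdData` is serialized as a TLV stream with both fields required.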
+ impl_writeable_tlv_based!(PreviousHopIdData, {
+ 	(0, short_channel_id, required),
+ 	(2, htlc_id, required),
+ });
impl_writeable_tlv_based_enum!(SentHTLCId,
	(0, PreviousHopData) => {
		(0, short_channel_id, required),
@@ -651,6 +669,10 @@ impl_writeable_tlv_based_enum!(SentHTLCId,
	(2, OutboundRoute) => {
		(0, session_priv, required),
	},
+ 	(4, TrampolineForward) => {
+ 		(0, session_priv, required),
+ 		(2, previous_hop_data, required_vec),
+ 	},
);

mod fuzzy_channelmanager {
@@ -661,6 +683,16 @@ mod fuzzy_channelmanager {
	#[derive(Clone, Debug, PartialEq, Eq)]
	pub enum HTLCSource {
		PreviousHopData(HTLCPreviousHopData),
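+ 		/// The HTLC belongs to a payment we are forwarding as a Trampoline node, meaning
+ 		/// we constructed our own route to the next Trampoline hop.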
+ 		TrampolineForward {
+ 			/// We might be forwarding an incoming payment that was received over MPP, and therefore
+ 			/// need to store the vector of corresponding `HTLCPreviousHopData` values.
+ 			previous_hop_data: Vec<HTLCPreviousHopData>,
+ 			incoming_trampoline_shared_secret: [u8; 32],
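+ 			/// The hops of the route we constructed to the next Trampoline node, which,
+ 			/// together with `session_priv`, let us decode failures that occur along them.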
+ 			hops: Vec<RouteHop>,
+ 			/// In order to decode inter-Trampoline errors, we need to store the session_priv key
+ 			/// given we're effectively creating new outbound routes.
+ 			session_priv: SecretKey,
+ 		},
		OutboundRoute {
			path: Path,
			session_priv: SecretKey,
@@ -712,6 +744,13 @@ impl core::hash::Hash for HTLCSource {
				payment_id.hash(hasher);
				first_hop_htlc_msat.hash(hasher);
			},
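+ 			// Commit to the variant by hashing a discriminant byte ahead of the fields.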
+ 			HTLCSource::TrampolineForward { previous_hop_data, incoming_trampoline_shared_secret, hops, session_priv } => {
+ 				2u8.hash(hasher);
+ 				previous_hop_data.hash(hasher);
+ 				incoming_trampoline_shared_secret.hash(hasher);
+ 				hops.hash(hasher);
+ 				session_priv[..].hash(hasher);
+ 			},
		}
	}
}
@@ -6999,7 +7038,7 @@ where
		// Note that we MUST NOT end up calling methods on self.chain_monitor here - we're called
		// from block_connected which may run during initialization prior to the chain_monitor
		// being fully configured. See the docs for `ChannelManagerReadArgs` for more.
- 		let mut push_forward_event;
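+ 		// Initialized up front because the `TrampolineForward` arm below only assigns this
+ 		// inside a loop over the previous hops, which may run zero times.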
+ 		let mut push_forward_event = true;
		match source {
			HTLCSource::OutboundRoute { ref path, ref session_priv, ref payment_id, .. } => {
				push_forward_event = self.pending_outbound_payments.fail_htlc(source, payment_hash, onion_error, path,
@@ -7056,6 +7095,65 @@ where
					failed_next_destination: destination,
				}, None));
			},
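+ 			// A Trampoline forward failed: fail each inbound HTLC (one per MPP part) back
+ 			// over the channel it arrived on.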
+ 			HTLCSource::TrampolineForward { previous_hop_data, incoming_trampoline_shared_secret, .. } => {
+ 				// todo: what do we want to do with this given we do not wish to propagate it directly?
+ 				let _decoded_onion_failure = onion_error.decode_onion_failure(&self.secp_ctx, &self.logger, &source);
+
+ 				for current_hop_data in previous_hop_data {
+ 					let incoming_packet_shared_secret = current_hop_data.incoming_packet_shared_secret;
+ 					let channel_id = current_hop_data.channel_id;
+ 					let short_channel_id = current_hop_data.short_channel_id;
+ 					let htlc_id = current_hop_data.htlc_id;
+ 					let blinded_failure = current_hop_data.blinded_failure;
+ 					log_trace!(
+ 						WithContext::from(&self.logger, None, Some(channel_id), Some(*payment_hash)),
+ 						"Failing {}HTLC with payment_hash {} backwards from us following Trampoline forwarding failure: {:?}",
+ 						if blinded_failure.is_some() { "blinded " } else { "" }, &payment_hash, onion_error
+ 					);
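+ 					// The failure packet depends on whether the inbound HTLC used a blinded
+ 					// path: as introduction node we send an encrypted `invalid_onion_blinding`
+ 					// error, as a node inside the blinded path we respond with a malformed
+ 					// HTLC, and otherwise we return a temporary Trampoline failure.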
+ 					let failure = match blinded_failure {
+ 						Some(BlindedFailure::FromIntroductionNode) => {
+ 							let blinded_onion_error = HTLCFailReason::reason(LocalHTLCFailureReason::InvalidOnionBlinding, vec![0; 32]);
+ 							let err_packet = blinded_onion_error.get_encrypted_failure_packet(
+ 								&incoming_packet_shared_secret, &Some(incoming_trampoline_shared_secret.clone())
+ 							);
+ 							HTLCForwardInfo::FailHTLC { htlc_id, err_packet }
+ 						},
+ 						Some(BlindedFailure::FromBlindedNode) => {
+ 							HTLCForwardInfo::FailMalformedHTLC {
+ 								htlc_id,
+ 								failure_code: LocalHTLCFailureReason::InvalidOnionBlinding.failure_code(),
+ 								sha256_of_onion: [0; 32]
+ 							}
+ 						},
+ 						None => {
+ 							let err_packet = HTLCFailReason::reason(LocalHTLCFailureReason::TemporaryTrampolineFailure, Vec::new())
+ 								.get_encrypted_failure_packet(&incoming_packet_shared_secret, &Some(incoming_trampoline_shared_secret.clone()));
+ 							HTLCForwardInfo::FailHTLC { htlc_id, err_packet }
+ 						}
+ 					};
+
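+ 					// Queue the failure on the inbound channel's SCID; only wake the
+ 					// forwarding task if nothing else was already pending.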
+ 					push_forward_event = self.decode_update_add_htlcs.lock().unwrap().is_empty();
+ 					let mut forward_htlcs = self.forward_htlcs.lock().unwrap();
+ 					push_forward_event &= forward_htlcs.is_empty();
+
+ 					match forward_htlcs.entry(short_channel_id) {
+ 						hash_map::Entry::Occupied(mut entry) => {
+ 							entry.get_mut().push(failure);
+ 						},
+ 						hash_map::Entry::Vacant(entry) => {
+ 							entry.insert(vec!(failure));
+ 						}
+ 					}
+
+ 					mem::drop(forward_htlcs);
+
+ 					let mut pending_events = self.pending_events.lock().unwrap();
+ 					pending_events.push_back((events::Event::HTLCHandlingFailed {
+ 						prev_channel_id: channel_id,
+ 						failed_next_destination: destination.clone(),
+ 					}, None));
+ 				}
+ 			},
		}
		push_forward_event
	}
@@ -7514,6 +7612,63 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
					}
				});
			},
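+ 			// A Trampoline forward was fulfilled downstream: claim every inbound HTLC
+ 			// (one per MPP part) that funded it from its previous hop.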
+ 			HTLCSource::TrampolineForward { previous_hop_data, .. } => {
+ 				for current_previous_hop_data in previous_hop_data {
+ 					let prev_channel_id = current_previous_hop_data.channel_id;
+ 					let prev_user_channel_id = current_previous_hop_data.user_channel_id;
+ 					let prev_node_id = current_previous_hop_data.counterparty_node_id;
+ 					let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&current_previous_hop_data);
+ 					self.claim_funds_from_hop(current_previous_hop_data, payment_preimage, None,
+ 						|htlc_claim_value_msat, definitely_duplicate| {
+ 							let chan_to_release = Some(EventUnblockedChannel {
+ 								counterparty_node_id: next_channel_counterparty_node_id,
+ 								funding_txo: next_channel_outpoint,
+ 								channel_id: next_channel_id,
+ 								blocking_action: completed_blocker,
+ 							});
+
+ 							if definitely_duplicate && startup_replay {
+ 								// On startup we may get redundant claims which are related to
+ 								// monitor updates still in flight. In that case, we shouldn't
+ 								// immediately free, but instead let that monitor update complete
+ 								// in the background.
+ 								(None, None)
+ 							} else if definitely_duplicate {
+ 								if let Some(other_chan) = chan_to_release {
+ 									(Some(MonitorUpdateCompletionAction::FreeOtherChannelImmediately {
+ 										downstream_counterparty_node_id: other_chan.counterparty_node_id,
+ 										downstream_funding_outpoint: other_chan.funding_txo,
+ 										downstream_channel_id: other_chan.channel_id,
+ 										blocking_action: other_chan.blocking_action,
+ 									}), None)
+ 								} else { (None, None) }
+ 							} else {
+ 								let total_fee_earned_msat = if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
+ 									if let Some(claimed_htlc_value) = htlc_claim_value_msat {
+ 										Some(claimed_htlc_value - forwarded_htlc_value)
+ 									} else { None }
+ 								} else { None };
+ 								debug_assert!(skimmed_fee_msat <= total_fee_earned_msat,
+ 									"skimmed_fee_msat must always be included in total_fee_earned_msat");
+ 								(Some(MonitorUpdateCompletionAction::EmitEventAndFreeOtherChannel {
+ 									event: events::Event::PaymentForwarded {
+ 										prev_channel_id: Some(prev_channel_id),
+ 										next_channel_id: Some(next_channel_id),
+ 										prev_user_channel_id,
+ 										next_user_channel_id,
+ 										prev_node_id,
+ 										next_node_id: Some(next_channel_counterparty_node_id),
+ 										total_fee_earned_msat,
+ 										skimmed_fee_msat,
+ 										claim_from_onchain_tx: from_onchain,
+ 										outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
+ 									},
+ 									downstream_counterparty_and_funding_outpoint: chan_to_release,
+ 								}), None)
+ 							}
+ 						});
+ 				}
+ 			},
		}
	}

@@ -13163,6 +13318,24 @@ impl Readable for HTLCSource {
				})
			}
			1 => Ok(HTLCSource::PreviousHopData(Readable::read(reader)?)),
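+ 			// Discriminant 2: a Trampoline forward, read back from the same TLV stream
+ 			// written in the `Writeable` implementation below.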
+ 			2 => {
+ 				let mut previous_hop_data = Vec::new();
+ 				let mut incoming_trampoline_shared_secret: crate::util::ser::RequiredWrapper<[u8; 32]> = crate::util::ser::RequiredWrapper(None);
+ 				let mut session_priv: crate::util::ser::RequiredWrapper<SecretKey> = crate::util::ser::RequiredWrapper(None);
+ 				let mut hops = Vec::new();
+ 				read_tlv_fields!(reader, {
+ 					(0, previous_hop_data, required_vec),
+ 					(2, incoming_trampoline_shared_secret, required),
+ 					(4, session_priv, required),
+ 					(6, hops, required_vec),
+ 				});
+ 				Ok(HTLCSource::TrampolineForward {
+ 					previous_hop_data,
+ 					incoming_trampoline_shared_secret: incoming_trampoline_shared_secret.0.unwrap(),
+ 					hops,
+ 					session_priv: session_priv.0.unwrap(),
+ 				})
+ 			},
			_ => Err(DecodeError::UnknownRequiredFeature),
		}
	}
@@ -13188,6 +13361,17 @@ impl Writeable for HTLCSource {
				1u8.write(writer)?;
				field.write(writer)?;
			}
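+ 			// Discriminant 2, followed by the same TLV stream the `Readable` implementation
+ 			// above parses.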
+ 			HTLCSource::TrampolineForward { previous_hop_data: previous_hop_data_ref, ref incoming_trampoline_shared_secret, ref session_priv, hops: hops_ref } => {
+ 				2u8.write(writer)?;
+ 				let previous_hop_data = previous_hop_data_ref.clone();
+ 				let hops = hops_ref.clone();
+ 				write_tlv_fields!(writer, {
+ 					(0, previous_hop_data, required_vec),
+ 					(2, incoming_trampoline_shared_secret, required),
+ 					(4, session_priv, required),
+ 					(6, hops, required_vec),
+ 				});
+ 			}
		}
		Ok(())
	}
@@ -14368,6 +14552,55 @@ where
					} else { true }
				});
			},
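+ 			// The ChannelMonitor got persisted with this Trampoline forward's outcome, so
+ 			// drop any stale copies of its inbound HTLCs from our pending queues.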
+ 			HTLCSource::TrampolineForward { previous_hop_data, .. } => {
+ 				for current_previous_hop_data in previous_hop_data {
+ 					let pending_forward_matches_htlc = |info: &PendingAddHTLCInfo| {
+ 						info.prev_funding_outpoint == current_previous_hop_data.outpoint &&
+ 							info.prev_htlc_id == current_previous_hop_data.htlc_id
+ 					};
+ 					// The ChannelMonitor is now responsible for this HTLC's
+ 					// failure/success and will let us know what its outcome is. If we
+ 					// still have an entry for this HTLC in `forward_htlcs` or
+ 					// `pending_intercepted_htlcs`, we were apparently not persisted after
+ 					// the monitor was when forwarding the payment.
+ 					decode_update_add_htlcs.retain(|scid, update_add_htlcs| {
+ 						update_add_htlcs.retain(|update_add_htlc| {
+ 							let matches = *scid == current_previous_hop_data.short_channel_id &&
+ 								update_add_htlc.htlc_id == current_previous_hop_data.htlc_id;
+ 							if matches {
+ 								log_info!(logger, "Removing pending to-decode HTLC with hash {} as it was forwarded to the closed channel {}",
+ 									&htlc.payment_hash, &monitor.channel_id());
+ 							}
+ 							!matches
+ 						});
+ 						!update_add_htlcs.is_empty()
+ 					});
+ 					forward_htlcs.retain(|_, forwards| {
+ 						forwards.retain(|forward| {
+ 							if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {
+ 								if pending_forward_matches_htlc(&htlc_info) {
+ 									log_info!(logger, "Removing pending to-forward HTLC with hash {} as it was forwarded to the closed channel {}",
+ 										&htlc.payment_hash, &monitor.channel_id());
+ 									false
+ 								} else { true }
+ 							} else { true }
+ 						});
+ 						!forwards.is_empty()
+ 					});
+ 					pending_intercepted_htlcs.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
+ 						if pending_forward_matches_htlc(&htlc_info) {
+ 							log_info!(logger, "Removing pending intercepted HTLC with hash {} as it was forwarded to the closed channel {}",
+ 								&htlc.payment_hash, &monitor.channel_id());
+ 							pending_events_read.retain(|(event, _)| {
+ 								if let Event::HTLCIntercepted { intercept_id: ev_id, .. } = event {
+ 									intercepted_id != ev_id
+ 								} else { true }
+ 							});
+ 							false
+ 						} else { true }
+ 					});
+ 				}
+ 			}
			HTLCSource::OutboundRoute { payment_id, session_priv, path, .. } => {
				if let Some(preimage) = preimage_opt {
					let pending_events = Mutex::new(pending_events_read);