@@ -735,15 +735,34 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 	/// Note that this writes roughly one line per channel for which we have a liquidity estimate,
 	/// which may be a substantial amount of log output.
 	pub fn debug_log_liquidity_stats(&self) {
+		let now = T::now();
+
 		let graph = self.network_graph.read_only();
 		for (scid, liq) in self.channel_liquidities.iter() {
 			if let Some(chan_debug) = graph.channels().get(scid) {
 				let log_direction = |source, target| {
 					if let Some((directed_info, _)) = chan_debug.as_directed_to(target) {
 						let amt = directed_info.effective_capacity().as_msat();
 						let dir_liq = liq.as_directed(source, target, amt, &self.params);
-						log_debug!(self.logger, "Liquidity from {:?} to {:?} via {} is in the range ({}, {})",
-							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat());
+
+						let buckets = HistoricalMinMaxBuckets {
+							min_liquidity_offset_history: &dir_liq.min_liquidity_offset_history,
+							max_liquidity_offset_history: &dir_liq.max_liquidity_offset_history,
+						};
+						let (min_buckets, max_buckets, _) = buckets.get_decayed_buckets(now,
+							*dir_liq.last_updated, self.params.historical_no_updates_half_life);
+
+						log_debug!(self.logger, core::concat!(
+							"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
+							"\tHistorical min liquidity octile relative probabilities: {} {} {} {} {} {} {} {}\n",
+							"\tHistorical max liquidity octile relative probabilities: {} {} {} {} {} {} {} {}"),
+							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat(),
+							min_buckets[0], min_buckets[1], min_buckets[2], min_buckets[3],
+							min_buckets[4], min_buckets[5], min_buckets[6], min_buckets[7],
+							// Note that the liquidity buckets are an offset from the edge, so we
+							// invert the max order to get the probabilities from zero.
+							max_buckets[7], max_buckets[6], max_buckets[5], max_buckets[4],
+							max_buckets[3], max_buckets[2], max_buckets[1], max_buckets[0]);
 					} else {
 						log_debug!(self.logger, "No amount known for SCID {} from {:?} to {:?}", scid, source, target);
 					}
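The logging hunk above leans on `get_decayed_buckets` to age the histogram before printing. As a rough mental model only (a minimal standalone sketch of half-life decay under assumed internals, not this PR's actual implementation; `decay_buckets` is a hypothetical helper), each `u16` bucket halves once per elapsed half-life:

```rust
/// Hypothetical sketch: halve each of the 8 octile buckets once per elapsed
/// half-life, so all recorded history fades to zero over time.
fn decay_buckets(buckets: [u16; 8], elapsed_secs: u64, half_life_secs: u64) -> [u16; 8] {
    let half_lives = elapsed_secs / half_life_secs;
    let mut decayed = buckets;
    for b in decayed.iter_mut() {
        // u16::MAX < 2^16, so 16 or more half-lives always yield zero; guard
        // the shift to avoid an overflowing shift amount.
        *b = if half_lives >= 16 { 0 } else { *b >> half_lives };
    }
    decayed
}

fn main() {
    let buckets = [32u16, 0, 0, 0, 0, 0, 0, 32];
    assert_eq!(decay_buckets(buckets, 0, 10), buckets);                           // no time passed
    assert_eq!(decay_buckets(buckets, 10, 10), [16, 0, 0, 0, 0, 0, 0, 16]);       // one half-life
    assert_eq!(decay_buckets(buckets, 160, 10), [0; 8]);                          // 16 half-lives
}
```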
@@ -774,6 +793,53 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 		None
 	}
 
+	/// Query the historical estimated minimum and maximum liquidity available for sending a
+	/// payment over the channel with `scid` towards the given `target` node.
+	///
+	/// Returns two sets of 8 buckets. The first set describes the octiles for lower-bound
+	/// liquidity estimates, the second set describes the octiles for upper-bound liquidity
+	/// estimates. Each bucket describes the relative frequency at which we've seen a liquidity
+	/// bound in the octile relative to the channel's total capacity, on an arbitrary scale.
+	/// Because the values are slowly decayed, more recent data points are weighted more heavily
+	/// than older ones.
+	///
+	/// When scoring, the estimated probability that an upper or lower bound lies in a given
+	/// octile relative to the channel's total capacity is calculated by dividing that bucket's
+	/// value by the total of all buckets for the given bound.
+	///
+	/// For example, a value of `[0, 0, 0, 0, 0, 0, 0, 32]` indicates that we believe the
+	/// probability of a bound being in the top octile to be 100%, and have never (recently) seen
+	/// it in any other octile. A value of `[31, 0, 0, 0, 0, 0, 0, 32]` indicates we've seen the
+	/// bound in both the top and bottom octiles, with roughly similar (recent) frequency.
+	///
+	/// Because the data points are decayed slowly over time, values will eventually decay to
+	/// `Some(([0; 8], [0; 8]))`.
+	pub fn historical_estimated_channel_liquidity_probabilities(&self, scid: u64, target: &NodeId)
+	-> Option<([u16; 8], [u16; 8])> {
+		let graph = self.network_graph.read_only();
+
+		if let Some(chan) = graph.channels().get(&scid) {
+			if let Some(liq) = self.channel_liquidities.get(&scid) {
+				if let Some((directed_info, source)) = chan.as_directed_to(target) {
+					let amt = directed_info.effective_capacity().as_msat();
+					let dir_liq = liq.as_directed(source, target, amt, &self.params);
+
+					let buckets = HistoricalMinMaxBuckets {
+						min_liquidity_offset_history: &dir_liq.min_liquidity_offset_history,
+						max_liquidity_offset_history: &dir_liq.max_liquidity_offset_history,
+					};
+					let (min_buckets, mut max_buckets, _) = buckets.get_decayed_buckets(T::now(),
+						*dir_liq.last_updated, self.params.historical_no_updates_half_life);
+					// Note that the liquidity buckets are an offset from the edge, so we invert
+					// the max order to get the probabilities from zero.
+					max_buckets.reverse();
+					return Some((min_buckets, max_buckets));
+				}
+			}
+		}
+		None
+	}
+
 	/// Marks the node with the given `node_id` as banned, i.e.,
 	/// it will be avoided during path finding.
 	pub fn add_banned(&mut self, node_id: &NodeId) {
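The new accessor's doc comment defines an octile's probability as its bucket value divided by the total across all buckets for that bound. A hedged usage sketch of a caller doing that normalization (the sample array stands in for a real `historical_estimated_channel_liquidity_probabilities` result; `octile_probabilities` is a hypothetical helper, not part of this PR):

```rust
/// Hypothetical helper: convert raw bucket weights into estimated octile
/// probabilities by normalizing against their sum.
fn octile_probabilities(buckets: [u16; 8]) -> Option<[f64; 8]> {
    let total: u32 = buckets.iter().map(|b| *b as u32).sum();
    if total == 0 {
        return None; // fully decayed: no usable history
    }
    let mut probs = [0.0f64; 8];
    for (p, b) in probs.iter_mut().zip(buckets.iter()) {
        *p = *b as f64 / total as f64;
    }
    Some(probs)
}

fn main() {
    // Mirrors the `[31, 0, 0, 0, 0, 0, 0, 32]` min-bucket state from the
    // tests below: one decayed failure point plus one newer success point.
    let min_buckets = [31u16, 0, 0, 0, 0, 0, 0, 32];
    let probs = octile_probabilities(min_buckets).unwrap();
    // ~49% in the bottom octile, ~51% in the top one.
    assert!((probs[0] - 31.0 / 63.0).abs() < 1e-9);
    assert!((probs[7] - 32.0 / 63.0).abs() < 1e-9);
}
```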
@@ -2688,19 +2754,32 @@ mod tests {
 		};
 		// With no historical data the normal liquidity penalty calculation is used.
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 47);
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			None);
 
 		scorer.payment_path_failed(&payment_path_for_amount(1).iter().collect::<Vec<_>>(), 42);
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 2048);
+		// The "it failed" increment is 32, so the probability should lie fully in the first
+		// octile.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([32, 0, 0, 0, 0, 0, 0, 0], [32, 0, 0, 0, 0, 0, 0, 0])));
 
 		// Even after we tell the scorer we definitely have enough available liquidity, it will
 		// still remember that there was some failure in the past, and assign a non-0 penalty.
 		scorer.payment_path_failed(&payment_path_for_amount(1000).iter().collect::<Vec<_>>(), 43);
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 198);
+		// The first octile should have decayed just slightly while the last octile gains a new
+		// data point.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([31, 0, 0, 0, 0, 0, 0, 32], [31, 0, 0, 0, 0, 0, 0, 32])));
 
 		// Advance the time forward 16 half-lives (which the docs claim will ensure all data is
 		// gone), and check that we're back to where we started.
 		SinceEpoch::advance(Duration::from_secs(10 * 16));
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 47);
+		// Once fully decayed we still have data, but it's all zeros. In the future we may remove
+		// the data entirely instead.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([0; 8], [0; 8])));
 	}
 
 	#[test]
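The final assertion rests on the "16 half-lives" rule from the docs. Assuming the test scorer's historical half-life is 10 seconds (inferred here from the `10 * 16`-second advance; the parameter itself is set outside this hunk), the arithmetic checks out:

```rust
fn main() {
    let half_life_secs = 10u64; // assumed test parameter, not shown in this hunk
    let elapsed_secs = 10 * 16u64;
    assert_eq!(elapsed_secs / half_life_secs, 16);
    // Even a maximal u16 bucket value is below 2^16, so sixteen halvings
    // always reach zero, matching the `Some(([0; 8], [0; 8]))` assertion.
    assert_eq!(65535u64 >> 16, 0);
}
```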