From b1d0289c935e2c0d318fa486e70aa1687697eab2 Mon Sep 17 00:00:00 2001
From: JesusEV
Date: Tue, 2 Jul 2024 18:48:53 +0200
Subject: [PATCH] Remove overloaded compute_gradient function

---
 models/eprop_iaf.cpp                   | 75 ++-----------------------
 models/eprop_iaf.h                     | 11 +---
 models/eprop_iaf_adapt.cpp             | 77 ++------------------------
 models/eprop_iaf_adapt.h               | 11 +---
 models/eprop_iaf_psc_delta.cpp         | 75 ++-----------------------
 models/eprop_iaf_psc_delta.h           | 11 +---
 models/eprop_readout.cpp               | 68 ++---------------------
 models/eprop_readout.h                 | 11 +---
 models/eprop_synapse.h                 | 12 +---
 nestkernel/eprop_archiving_node.h      | 18 ++++++
 nestkernel/eprop_archiving_node_impl.h | 41 ++++++++++++++
 nestkernel/node.cpp                    | 15 +----
 nestkernel/node.h                      | 17 +-----
 13 files changed, 83 insertions(+), 359 deletions(-)

diff --git a/models/eprop_iaf.cpp b/models/eprop_iaf.cpp
index ca37f77c9a9..2ea24569ed3 100644
--- a/models/eprop_iaf.cpp
+++ b/models/eprop_iaf.cpp
@@ -415,6 +415,7 @@ eprop_iaf::handle( DataLoggingRequest& e )
 void
 eprop_iaf::compute_gradient( const long t_spike,
   const long t_spike_previous,
+  std::queue< double >& z_previous_buffer,
   double& z_previous,
   double& z_bar,
   double& e_bar,
@@ -433,87 +434,19 @@ eprop_iaf::compute_gradient( const long t_spike,
   const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
   const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
 
-  auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
-
-  const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
-
-  for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
-  {
-    z = z_previous;
-    z_previous = z_current;
-    z_current = 0.0;
-
-    psi = eprop_hist_it->surrogate_gradient_;
-    L = eprop_hist_it->learning_signal_;
-
-    z_bar = V_.P_v_m_ * z_bar + V_.P_z_in_ * z;
-    e = psi * z_bar;
-    e_bar = P_.kappa_ * e_bar + ( 1.0 - P_.kappa_ ) * e;
-
-    if ( optimize_each_step )
-    {
-      grad = L * e_bar;
-      weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t, grad, weight );
-    }
-    else
-    {
-      grad += L * e_bar;
-    }
-  }
-
-  if ( not optimize_each_step )
-  {
-    weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t_compute_until, grad, weight );
-  }
-
-  const int power = t_spike - ( t_spike_previous + P_.eprop_isi_trace_cutoff_ );
-
-  if ( power > 0 )
-  {
-    z_bar *= std::pow( V_.P_v_m_, power );
-    e_bar *= std::pow( P_.kappa_, power );
-  }
-}
-
-void
-eprop_iaf::compute_gradient( const long t_spike,
-  const long t_spike_previous,
-  std::queue< double >& z_previous_buffer,
-  double& z_bar,
-  double& e_bar,
-  double& epsilon,
-  double& weight,
-  const CommonSynapseProperties& cp,
-  WeightOptimizer* optimizer )
-{
-  double e = 0.0;    // eligibility trace
-  double z = 0.0;    // spiking variable
-  double psi = 0.0;  // surrogate gradient
-  double L = 0.0;    // learning signal
-  double grad = 0.0; // gradient
-
-  const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
-  const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
-
   auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );
 
   const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
 
   for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
   {
-    if ( !z_previous_buffer.empty() )
-    {
-      z = z_previous_buffer.front();
-      z_previous_buffer.pop();
-    }
-
-    if ( t_spike - t > 1 )
+    if ( P_.delay_total_ > 1 )
     {
-      z_previous_buffer.push( 0.0 );
+      update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
     else
     {
-      z_previous_buffer.push( 1.0 );
+      update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
 
     psi = eprop_hist_it->surrogate_gradient_;
diff --git a/models/eprop_iaf.h b/models/eprop_iaf.h
index fa166f53fb3..461f3253503 100644
--- a/models/eprop_iaf.h
+++ b/models/eprop_iaf.h
@@ -328,19 +328,10 @@ class eprop_iaf : public EpropArchivingNodeRecurrent
 
   void update( Time const&, const long, const long ) override;
 
-  void compute_gradient( const long t_spike,
-    const long t_spike_previous,
-    double& z_previous,
-    double& z_bar,
-    double& e_bar,
-    double& epsilon,
-    double& weight,
-    const CommonSynapseProperties& cp,
-    WeightOptimizer* optimizer ) override;
-
   void compute_gradient( const long t_spike,
     const long t_spike_previous,
     std::queue< double >& z_previous_buffer,
+    double& z_previous,
     double& z_bar,
     double& e_bar,
     double& epsilon,
diff --git a/models/eprop_iaf_adapt.cpp b/models/eprop_iaf_adapt.cpp
index e05f1ebd5da..61dffc255c9 100644
--- a/models/eprop_iaf_adapt.cpp
+++ b/models/eprop_iaf_adapt.cpp
@@ -453,6 +453,7 @@ eprop_iaf_adapt::handle( DataLoggingRequest& e )
 void
 eprop_iaf_adapt::compute_gradient( const long t_spike,
   const long t_spike_previous,
+  std::queue< double >& z_previous_buffer,
   double& z_previous,
   double& z_bar,
   double& e_bar,
@@ -471,89 +472,19 @@ eprop_iaf_adapt::compute_gradient( const long t_spike,
   const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
   const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
 
-  auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
-
-  const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
-
-  for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
-  {
-    z = z_previous;
-    z_previous = z_current;
-    z_current = 0.0;
-
-    psi = eprop_hist_it->surrogate_gradient_;
-    L = eprop_hist_it->learning_signal_;
-
-    z_bar = V_.P_v_m_ * z_bar + V_.P_z_in_ * z;
-    e = psi * ( z_bar - P_.adapt_beta_ * epsilon );
-    epsilon = V_.P_adapt_ * epsilon + e;
-    e_bar = P_.kappa_ * e_bar + ( 1.0 - P_.kappa_ ) * e;
-
-    if ( optimize_each_step )
-    {
-      grad = L * e_bar;
-      weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t, grad, weight );
-    }
-    else
-    {
-      grad += L * e_bar;
-    }
-  }
-
-  if ( not optimize_each_step )
-  {
-    weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t_compute_until, grad, weight );
-  }
-
-  const int power = t_spike - ( t_spike_previous + P_.eprop_isi_trace_cutoff_ );
-
-  if ( power > 0 )
-  {
-    z_bar *= std::pow( V_.P_v_m_, power );
-    e_bar *= std::pow( P_.kappa_, power );
-    epsilon *= std::pow( V_.P_adapt_, power );
-  }
-}
-
-void
-eprop_iaf_adapt::compute_gradient( const long t_spike,
-  const long t_spike_previous,
-  std::queue< double >& z_previous_buffer,
-  double& z_bar,
-  double& e_bar,
-  double& epsilon,
-  double& weight,
-  const CommonSynapseProperties& cp,
-  WeightOptimizer* optimizer )
-{
-  double e = 0.0;    // eligibility trace
-  double z = 0.0;    // spiking variable
-  double psi = 0.0;  // surrogate gradient
-  double L = 0.0;    // learning signal
-  double grad = 0.0; // gradient
-
-  const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
-  const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
-
   auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );
 
   const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
 
   for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
   {
-    if ( !z_previous_buffer.empty() )
-    {
-      z = z_previous_buffer.front();
-      z_previous_buffer.pop();
-    }
-
-    if ( t_spike - t > 1 )
+    if ( P_.delay_total_ > 1 )
     {
-      z_previous_buffer.push( 0.0 );
+      update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
     else
     {
-      z_previous_buffer.push( 1.0 );
+      update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
 
     psi = eprop_hist_it->surrogate_gradient_;
diff --git a/models/eprop_iaf_adapt.h b/models/eprop_iaf_adapt.h
index 95c53ed1600..23d93e42c2a 100644
--- a/models/eprop_iaf_adapt.h
+++ b/models/eprop_iaf_adapt.h
@@ -344,19 +344,10 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent
 
   void update( Time const&, const long, const long ) override;
 
-  void compute_gradient( const long t_spike,
-    const long t_spike_previous,
-    double& z_previous,
-    double& z_bar,
-    double& e_bar,
-    double& epsilon,
-    double& weight,
-    const CommonSynapseProperties& cp,
-    WeightOptimizer* optimizer ) override;
-
   void compute_gradient( const long t_spike,
     const long t_spike_previous,
     std::queue< double >& z_previous_buffer,
+    double& z_previous,
     double& z_bar,
     double& e_bar,
     double& epsilon,
diff --git a/models/eprop_iaf_psc_delta.cpp b/models/eprop_iaf_psc_delta.cpp
index f65874957fb..e4ec08a3c12 100644
--- a/models/eprop_iaf_psc_delta.cpp
+++ b/models/eprop_iaf_psc_delta.cpp
@@ -495,6 +495,7 @@ nest::eprop_iaf_psc_delta::handle( DataLoggingRequest& e )
 void
 eprop_iaf_psc_delta::compute_gradient( const long t_spike,
   const long t_spike_previous,
+  std::queue< double >& z_previous_buffer,
   double& z_previous,
   double& z_bar,
   double& e_bar,
@@ -513,87 +514,19 @@ eprop_iaf_psc_delta::compute_gradient( const long t_spike,
   const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
   const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
 
-  auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
-
-  const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
-
-  for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
-  {
-    z = z_previous;
-    z_previous = z_current;
-    z_current = 0.0;
-
-    psi = eprop_hist_it->surrogate_gradient_;
-    L = eprop_hist_it->learning_signal_;
-
-    z_bar = V_.P33_ * z_bar + V_.P_z_in_ * z;
-    e = psi * z_bar;
-    e_bar = P_.kappa_ * e_bar + ( 1.0 - P_.kappa_ ) * e;
-
-    if ( optimize_each_step )
-    {
-      grad = L * e_bar;
-      weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t, grad, weight );
-    }
-    else
-    {
-      grad += L * e_bar;
-    }
-  }
-
-  if ( not optimize_each_step )
-  {
-    weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t_compute_until, grad, weight );
-  }
-
-  const int power = t_spike - ( t_spike_previous + P_.eprop_isi_trace_cutoff_ );
-
-  if ( power > 0 )
-  {
-    z_bar *= std::pow( V_.P33_, power );
-    e_bar *= std::pow( P_.kappa_, power );
-  }
-}
-
-void
-eprop_iaf_psc_delta::compute_gradient( const long t_spike,
-  const long t_spike_previous,
-  std::queue< double >& z_previous_buffer,
-  double& z_bar,
-  double& e_bar,
-  double& epsilon,
-  double& weight,
-  const CommonSynapseProperties& cp,
-  WeightOptimizer* optimizer )
-{
-  double e = 0.0;    // eligibility trace
-  double z = 0.0;    // spiking variable
-  double psi = 0.0;  // surrogate gradient
-  double L = 0.0;    // learning signal
-  double grad = 0.0; // gradient
-
-  const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
-  const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
-
   auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );
 
   const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
 
   for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
   {
-    if ( !z_previous_buffer.empty() )
-    {
-      z = z_previous_buffer.front();
-      z_previous_buffer.pop();
-    }
-
-    if ( t_spike - t > 1 )
+    if ( P_.delay_total_ > 1 )
     {
-      z_previous_buffer.push( 0.0 );
+      update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
     else
     {
-      z_previous_buffer.push( 1.0 );
+      update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
 
     psi = eprop_hist_it->surrogate_gradient_;
diff --git a/models/eprop_iaf_psc_delta.h b/models/eprop_iaf_psc_delta.h
index 11772c975c1..07dfe00233c 100644
--- a/models/eprop_iaf_psc_delta.h
+++ b/models/eprop_iaf_psc_delta.h
@@ -262,19 +262,10 @@ class eprop_iaf_psc_delta : public EpropArchivingNodeRecurrent
 
   void update( Time const&, const long, const long ) override;
 
-  void compute_gradient( const long t_spike,
-    const long t_spike_previous,
-    double& z_previous,
-    double& z_bar,
-    double& e_bar,
-    double& epsilon,
-    double& weight,
-    const CommonSynapseProperties& cp,
-    WeightOptimizer* optimizer ) override;
-
   void compute_gradient( const long t_spike,
     const long t_spike_previous,
     std::queue< double >& z_previous_buffer,
+    double& z_previous,
     double& z_bar,
     double& e_bar,
     double& epsilon,
diff --git a/models/eprop_readout.cpp b/models/eprop_readout.cpp
index 09d7b2c4667..f80ec57eac6 100644
--- a/models/eprop_readout.cpp
+++ b/models/eprop_readout.cpp
@@ -372,6 +372,7 @@ eprop_readout::handle( DataLoggingRequest& e )
 void
 eprop_readout::compute_gradient( const long t_spike,
   const long t_spike_previous,
+  std::queue< double >& z_previous_buffer,
   double& z_previous,
   double& z_bar,
   double& e_bar,
@@ -394,75 +395,14 @@ eprop_readout::compute_gradient( const long t_spike,
 
   for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
   {
-    z = z_previous;
-    z_previous = z_current;
-    z_current = 0.0;
-
-    L = eprop_hist_it->error_signal_;
-
-    z_bar = V_.P_v_m_ * z_bar + V_.P_z_in_ * z;
-
-    if ( optimize_each_step )
-    {
-      grad = L * z_bar;
-      weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t, grad, weight );
-    }
-    else
-    {
-      grad += L * z_bar;
-    }
-  }
-
-  if ( not optimize_each_step )
-  {
-    weight = optimizer->optimized_weight( *ecp.optimizer_cp_, t_compute_until, grad, weight );
-  }
-
-  const int power = t_spike - ( t_spike_previous + P_.eprop_isi_trace_cutoff_ );
-
-  if ( power > 0 )
-  {
-    z_bar *= std::pow( V_.P_v_m_, power );
-  }
-}
-
-void
-eprop_readout::compute_gradient( const long t_spike,
-  const long t_spike_previous,
-  std::queue< double >& z_previous_buffer,
-  double& z_bar,
-  double& e_bar,
-  double& epsilon,
-  double& weight,
-  const CommonSynapseProperties& cp,
-  WeightOptimizer* optimizer )
-{
-  double z = 0.0;    // spiking variable
-  double L = 0.0;    // error signal
-  double grad = 0.0; // gradient
-
-  const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
-  const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;
-
-  auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
-
-  const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );
-
-  for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
-  {
-    if ( !z_previous_buffer.empty() )
+    if ( P_.delay_rec_out_ > 1 )
     {
       z = z_previous_buffer.front();
-      z_previous_buffer.pop();
-    }
-
-    if ( t_spike - t > 1 )
-    {
-      z_previous_buffer.push( 0.0 );
+      update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
     else
     {
-      z_previous_buffer.push( 1.0 );
+      update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
     }
 
     L = eprop_hist_it->error_signal_;
diff --git a/models/eprop_readout.h b/models/eprop_readout.h
index 14f7b7fd592..23ef1076526 100644
--- a/models/eprop_readout.h
+++ b/models/eprop_readout.h
@@ -300,19 +300,10 @@ class eprop_readout : public EpropArchivingNodeReadout
 
   void update( Time const&, const long, const long ) override;
 
-  void compute_gradient( const long t_spike,
-    const long t_spike_previous,
-    double& z_previous,
-    double& z_bar,
-    double& e_bar,
-    double& epsilon,
-    double& weight,
-    const CommonSynapseProperties& cp,
-    WeightOptimizer* optimizer ) override;
-
   void compute_gradient( const long t_spike,
     const long t_spike_previous,
     std::queue< double >& z_previous_buffer,
+    double& z_previous,
     double& z_bar,
     double& e_bar,
     double& epsilon,
diff --git a/models/eprop_synapse.h b/models/eprop_synapse.h
index 95ff68ebff2..5a1e5428314 100644
--- a/models/eprop_synapse.h
+++ b/models/eprop_synapse.h
@@ -514,16 +514,8 @@ eprop_synapse< targetidentifierT >::send( Event& e, size_t thread, const EpropSy
 
   if ( t_spike_previous_ != 0 )
   {
-    if ( delay_total > 1 )
-    {
-      target->compute_gradient(
-        t_spike, t_spike_previous_, z_previous_buffer_, z_bar_, e_bar_, epsilon_, weight_, cp, optimizer_ );
-    }
-    else
-    {
-      target->compute_gradient(
-        t_spike, t_spike_previous_, z_previous_, z_bar_, e_bar_, epsilon_, weight_, cp, optimizer_ );
-    }
+    target->compute_gradient(
+      t_spike, t_spike_previous_, z_previous_buffer_, z_previous_, z_bar_, e_bar_, epsilon_, weight_, cp, optimizer_ );
   }
   else
   {
diff --git a/nestkernel/eprop_archiving_node.h b/nestkernel/eprop_archiving_node.h
index c692c96024c..87c75464314 100644
--- a/nestkernel/eprop_archiving_node.h
+++ b/nestkernel/eprop_archiving_node.h
@@ -157,6 +157,24 @@ class EpropArchivingNode : public Node
   //! the first update.
   void erase_used_eprop_history( const long eprop_isi_trace_cutoff );
 
+  //! Update multiple entries in the presynaptic buffer. This function is used when the total synaptic delay
+  //! is greater than one.
+  void update_pre_syn_buffer_multiple_entries( double& z,
+    double& z_current,
+    double& z_previous,
+    std::queue< double >& z_previous_buffer,
+    double t_spike,
+    double t );
+
+  //! Update one entry in the presynaptic buffer. This function is used when the total synaptic delay
+  //! is equal to one.
+  void update_pre_syn_buffer_one_entry( double& z,
+    double& z_current,
+    double& z_previous,
+    std::queue< double >& z_previous_buffer,
+    double t_spike,
+    double t );
+
 protected:
   //! Number of incoming eprop synapses
   size_t eprop_indegree_;
diff --git a/nestkernel/eprop_archiving_node_impl.h b/nestkernel/eprop_archiving_node_impl.h
index f3f152d0cdf..445d699dc3f 100644
--- a/nestkernel/eprop_archiving_node_impl.h
+++ b/nestkernel/eprop_archiving_node_impl.h
@@ -205,6 +205,47 @@ EpropArchivingNode< HistEntryT >::erase_used_update_history()
   }
 }
 
+
+template < typename HistEntryT >
+void
+EpropArchivingNode< HistEntryT >::update_pre_syn_buffer_multiple_entries( double& z,
+  double& z_current,
+  double& z_previous,
+  std::queue< double >& z_previous_buffer,
+  double t_spike,
+  double t )
+{
+  if ( !z_previous_buffer.empty() )
+  {
+    z = z_previous_buffer.front();
+    z_previous_buffer.pop();
+  }
+
+  if ( t_spike - t > 1 )
+  {
+    z_previous_buffer.push( 0.0 );
+  }
+  else
+  {
+    z_previous_buffer.push( 1.0 );
+  }
+}
+
+template < typename HistEntryT >
+void
+EpropArchivingNode< HistEntryT >::update_pre_syn_buffer_one_entry( double& z,
+  double& z_current,
+  double& z_previous,
+  std::queue< double >& pre_syn_buffer,
+  double t_spike,
+  double t )
+{
+  z = z_previous;
+  z_previous = z_current;
+  z_current = 0.0;
+}
+
+
 } // namespace nest
 
 #endif // EPROP_ARCHIVING_NODE_IMPL_H
diff --git a/nestkernel/node.cpp b/nestkernel/node.cpp
index 3e2c076a7f9..0783b8a7155 100644
--- a/nestkernel/node.cpp
+++ b/nestkernel/node.cpp
@@ -568,20 +568,6 @@ nest::Node::get_tau_syn_in( int )
   throw UnexpectedEvent();
 }
 
-void
-nest::Node::compute_gradient( const long,
-  const long,
-  double&,
-  double&,
-  double&,
-  double&,
-  double&,
-  const CommonSynapseProperties&,
-  WeightOptimizer* )
-{
-  throw IllegalConnection( "The target node does not support compute_gradient()." );
-}
-
 void
 nest::Node::compute_gradient( const long,
   const long,
@@ -590,6 +576,7 @@ nest::Node::compute_gradient( const long,
   double&,
   double&,
   double&,
+  double&,
   const CommonSynapseProperties&,
   WeightOptimizer* )
 {
diff --git a/nestkernel/node.h b/nestkernel/node.h
index f32819e9a82..b405047e695 100644
--- a/nestkernel/node.h
+++ b/nestkernel/node.h
@@ -846,26 +846,11 @@ class Node
    *
    * @params presyn_isis is cleared during call
    */
-  virtual void compute_gradient( const long t_spike,
-    const long t_spike_previous,
-    double& z_previous,
-    double& z_bar,
-    double& e_bar,
-    double& epsilon,
-    double& weight,
-    const CommonSynapseProperties& cp,
-    WeightOptimizer* optimizer );
-
-  /**
-   * Compute gradient change for eprop synapses.
-   *
-   * This method is called from an eprop synapse on the eprop target neuron and returns the change in gradient.
-   *
-   * @params presyn_isis is cleared during call
-   */
   virtual void compute_gradient( const long t_spike,
     const long t_spike_previous,
     std::queue< double >& z_previous_buffer,
+    double& z_previous,
    double& z_bar,
     double& e_bar,
     double& epsilon,
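
Note on the mechanics of this patch: the two compute_gradient overloads that eprop_synapse< targetidentifierT >::send() previously selected between based on delay_total are merged into a single virtual function that always receives both presynaptic state carriers, z_previous_buffer and z_previous; the delay-dependent branch moves into the neuron models (on P_.delay_total_ in the recurrent models, P_.delay_rec_out_ in the readout), which call one of the two new update_pre_syn_buffer_* helpers once per history step. Below is a minimal standalone sketch of the two helper behaviors, with simplified free-function signatures in place of the EpropArchivingNode members; the driver loop and all names not taken from the patch are illustrative only, not NEST API.

#include <iostream>
#include <queue>

// Sketch of update_pre_syn_buffer_multiple_entries (total delay > 1):
// the presynaptic spiking variable z is read from a FIFO, so it lags the
// spike train by the queue depth. A 1.0 is enqueued only on the step
// directly preceding the spike at t_spike; every other step enqueues 0.0.
void update_multiple_entries( double& z, std::queue< double >& z_previous_buffer, long t_spike, long t )
{
  if ( not z_previous_buffer.empty() )
  {
    z = z_previous_buffer.front();
    z_previous_buffer.pop();
  }
  z_previous_buffer.push( t_spike - t > 1 ? 0.0 : 1.0 );
}

// Sketch of update_pre_syn_buffer_one_entry (total delay == 1): a
// three-variable shift register replaces the queue; z_current holds the
// most recent spike indicator and is cleared once it has been shifted.
void update_one_entry( double& z, double& z_previous, double& z_current )
{
  z = z_previous;
  z_previous = z_current;
  z_current = 0.0;
}

int main()
{
  // Walk one inter-spike interval [t_previous, t_spike) the way the merged
  // compute_gradient() loops do. The 1.0 enqueued on the last step of this
  // interval is consumed on the first step of the next interval, which is
  // how the queue carries the spike indicator across calls.
  std::queue< double > z_previous_buffer;
  double z = 0.0;
  const long t_previous = 10;
  const long t_spike = 14;
  for ( long t = t_previous; t < t_spike; ++t )
  {
    update_multiple_entries( z, z_previous_buffer, t_spike, t );
    std::cout << "t = " << t << ", z = " << z << "\n";
  }
  return 0;
}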