Generalize recurrent-to-readout and readout-to-recurrent delays #31

Open · wants to merge 12 commits into base: eprop_bio_feature
64 changes: 53 additions & 11 deletions models/eprop_iaf.cpp
@@ -83,6 +83,9 @@ eprop_iaf::Parameters_::Parameters_()
, V_th_( -55.0 - E_L_ )
, kappa_( 0.97 )
, eprop_isi_trace_cutoff_( std::numeric_limits< long >::max() )
, delay_rec_out_( 1 )
, delay_out_rec_( 1 )
, delay_total_( 1 )
{
}

@@ -129,6 +132,11 @@ eprop_iaf::Parameters_::get( DictionaryDatum& d ) const
def< double >( d, names::V_th, V_th_ + E_L_ );
def< double >( d, names::kappa, kappa_ );
def< long >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_ );

double delay_rec_out_ms = Time( Time::step( delay_rec_out_ ) ).get_ms();
def< double >( d, names::delay_rec_out, delay_rec_out_ms );
double delay_out_rec_ms = Time( Time::step( delay_out_rec_ ) ).get_ms();
def< double >( d, names::delay_out_rec, delay_out_rec_ms );
}

double
@@ -160,6 +168,14 @@ eprop_iaf::Parameters_::set( const DictionaryDatum& d, Node* node )
updateValueParam< double >( d, names::kappa, kappa_, node );
updateValueParam< long >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_, node );

double delay_rec_out_ms = Time( Time::step( delay_rec_out_ ) ).get_ms();
updateValueParam< double >( d, names::delay_rec_out, delay_rec_out_ms, node );
delay_rec_out_ = Time( Time::ms( delay_rec_out_ms ) ).get_steps();

double delay_out_rec_ms = Time( Time::step( delay_out_rec_ ) ).get_ms();
updateValueParam< double >( d, names::delay_out_rec, delay_out_rec_ms, node );
delay_out_rec_ = Time( Time::ms( delay_out_rec_ms ) ).get_steps();

if ( C_m_ <= 0 )
{
throw BadProperty( "Membrane capacitance C_m > 0 required." );
@@ -200,6 +216,18 @@ eprop_iaf::Parameters_::set( const DictionaryDatum& d, Node* node )
throw BadProperty( "Cutoff of integration of eprop trace between spikes eprop_isi_trace_cutoff ≥ 0 required." );
}

if ( delay_rec_out_ < 1 )
{
throw BadProperty( "Connection delay from recurrent to output neuron ≥ 1 required." );
}

if ( delay_out_rec_ < 1 )
{
throw BadProperty( "Broadcast delay of learning signals ≥ 1 required." );
}

delay_total_ = delay_rec_out_ + ( delay_out_rec_ - 1 );

return delta_EL;
}
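
A minimal sketch of what the new ms-to-steps round trip and the total-delay arithmetic in `set()` compute, outside NEST and assuming a fixed 0.1 ms resolution (in the model itself, `Time::ms( ... ).get_steps()` and `Time( Time::step( ... ) ).get_ms()` perform the conversion):

```cpp
#include <cmath>
#include <iostream>

int main()
{
  const double resolution_ms = 0.1; // assumed simulation grid spacing

  // user-supplied delays in ms, snapped to the grid as in Parameters_::set()
  const double delay_rec_out_ms = 0.3;
  const double delay_out_rec_ms = 0.2;
  const long delay_rec_out = std::lround( delay_rec_out_ms / resolution_ms ); // 3 steps
  const long delay_out_rec = std::lround( delay_out_rec_ms / resolution_ms ); // 2 steps

  // total shift between a recurrent spike and the learning signal it causes,
  // matching delay_total_ = delay_rec_out_ + ( delay_out_rec_ - 1 ) in set()
  const long delay_total = delay_rec_out + ( delay_out_rec - 1 );

  std::cout << delay_total << std::endl; // 4
}
```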

@@ -266,6 +294,14 @@ eprop_iaf::pre_run_hook()
V_.P_v_m_ = std::exp( -dt / P_.tau_m_ );
V_.P_i_in_ = P_.tau_m_ / P_.C_m_ * ( 1.0 - V_.P_v_m_ );
V_.P_z_in_ = P_.regular_spike_arrival_ ? 1.0 : 1.0 - V_.P_v_m_;

if ( eprop_history_.empty() )
{
for ( long t = -P_.delay_total_; t < 0; ++t )
{
emplace_new_eprop_history_entry( t );
}
}
}
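
The padding loop above creates `P_.delay_total_` placeholder history entries at negative time steps, so the gradient code can read that many steps into the past from the very first update. A self-contained toy sketch of the same idea (the struct and container are illustrative stand-ins, not NEST's actual e-prop history entries):

```cpp
#include <deque>
#include <iostream>

struct HistEntry // illustrative stand-in for the real e-prop history entry
{
  long t;
  double surrogate_gradient;
  double learning_signal;
};

int main()
{
  const long delay_total = 4; // assumed P_.delay_total_
  std::deque< HistEntry > eprop_history;

  if ( eprop_history.empty() )
  {
    for ( long t = -delay_total; t < 0; ++t )
    {
      eprop_history.push_back( { t, 0.0, 0.0 } );
    }
  }

  // the history now starts at step -delay_total, so reads shifted back by
  // delay_total are valid from simulation step 0 onward
  std::cout << eprop_history.front().t << std::endl; // -4
}
```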

long
@@ -379,33 +415,39 @@ eprop_iaf::handle( DataLoggingRequest& e )
void
eprop_iaf::compute_gradient( const long t_spike,
const long t_spike_previous,
double& z_previous_buffer,
std::queue< double >& z_previous_buffer,
double& z_previous,
double& z_bar,
double& e_bar,
double& epsilon,
double& weight,
const CommonSynapseProperties& cp,
WeightOptimizer* optimizer )
{
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current_buffer = 1.0; // buffer containing the spike that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double grad = 0.0; // gradient
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current = 1.0; // spike that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double grad = 0.0; // gradient

const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;

auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );

const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );

for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
{
z = z_previous_buffer;
z_previous_buffer = z_current_buffer;
z_current_buffer = 0.0;
if ( P_.delay_total_ > 1 )
{
update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}
else
{
update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}

psi = eprop_hist_it->surrogate_gradient_;
L = eprop_hist_it->learning_signal_;
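
The scalar one-step buffer is replaced by a FIFO queue so that presynaptic spikes can be held for more than one step before entering the eligibility trace. The helpers `update_pre_syn_buffer_one_entry` and `update_pre_syn_buffer_multiple_entries` are not part of this hunk; the following is only a hypothetical sketch of the queue-based delay line they generalize:

```cpp
#include <iostream>
#include <queue>

int main()
{
  const long delay_total = 3; // assumed total delay in steps
  std::queue< double > z_buffer;

  // pre-fill with zeros so the first delay_total reads are well defined
  for ( long i = 0; i < delay_total; ++i )
  {
    z_buffer.push( 0.0 );
  }

  for ( long t = 0; t < 6; ++t )
  {
    const double z_in = ( t == 0 ) ? 1.0 : 0.0; // presynaptic spike at t = 0
    z_buffer.push( z_in );                      // enqueue the newest spike
    const double z = z_buffer.front();          // spike delayed by delay_total
    z_buffer.pop();
    std::cout << "t=" << t << " z=" << z << "\n"; // z reads 1 at t = 3
  }
}
```
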
40 changes: 39 additions & 1 deletion models/eprop_iaf.h
@@ -330,7 +330,8 @@ class eprop_iaf : public EpropArchivingNodeRecurrent

void compute_gradient( const long t_spike,
const long t_spike_previous,
double& z_previous_buffer,
std::queue< double >& z_previous_buffer,
double& z_previous,
double& z_bar,
double& e_bar,
double& epsilon,
@@ -341,6 +342,9 @@ class eprop_iaf : public EpropArchivingNodeRecurrent
long get_shift() const override;
bool is_eprop_recurrent_node() const override;
long get_eprop_isi_trace_cutoff() override;
long get_delay_total() const override;
long get_delay_recurrent_to_readout() const override;
long get_delay_readout_to_recurrent() const override;

//! Compute the surrogate gradient.
double ( eprop_iaf::*compute_surrogate_gradient )( double, double, double, double, double, double );
@@ -401,6 +405,15 @@ class eprop_iaf : public EpropArchivingNodeRecurrent
//! eprop_isi_trace_cutoff_ and the inter-spike distance.
long eprop_isi_trace_cutoff_;

//! Connection delay from recurrent to output neurons.
long delay_rec_out_;

//! Broadcast delay of learning signals.
long delay_out_rec_;

//! Total delay in steps: connection delay from recurrent to output neurons plus broadcast delay of learning signals, minus one.
long delay_total_;

//! Default constructor.
Parameters_();

@@ -526,10 +539,35 @@ eprop_iaf::get_eprop_isi_trace_cutoff()
return P_.eprop_isi_trace_cutoff_;
}

inline long
eprop_iaf::get_delay_total() const
{
return P_.delay_total_;
}

inline long
eprop_iaf::get_delay_recurrent_to_readout() const
{
return P_.delay_rec_out_;
}

inline long
eprop_iaf::get_delay_readout_to_recurrent() const
{
return P_.delay_out_rec_;
}

inline size_t
eprop_iaf::send_test_event( Node& target, size_t receptor_type, synindex, bool )
{
SpikeEvent e;

// To perform a consistency check on the delay parameter delay_rec_out between
// recurrent and output neurons, the recurrent neuron sends a test event with the
// delay specified by its own delay_rec_out. Upon receiving the test event, the
// output neuron checks whether the delay with which the event arrived matches
// its own delay_rec_out parameter.
e.set_delay_steps( P_.delay_rec_out_ );
e.set_sender( *this );
return target.handles_test_event( e, receptor_type );
}
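
A self-contained toy model of this handshake (the classes and names are illustrative, not NEST's actual `Node`/`SpikeEvent` API): the sender stamps the test event with its `delay_rec_out`, and the receiver rejects the connection on a mismatch.

```cpp
#include <iostream>
#include <stdexcept>

struct TestSpikeEvent // illustrative stand-in for nest::SpikeEvent
{
  long delay_steps;
};

struct ReadoutSketch // illustrative stand-in for the readout neuron
{
  long delay_rec_out;

  void handles_test_event( const TestSpikeEvent& e ) const
  {
    if ( e.delay_steps != delay_rec_out )
    {
      throw std::runtime_error( "delay_rec_out mismatch between recurrent and readout neuron" );
    }
  }
};

int main()
{
  const ReadoutSketch readout{ 3 }; // readout expects 3-step delays

  readout.handles_test_event( { 3 } ); // consistent delay: accepted

  try
  {
    readout.handles_test_event( { 1 } ); // inconsistent delay: rejected
  }
  catch ( const std::exception& ex )
  {
    std::cout << ex.what() << std::endl;
  }
}
```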
64 changes: 53 additions & 11 deletions models/eprop_iaf_adapt.cpp
@@ -87,6 +87,9 @@ eprop_iaf_adapt::Parameters_::Parameters_()
, V_th_( -55.0 - E_L_ )
, kappa_( 0.97 )
, eprop_isi_trace_cutoff_( std::numeric_limits< long >::max() )
, delay_rec_out_( 1 )
, delay_out_rec_( 1 )
, delay_total_( 1 )
{
}

@@ -137,6 +140,11 @@ eprop_iaf_adapt::Parameters_::get( DictionaryDatum& d ) const
def< double >( d, names::V_th, V_th_ + E_L_ );
def< double >( d, names::kappa, kappa_ );
def< long >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_ );

double delay_rec_out_ms = Time( Time::step( delay_rec_out_ ) ).get_ms();
def< double >( d, names::delay_rec_out, delay_rec_out_ms );
double delay_out_rec_ms = Time( Time::step( delay_out_rec_ ) ).get_ms();
def< double >( d, names::delay_out_rec, delay_out_rec_ms );
}

double
@@ -170,6 +178,14 @@ eprop_iaf_adapt::Parameters_::set( const DictionaryDatum& d, Node* node )
updateValueParam< double >( d, names::kappa, kappa_, node );
updateValueParam< long >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_, node );

double delay_rec_out_ms = Time( Time::step( delay_rec_out_ ) ).get_ms();
updateValueParam< double >( d, names::delay_rec_out, delay_rec_out_ms, node );
delay_rec_out_ = Time( Time::ms( delay_rec_out_ms ) ).get_steps();

double delay_out_rec_ms = Time( Time::step( delay_out_rec_ ) ).get_ms();
updateValueParam< double >( d, names::delay_out_rec, delay_out_rec_ms, node );
delay_out_rec_ = Time( Time::ms( delay_out_rec_ms ) ).get_steps();

if ( adapt_beta_ < 0 )
{
throw BadProperty( "Threshold adaptation prefactor adapt_beta ≥ 0 required." );
@@ -220,6 +236,18 @@ eprop_iaf_adapt::Parameters_::set( const DictionaryDatum& d, Node* node )
throw BadProperty( "Cutoff of integration of eprop trace between spikes eprop_isi_trace_cutoff ≥ 0 required." );
}

if ( delay_rec_out_ < 1 )
{
throw BadProperty( "Connection delay from recurrent to output neuron ≥ 1 required." );
}

if ( delay_out_rec_ < 1 )
{
throw BadProperty( "Broadcast delay of learning signals ≥ 1 required." );
}

delay_total_ = delay_rec_out_ + ( delay_out_rec_ - 1 );

return delta_EL;
}

@@ -301,6 +329,14 @@ eprop_iaf_adapt::pre_run_hook()
V_.P_i_in_ = P_.tau_m_ / P_.C_m_ * ( 1.0 - V_.P_v_m_ );
V_.P_z_in_ = P_.regular_spike_arrival_ ? 1.0 : 1.0 - V_.P_v_m_;
V_.P_adapt_ = std::exp( -dt / P_.adapt_tau_ );

if ( eprop_history_.empty() )
{
for ( long t = -P_.delay_total_; t < 0; ++t )
{
emplace_new_eprop_history_entry( t );
}
}
}

long
@@ -417,33 +453,39 @@ eprop_iaf_adapt::handle( DataLoggingRequest& e )
void
eprop_iaf_adapt::compute_gradient( const long t_spike,
const long t_spike_previous,
double& z_previous_buffer,
std::queue< double >& z_previous_buffer,
double& z_previous,
double& z_bar,
double& e_bar,
double& epsilon,
double& weight,
const CommonSynapseProperties& cp,
WeightOptimizer* optimizer )
{
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current_buffer = 1.0; // buffer containing the spike that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double grad = 0.0; // gradient
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current = 1.0; // spike that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double grad = 0.0; // gradient

const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;

auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );

const long t_compute_until = std::min( t_spike_previous + P_.eprop_isi_trace_cutoff_, t_spike );

for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
{
z = z_previous_buffer;
z_previous_buffer = z_current_buffer;
z_current_buffer = 0.0;
if ( P_.delay_total_ > 1 )
{
update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}
else
{
update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}

psi = eprop_hist_it->surrogate_gradient_;
L = eprop_hist_it->learning_signal_;
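
The shifted read `get_eprop_history( t_spike_previous - P_.delay_total_ )` pairs each presynaptic step with the learning signal that can only arrive `delay_total` steps later. A small sketch of that index arithmetic under the padding done in `pre_run_hook()` (the flat container and names are illustrative only):

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
  const long delay_total = 4;        // assumed delay_rec_out + delay_out_rec - 1
  const long t_first = -delay_total; // first (padded) history step

  // map a time step to an index in a flat history container
  auto index_of = [ t_first ]( const long t ) { return static_cast< std::size_t >( t - t_first ); };

  std::vector< double > learning_signal( 20, 0.0 );
  learning_signal[ index_of( 2 ) ] = 0.5; // signal recorded at step 2

  // the presynaptic step this signal belongs to lies delay_total steps later,
  // so the gradient loop reads the history at t - delay_total
  const long t = 2 + delay_total;
  std::cout << learning_signal[ index_of( t - delay_total ) ] << std::endl; // 0.5
}
```
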
40 changes: 39 additions & 1 deletion models/eprop_iaf_adapt.h
@@ -346,7 +346,8 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent

void compute_gradient( const long t_spike,
const long t_spike_previous,
double& z_previous_buffer,
std::queue< double >& z_previous_buffer,
double& z_previous,
double& z_bar,
double& e_bar,
double& epsilon,
@@ -357,6 +358,9 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent
long get_shift() const override;
bool is_eprop_recurrent_node() const override;
long get_eprop_isi_trace_cutoff() override;
long get_delay_total() const override;
long get_delay_recurrent_to_readout() const override;
long get_delay_readout_to_recurrent() const override;

//! Compute the surrogate gradient.
double ( eprop_iaf_adapt::*compute_surrogate_gradient )( double, double, double, double, double, double );
@@ -423,6 +427,15 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent
//! eprop_isi_trace_cutoff_ and the inter-spike distance.
long eprop_isi_trace_cutoff_;

//! Connection delay from recurrent to output neurons.
long delay_rec_out_;

//! Broadcast delay of learning signals.
long delay_out_rec_;

//! Total delay in steps: connection delay from recurrent to output neurons plus broadcast delay of learning signals, minus one.
long delay_total_;

//! Default constructor.
Parameters_();

@@ -571,10 +584,35 @@ eprop_iaf_adapt::get_eprop_isi_trace_cutoff()
return P_.eprop_isi_trace_cutoff_;
}

inline long
eprop_iaf_adapt::get_delay_total() const
{
return P_.delay_total_;
}

inline long
eprop_iaf_adapt::get_delay_recurrent_to_readout() const
{
return P_.delay_rec_out_;
}

inline long
eprop_iaf_adapt::get_delay_readout_to_recurrent() const
{
return P_.delay_out_rec_;
}

inline size_t
eprop_iaf_adapt::send_test_event( Node& target, size_t receptor_type, synindex, bool )
{
SpikeEvent e;

// To perform a consistency check on the delay parameter delay_rec_out between
// recurrent and output neurons, the recurrent neuron sends a test event with the
// delay specified by its own delay_rec_out. Upon receiving the test event, the
// output neuron checks whether the delay with which the event arrived matches
// its own delay_rec_out parameter.
e.set_delay_steps( P_.delay_rec_out_ );
e.set_sender( *this );
return target.handles_test_event( e, receptor_type );
}