Commit 81a3d6b

Remove misplaced Adam optimizer parameter initialization
JesusEV committed Oct 14, 2024
1 parent f6929d9 commit 81a3d6b
Showing 1 changed file with 0 additions and 12 deletions.
@@ -376,13 +376,6 @@ def create_mask(weights, sparsity_level):
     "delay": duration["step"],
 }
 
-params_init_optimizer = {
-    "optimizer": {
-        "m": 0.0,  # initial 1st moment estimate m of Adam optimizer
-        "v": 0.0,  # initial 2nd moment raw estimate v of Adam optimizer
-    }
-}
-
 ####################
 
 nest.SetDefaults("eprop_synapse", params_common_syn_eprop)
@@ -413,11 +406,6 @@ def sparsely_connect(weights, params_syn, nrns_pre, nrns_post):
 nest.Connect(mm_rec, nrns_rec_record, params_conn_all_to_all, params_syn_static)
 nest.Connect(mm_out, nrns_out, params_conn_all_to_all, params_syn_static)
 
-# After creating the connections, we can individually initialize the optimizer's
-# dynamic variables for single synapses (here exemplarily for two connections).
-
-nest.GetConnections(nrns_rec[0], nrns_rec[1:3]).set([params_init_optimizer] * 2)
-
 # %% ###########################################################################################################
 # Create input and output
 # ~~~~~~~~~~~~~~~~~~~~~~~
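For readers unfamiliar with the state this commit stops pre-initializing: "m" and "v" are the first moment estimate and second raw moment estimate that Adam tracks per parameter (here, per synaptic weight), and both conventionally start at 0.0. The following is a minimal sketch of a textbook Adam step for a scalar weight, not NEST's internal eprop implementation; the function name, signature, and hyperparameter defaults are illustrative assumptions.

import math

def adam_step(w, grad, m, v, t, lr=1e-3, beta1=0.9, beta2=0.999, eps=1e-8):
    """One textbook Adam update for a scalar weight w; m and v start at 0.0."""
    m = beta1 * m + (1 - beta1) * grad          # 1st moment estimate
    v = beta2 * v + (1 - beta2) * grad ** 2     # 2nd raw moment estimate
    m_hat = m / (1 - beta1 ** t)                # bias correction, t >= 1
    v_hat = v / (1 - beta2 ** t)
    w -= lr * m_hat / (math.sqrt(v_hat) + eps)  # parameter update
    return w, m, v

Zero is the conventional starting value for both moments, which is consistent with the commit title flagging the removed per-connection initialization as misplaced rather than changing any behavior.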
