You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
This is what I believe might be causing the bug, but I am not exactly sure. Let me know if the full source code is needed.
/// A probabilistic implementation of the STDP algorithm for a bitwise linear layer.pubfnprobabilistic_bitwise_stdp<I:BitVolume,O:BitVolume>(synaptic_layers:&mut[BitwiseLinear<I,O>],self_connected:bool,presynaptic_trace:&af::Array<f32>,dominant_postsynaptic_spikes:&af::Array<bool>,dopamine:f32,alpha:f32,learning_rate:f32){// TODO:// When arrayfire supports 8-bit signed integers, migrate to that instead of 16-bit integers.// Go through each synaptic layer and modify its weights.let connections:usize = synaptic_layers.len();for(layer_idx, layer)in synaptic_layers.iter_mut().enumerate(){// Get the excitatory and inhibitory weights, and convert it to a trinary layer.let(excitatory, inhibitory):(af::Array<bool>, af::Array<bool>) = layer.weights_as_array();letmut weights: af::Array<i16> = af::constant(0, excitatory.dims());
weights = af::selectl(1.0,&excitatory,&weights);
weights = af::selectl(-1.0,&inhibitory,&weights);// Normalize the trace and compute the LTP and LTD components.let trace_norm: af::Array<f32> = if connections == 1{
presynaptic_trace / af::max_all(presynaptic_trace).0}else{let sequences:&[af::Seq<i32>] = &[af::Seq::default(), af::Seq::default(), af::Seq::default(), af::Seq::new(layer_idx asi32, layer_idx asi32,1)];let trace_layer: af::Array<f32> = af::index(presynaptic_trace, sequences);&trace_layer / af::max_all(&trace_layer).0};let ltp_trace: af::Array<f32> = af::exp(&trace_norm) - alpha;let ltd_trace: af::Array<f32> = af::exp(&(1.0f32 - &trace_norm)) - alpha;// Calculate update probabilities.let p_update_prob: af::Array<f32> = (ltp_trace - ltd_trace)* learning_rate * dopamine;let n_update_prob: af::Array<f32> = &p_update_prob * -1f32;// Generate random values for probabilistic update.// We use a uniform distribution to have an equal chance for all values to be generated.//// Using the random values, we'll then create a mask for different weight updates (positive and// negative).let random_values: af::Array<f32> = af::randu(weights.dims());let increase_mask: af::Array<bool> = af::lt(&random_values,&p_update_prob,false)& af::lt(&weights,&1i16,false);let decrease_mask: af::Array<bool> = af::lt(&random_values,&n_update_prob,false)& af::gt(&weights,&-1i16,false);// Calculate delta weightslet delta_weights: af::Array<i16> = increase_mask.cast::<i16>() - decrease_mask.cast::<i16>();// Apply winner_mask and identity_mask to delta_weights.let winner_mask: af::Array<bool> = af::tile(&dominant_postsynaptic_spikes, af::dim4!(1,I::FSIZEasu64,1,1));let masked_delta_weights: af::Array<i16> = if self_connected {let identity_mask: af::Array<i16> = (1f32 - af::identity::<i16>(af::dim4!(1,I::FSIZEasu64,O::FSIZEasu64,1))).cast();&delta_weights
*&winner_mask.cast::<i16>()*&identity_mask
}else{&delta_weights
*&winner_mask.cast::<i16>()};// Apply the masked delta weights to the weights.
weights += masked_delta_weights;
weights = af::clamp(&weights,&-1i16,&1i16,true);// Deconstruct the weights back into their excitatory and inhibitory counterparts and export it// back into the layer.let excitatory: af::Array<bool> = af::eq(&weights,&1,false);let inhibitory: af::Array<bool> = af::eq(&weights,&-1,false);
layer.update_weights(excitatory, inhibitory);}}
/// Returns the weights as an arrayfire array of the dimensions (1, input size, output size)./// Arrays are returned in the order of excitatory and inhibitory weights.pubfnweights_as_array(&self) -> (af::Array<bool>, af::Array<bool>){(
af::Array::new(&self.neurons.par_iter().flat_map(|neuron| neuron.weight_excitatory.to_flattened_bits()).collect::<Vec<_>>(),
af::dim4!(1,I::FSIZEasu64,O::FSIZEasu64)),
af::Array::new(&self.neurons.par_iter().flat_map(|neuron| neuron.weight_inhibitory.to_flattened_bits()).collect::<Vec<_>>(),
af::dim4!(1,I::FSIZEasu64,O::FSIZEasu64)))}/// Updates the weights from an arrayfire array of the dimensions (input size, output size).pubfnupdate_weights(&mutself,updated_excitatory: af::Array<bool>,updated_inhibitory: af::Array<bool>){letmut excitatory_weights:Vec<bool> = vec![false;(I::FSIZE*O::FSIZE)asusize];letmut inhibitory_weights:Vec<bool> = vec![false;(I::FSIZE*O::FSIZE)asusize];
updated_excitatory.host(&mut excitatory_weights);
updated_inhibitory.host(&mut inhibitory_weights);self.neurons.par_iter_mut().enumerate().for_each(|(i, neuron)| {let start:usize = i *I::FSIZEasusize;let end:usize = start + I::FSIZEasusize;let excitatory_slice:&[bool] = &excitatory_weights[start..end];let inhibitory_slice:&[bool] = &inhibitory_weights[start..end];
neuron.weight_excitatory = I::from_flattened_bits(excitatory_slice).unwrap();
neuron.weight_inhibitory = I::from_flattened_bits(inhibitory_slice).unwrap();});}
System Information
Arrayfire 3.8.3
I have an NVIDIA RTX 3050 laptop GPU
Output as follows:
LSB Version: n/a
Distributor ID: Arch
Description: Arch Linux
Release: rolling
Codename: n/a
name, memory.total [MiB], driver_version
NVIDIA GeForce RTX 3050 Laptop GPU, 4096 MiB, 565.57.01
rocm-smi not found.
clinfo not found.
Checklist
Using the latest available ArrayFire release
GPU drivers are up to date
The text was updated successfully, but these errors were encountered:
Description
Reproducible Code and/or Steps
This is what I believe might be causing the bug, but I am not exactly sure. Let me know if the full source code is needed.
System Information
Checklist
The text was updated successfully, but these errors were encountered: