Paper doi abstract bibtex

Sparse approximation is a hypothesized coding strategy where a population of sensory neurons (e.g. V1) encodes a stimulus using as few active neurons as possible. We present the Spiking LCA (locally competitive algorithm), a rate encoded Spiking Neural Network (SNN) of integrate and fire neurons that calculate sparse approximations. The Spiking LCA is designed to be equivalent to the nonspiking LCA, an analog dynamical system that converges on ℓ1-norm sparse approximations exponentially. We show that the firing rate of the Spiking LCA converges on the same solution as the analog LCA, with an error inversely proportional to the sampling time. We simulate in NEURON a network of 128 neuron pairs that encode 8 × 8 pixel image patches, demonstrating that the network converges to nearly optimal encodings within 20 ms of biological time. We also show that when using more biophysically realistic parameters in the neurons, the gain function encourages additional ℓ0-norm sparsity in the encoding, relative both to ideal neurons and digital solvers.

@article{SHAPERO_2014,
  author    = {Shapero, Samuel and Zhu, Mengchen and Hasler, Jennifer and Rozell, Christopher},
  title     = {Optimal Sparse Approximation with Integrate and Fire Neurons},
  journal   = {International Journal of Neural Systems},
  year      = {2014},
  volume    = {24},
  number    = {5},
  pages     = {1440001},
  month     = aug,
  abstract  = {Sparse approximation is a hypothesized coding strategy where a population of sensory neurons (e.g. V1) encodes a stimulus using as few active neurons as possible. We present the Spiking LCA (locally competitive algorithm), a rate encoded Spiking Neural Network (SNN) of integrate and fire neurons that calculate sparse approximations. The Spiking LCA is designed to be equivalent to the nonspiking LCA, an analog dynamical system that converges on {$\ell_1$}-norm sparse approximations exponentially. We show that the firing rate of the Spiking LCA converges on the same solution as the analog LCA, with an error inversely proportional to the sampling time. We simulate in NEURON a network of 128 neuron pairs that encode {$8 \times 8$} pixel image patches, demonstrating that the network converges to nearly optimal encodings within 20 ms of biological time. We also show that when using more biophysically realistic parameters in the neurons, the gain function encourages additional {$\ell_0$}-norm sparsity in the encoding, relative both to ideal neurons and digital solvers.},
  doi       = {10.1142/S0129065714400012},
  keywords  = {Sparse coding; spiking neural networks; locally competitive algorithm},
  publisher = {World Scientific Pub Co Pte Lt},
  url       = {http://www.worldscientific.com/doi/abs/10.1142/S0129065714400012},
}

Downloads: 0