brainpy / BrainPy

Brain Dynamics Programming in Python
https://brainpy.readthedocs.io/
GNU General Public License v3.0

Questions about the activation of neurons when input is current #603

Closed hongruj closed 8 months ago

hongruj commented 8 months ago

Hi, BrainPy team.

I am trying to drive the network with an input in the form of a current (not spikes). However, I ran into problems with the activation of the neurons during .update(). Here is the code:

import brainpy as bp
import brainpy.math as bm

class Exponential(bp.Projection):
    def __init__(self, num_pre, post, scale=20., tau=10.):
        super().__init__()
        self.proj = bp.dyn.HalfProjAlignPostMg(
            comm=bp.dnn.Linear(num_pre, post.num, W_initializer=bp.init.KaimingNormal(scale=scale)),
            syn=bp.dyn.Expon.desc(post.num, tau=tau),
            out=bp.dyn.CUBA.desc(),
            post=post, 
        )

dt=1.
bm.set_dt(dt)

class SNN(bp.DynamicalSystem):
    def __init__(self, batch_size, home_joint_state):
        super().__init__()

        # parameters
        self.num_input = 3 
        self.num_fast = 300
        self.num_slow = 100
        self.num_muscle = 6

        # input layer  FOR CURRENT
        self.inp_layer = bp.dnn.Linear(self.num_input, self.num_fast, W_initializer=bp.init.KaimingNormal(scale=1.)) 

        # neuron groups
        self.fast = bp.dyn.LifRef(self.num_fast, tau=8.0, tau_ref=2.0, V_th=1.)
        self.slow = bp.dyn.LifRef(self.num_slow, tau=40.0, tau_ref=2.0,  V_th=1.)

        # output layer  
        self.readout = bp.Sequential(
          bp.dnn.Linear(self.num_fast, self.num_muscle, W_initializer=bp.init.KaimingNormal(scale=1.)),
          bp.dyn.Expon(self.num_muscle, tau=10.),
          bp.dyn.Integrator(self.num_muscle, tau=20))

        # synapse: f->f
        self.f2f = Exponential(self.num_fast, self.fast, scale=2.) 
        # synapse: f->s
        self.f2s = Exponential(self.num_fast, self.slow, scale=200.) 
        # synapse: s->f
        self.s2f = Exponential(self.num_slow, self.fast, scale=2.)

        self.adyn = muscular_arm(dt, batch_size)  # arm/muscle model defined outside this snippet

    def update(self, inp):
        cur_inp = self.inp_layer(inp)  
        self.fast(cur_inp)   # neurons receive the current input
        self.f2f(self.fast.spike)
        self.f2s(self.fast.spike)
        self.s2f(self.slow.spike)
        self.slow()   # zero during simulation

        muscle = self.readout(self.fast.spike)
        return muscle

# net (an SNN instance) and input (the stimulus array) are defined outside this snippet
runner = bp.DSRunner(net, monitors={'fast.spike': net.fast.spike, 'f.membrane': net.fast.V,
                                    'slow.spike': net.slow.spike, 'slow.membrane': net.slow.V,},
                     data_first_axis='T')
out = runner.run(inputs=input, reset_state=True)

When I gave the input to the network (for example, an input composed of sin(t), cos(t), and a step function), I found that slow.membrane is always 0. Could you please give me some advice? Thank you very much.
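
For reference, the input array was built along these lines (a minimal sketch; the duration, amplitudes, and step onset below are illustrative, not the exact values I used):

import numpy as np

num_steps = 1000                                   # illustrative simulation length
t = np.arange(num_steps) * dt                      # dt = 1. as set above
step = (t >= 500.).astype(np.float32)              # step switching on halfway through (illustrative onset)
input = np.stack([np.sin(t), np.cos(t), step], axis=-1)   # shape (T, 3): one channel per input neuron

This is the array passed as inputs=input to runner.run above.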

chaoming0625 commented 8 months ago

This is because your inputs are too weak to elicit spikes in the fast population. Here is my code, modified from yours:


import brainpy as bp
import brainpy.math as bm

dt = 1.
bm.set_dt(dt)

class Exponential(bp.Projection):
  def __init__(self, num_pre, post, scale=20., tau=10.):
    super().__init__()
    self.proj = bp.dyn.HalfProjAlignPostMg(
      comm=bp.dnn.Linear(num_pre, post.num, W_initializer=bp.init.KaimingNormal(scale=scale)),
      syn=bp.dyn.Expon.desc(post.num, tau=tau),
      out=bp.dyn.CUBA.desc(),
      post=post,
    )

class SNN(bp.DynamicalSystem):
  def __init__(self):
    super().__init__()

    # parameters
    self.num_input = 3
    self.num_fast = 300
    self.num_slow = 100
    self.num_muscle = 6

    # input layer  FOR CURRENT
    self.inp_layer = bp.dnn.Linear(self.num_input, self.num_fast, W_initializer=bp.init.KaimingNormal(scale=1.))

    # neuron groups
    self.fast = bp.dyn.LifRef(self.num_fast, tau=8.0, tau_ref=2.0, V_th=1.)
    self.slow = bp.dyn.LifRef(self.num_slow, tau=40.0, tau_ref=2.0, V_th=1.)

    # output layer
    self.readout = bp.Sequential(
      bp.dnn.Linear(self.num_fast, self.num_muscle, W_initializer=bp.init.KaimingNormal(scale=1.)),
      bp.dyn.Expon(self.num_muscle, tau=10.),
      bp.dyn.Integrator(self.num_muscle, tau=20)
    )

    # synapse: f->f
    self.f2f = Exponential(self.num_fast, self.fast, scale=2.)
    # synapse: f->s
    self.f2s = Exponential(self.num_fast, self.slow, scale=200.)
    # synapse: s->f
    self.s2f = Exponential(self.num_slow, self.fast, scale=2.)

  def update(self, inp):
    cur_inp = self.inp_layer(inp)
    self.f2f(self.fast.spike)
    self.f2s(self.fast.spike)
    self.s2f(self.slow.spike)
    self.fast(cur_inp)  # neurons receive the current input
    self.slow()  # zero during simulation
    muscle = self.readout(self.fast.spike)
    return muscle

net = SNN()
runner = bp.DSRunner(net,
                     monitors={'fast.spike': net.fast.spike,
                               'fast.membrane': net.fast.V,
                               'slow.spike': net.slow.spike,
                               'slow.membrane': net.slow.V, },
                     data_first_axis='T')
out = runner.run(inputs=bm.random.rand(1000, 3) < 0.5, reset_state=True)

bp.visualize.raster_plot(runner.mon['ts'], runner.mon['fast.spike'], show=True)
bp.visualize.line_plot(runner.mon['ts'], runner.mon['slow.membrane'], plot_ids=[0, 2, 4, 6, 8], show=True)
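
As a quick sanity check (assuming the runner above has just been executed), you can compare how much each population fires: if the fast group hardly spikes, the slow group receives almost no drive through the f->s projection and its membrane stays flat at 0.

# mean number of spikes per time step in each population
print('fast spikes per step:', runner.mon['fast.spike'].sum(axis=-1).mean())
print('slow spikes per step:', runner.mon['slow.spike'].sum(axis=-1).mean())
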
hongruj commented 8 months ago

Thanks for your reply. (I didn't expect it to be an input issue, since there were 'fast' spikes but no 'slow' spikes.)