Take a look at their simple implementation (a one-layer net) and notice how the plastic (Hebbian) part of each connection is combined with the fixed weights in the forward pass.
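Roughly, the rule in their simple.py looks like the following. This is a NumPy sketch of my reading of it, not their exact code: `w` is the fixed weight matrix, `alpha` the per-connection plasticity coefficients, `hebb` the plastic trace, and `eta` the Hebbian learning rate.

```python
import numpy as np

def plastic_step(x, w, alpha, hebb, eta):
    """One pattern: x is (1, n_in); w, alpha, hebb are (n_in, n_out); eta is a scalar."""
    # Effective weight = fixed part + plastic part, gated per-connection by alpha
    y = np.tanh(x @ (w + alpha * hebb))
    # The Hebbian trace decays toward the outer product of pre- and post-activity
    hebb = (1.0 - eta) * hebb + eta * (x.T @ y)
    return y, hebb
```

So on top of the usual kernel, the layer needs a trainable `alpha` of the same shape, a trainable scalar `eta`, and a non-trainable running trace.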
So let's create the layer!
from keras import backend as K
from keras import initializers
from keras.engine.topology import Layer
import numpy as np

class Hebb(Layer):
    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim
        super(Hebb, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        self.kernel = self.add_weight(name='kernel',
                                      shape=(input_shape[1], self.output_dim),
                                      initializer='uniform',
                                      trainable=True)
        # Plasticity coefficients (alpha), one per connection
        # TODO not sure about the params here
        self.alpha = self.add_weight(name='alpha',
                                     shape=(input_shape[1], self.output_dim),
                                     initializer='zeros',
                                     trainable=True)
        # ETA, the Hebbian learning rate (starts at .01 in their implementation)
        self.eta = self.add_weight(name='eta',
                                   shape=(1,),
                                   initializer=initializers.Constant(0.01),
                                   trainable=True)
        super(Hebb, self).build(input_shape)  # Be sure to call this at the end

    def call(self, x):
        # TODO incorporate the Hebbian trace, as in
        # https://github.com/uber-common/differentiable-plasticity/blob/master/simple/simple.py#L119
        # TODO further alterations?
        return K.dot(x, self.kernel)  # TODO alter to include the Hebbian term

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.output_dim)
As seen on https://keras.io/layers/writing-your-own-keras-layers/
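To fill in the TODOs in `call`, one possibility (just a sketch of how I'd wire it into Keras, not a drop-in port of their code) is to keep the Hebbian trace as an extra non-trainable weight created in `build` and update it through `self.add_update`. Only the changed pieces of the class are shown, and there are two simplifying assumptions: the trace is shared across the batch (the outer product is averaged over it) and it is never reset, whereas their simple.py keeps a per-episode trace and feeds one pattern at a time.

```python
    def build(self, input_shape):
        # ... kernel, alpha and eta exactly as above, plus the non-trainable trace:
        self.hebb = self.add_weight(name='hebb',
                                    shape=(input_shape[1], self.output_dim),
                                    initializer='zeros',
                                    trainable=False)
        super(Hebb, self).build(input_shape)

    def call(self, x):
        # Effective weight = fixed kernel + plastic part gated by alpha
        y = K.tanh(K.dot(x, self.kernel + self.alpha * self.hebb))
        # Batch-averaged outer product of pre- and post-synaptic activity
        batch_size = K.cast(K.shape(x)[0], K.floatx())
        outer = K.dot(K.transpose(x), y) / batch_size
        # Decay the trace toward the new outer product and register the update
        new_hebb = (1.0 - self.eta) * self.hebb + self.eta * outer
        self.add_update([K.update(self.hebb, new_hebb)], inputs=x)
        return y
```

With that in place the layer should drop into a model like any other, e.g. `model.add(Hebb(64))`, though whether a batch-averaged, never-reset trace behaves like their per-episode one is something I'd still want to test.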