
Commit

more preparation
dsmic committed Oct 11, 2019
1 parent a965bb5 commit 7cb4a24
Showing 1 changed file with 31 additions and 6 deletions.
37 changes: 31 additions & 6 deletions few_shot_tests.py
@@ -60,6 +60,7 @@ def parser():
    parser.add_argument('--dense_img_num', dest='dense_img_num', type=int, default=-1)
    parser.add_argument('--binary_siamese', dest='binary_siamese', action='store_true')  # seems to be a bad idea
    parser.add_argument('--square_siamese', dest='square_siamese', action='store_true')
    parser.add_argument('--eta', dest='eta', type=float, default=0)
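    # eta is the Hebbian learning rate used by Dense_plasticity below;
    # with the default of 0 the plastic trace self.hebb is never updated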

    args = parser.parse_args()

@@ -443,6 +444,7 @@ def __init__(self,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 eta = 0.0,  # plasticity learning rate: EMA coefficient for the Hebbian trace
                 **kwargs):
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
@@ -461,6 +463,7 @@

        self.supports_masking = True
        self.input_spec = InputSpec(min_ndim=2)
        self.eta = eta

    def build(self, input_shape):
        dtype = dtypes.as_dtype(self.dtype or K.floatx())
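        # note: self.hebb and the `placticity` tensor used in call() below are
        # defined outside the lines shown in this diff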
@@ -522,7 +525,15 @@ def call(self, inputs):
            outputs = standard_ops.tensordot(inputs, self.kernel, [[rank - 1], [0]])
            outputs2 = standard_ops.tensordot(inputs, placticity, [[rank - 1], [0]])
            outputs = tf.add(outputs, outputs2)
            # Reshape the output back to the original ndim of the input.

            # plasticity management
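            # inputs_1/outputs_1 are batch-mean activations with singleton axes, so
            # v broadcasts to their outer product; the loop below reduces any extra
            # leading axes until v has shape (input_dim, units)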
            inputs_1 = K.mean(tf.expand_dims(inputs, rank), axis=0)
            outputs_1 = K.mean(tf.expand_dims(outputs, rank - 1), axis=0)
            v = tf.multiply(inputs_1, outputs_1)
            while len(v.shape) > 2:
                v = K.mean(v, axis=0)
            self.hebb.assign((1 - self.eta) * self.hebb + self.eta * v)  # exponential moving average of the Hebbian trace

            if not context.executing_eagerly():
                shape = inputs.shape.as_list()
                output_shape = shape[:-1] + [self.units]
@@ -536,15 +547,29 @@ def call(self, inputs):
            if K.is_sparse(inputs):
                outputs = sparse_ops.sparse_tensor_dense_matmul(inputs, self.kernel)
                outputs2 = sparse_ops.sparse_tensor_dense_matmul(inputs, placticity)
                outputs.set_shape(output_shape)
                outputs = tf.add(outputs, outputs2)
            else:
                outputs = gen_math_ops.mat_mul(inputs, self.kernel)
                outputs2 = gen_math_ops.mat_mul(inputs, placticity)
                outputs.set_shape(output_shape)
                outputs = tf.add(outputs, outputs2)

            # plasticity management
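            # rank-2 case: expand_dims yields (batch, input_dim, 1) and (batch, 1, units),
            # so the batch means multiply out directly to an (input_dim, units) update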
            inputs_1 = K.mean(tf.expand_dims(inputs, rank), axis=0)
            outputs_1 = K.mean(tf.expand_dims(outputs, rank - 1), axis=0)
            self.hebb.assign((1 - self.eta) * self.hebb + self.eta * tf.multiply(inputs_1, outputs_1))

        if self.use_bias:
            outputs = nn.bias_add(outputs, self.bias)


        #inputs_1 = K.mean(tf.expand_dims(inputs,rank),axis=0)
        #outputs_1 = K.mean(tf.expand_dims(outputs,rank-1),axis=0)
        #self.hebb.assign ( (1-self.eta)*self.hebb + self.eta * tf.multiply(inputs_1, outputs_1))


        if self.activation is not None:
            return self.activation(outputs)  # pylint: disable=not-callable

        return outputs

    def compute_output_shape(self, input_shape):
@@ -591,7 +616,7 @@ def get_config(self):

flat = TimeDistributed(Flatten())(pool5)
if args.dense_img_num > 0:
-    x = TimeDistributed(Dense(args.dense_img_num, activation = 'sigmoid'))(flat)
+    x = TimeDistributed(Dense_plasticity(args.dense_img_num, eta = args.eta, activation = 'sigmoid'))(flat)
else:
    if args.binary_siamese:
        x = Activation('sigmoid')(flat)
@@ -625,15 +650,15 @@ def get_config(self):
    L1_layer = Lambda(lambda tensors: K.binary_crossentropy(tensors[0], tensors[1]))
    print(encoded_l, encoded_rb_scale)
    L1_distance = L1_layer([encoded_l, encoded_rb_scale])
-    prediction = Dense_plasticity(1, name = 'dense_siamese')(L1_distance)
+    prediction = Dense_plasticity(1, eta = args.eta, name = 'dense_siamese')(L1_distance)
else:
    # Add a customized layer to compute the absolute difference between the encodings
    if args.square_siamese:
        L1_layer = Lambda(lambda tensors: K.pow(tensors[0] - tensors[1], 2))
    else:
        L1_layer = Lambda(lambda tensors: K.abs(tensors[0] - tensors[1]))
    L1_distance = L1_layer([encoded_l, encoded_rb_scale])
-    prediction = Dense_plasticity(1, name = 'dense_siamese')(L1_distance)
+    prediction = Dense_plasticity(1, eta = args.eta, name = 'dense_siamese')(L1_distance)

# Connect the inputs with the outputs
if args.load_subnet:
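For reference, the trace update that call() now performs reduces to a few lines of NumPy. The sketch below is a minimal illustration assuming the rank-2 branch; the function name and shapes are illustrative, not part of the commit:

import numpy as np

def hebbian_update(hebb, x, y, eta):
    # hebb: (input_dim, units) plastic trace
    # x: (batch, input_dim) layer inputs; y: (batch, units) layer outputs
    v = np.outer(x.mean(axis=0), y.mean(axis=0))  # outer product of the batch means
    return (1.0 - eta) * hebb + eta * v           # exponential moving average

rng = np.random.default_rng(0)
hebb = np.zeros((4, 3))
x = rng.normal(size=(8, 4))
y = rng.normal(size=(8, 3))
hebb = hebbian_update(hebb, x, y, eta=0.1)

With eta = 0 (the command-line default) the update returns hebb unchanged, so the trace stays frozen at its initial value.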
