From dea5ff33d8525ebff0f3e2fdf9a87dcb6885d582 Mon Sep 17 00:00:00 2001
From: detlef
Date: Fri, 23 Aug 2019 17:55:41 +0200
Subject: [PATCH] - more flexible shapes in BiasLayer

---
 few_shot_tests.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/few_shot_tests.py b/few_shot_tests.py
index fa2856b..61fb55f 100755
--- a/few_shot_tests.py
+++ b/few_shot_tests.py
@@ -177,7 +177,7 @@ def __init__(self, proto_num, mult_bias = 1, **kwargs):
     def build(self, input_shape):
         # Create a trainable weight variable for this layer.
         self.bias = self.add_weight(name='bias',
-                                    shape=(self.proto_num,input_shape[2], input_shape[3], input_shape[4]),
+                                    shape=(self.proto_num,) + input_shape[2:],
                                     initializer='zeros',
                                     trainable=True)
         super(BiasLayer, self).build(input_shape) # Be sure to call this at the end
@@ -223,10 +223,11 @@ def get_config(self):
 input2b = BiasLayer(shots * cathegories, mult_bias = 0)(input2)
 encoded_l = model_img(input1)
 encoded_r = model_img(input2b)
-
+
+encoded_rb = BiasLayer(shots * cathegories, mult_bias = 0)(encoded_r)
 # Add a customized layer to compute the absolute difference between the encodings
 L1_layer = Lambda(lambda tensors:K.abs(tensors[0] - tensors[1]))
-L1_distance = L1_layer([encoded_l, encoded_r])
+L1_distance = L1_layer([encoded_l, encoded_rb])
 # Add a dense layer with a sigmoid unit to generate the similarity score
 prediction = Dense(1)(L1_distance)
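
For reference, below is a minimal, self-contained sketch of a BiasLayer along the lines of this patch, written against tf.keras. It is not the repository's actual class: the diff only shows the __init__ signature and part of build(), so the call() behaviour, the role of mult_bias, get_config(), and the usage shapes are assumptions. The point of the change is that the bias weight takes its trailing dimensions from the incoming shape via input_shape[2:] instead of hard-coding input_shape[2..4]; note the trailing comma in (self.proto_num,), which makes the prototype count a one-element tuple that can be concatenated with those dimensions.

import tensorflow as tf

class BiasLayer(tf.keras.layers.Layer):
    """Sketch of a per-prototype trainable bias; call() semantics are assumed."""

    def __init__(self, proto_num, mult_bias=1, **kwargs):
        super(BiasLayer, self).__init__(**kwargs)
        self.proto_num = proto_num
        self.mult_bias = mult_bias

    def build(self, input_shape):
        # One bias tensor per prototype; the trailing dimensions follow the
        # input shape, so the layer no longer assumes a fixed 5-D input.
        self.bias = self.add_weight(name='bias',
                                    shape=(self.proto_num,) + tuple(input_shape[2:]),
                                    initializer='zeros',
                                    trainable=True)
        super(BiasLayer, self).build(input_shape)

    def call(self, x):
        # Placeholder combination of input and bias; the real call() logic is
        # not visible in the patch. Here mult_bias scales the input before the
        # trainable bias is added (mult_bias=0 keeps only the bias term).
        return x * self.mult_bias + self.bias

    def get_config(self):
        # Assumed config so the layer can be re-created from its constructor args.
        config = super(BiasLayer, self).get_config()
        config.update({'proto_num': self.proto_num, 'mult_bias': self.mult_bias})
        return config

# Hypothetical usage: 'shots' and 'cathegories' mirror the names in the patch,
# and the 8x8x64 feature shape is made up for illustration.
shots, cathegories = 5, 10
inp = tf.keras.Input(shape=(shots * cathegories, 8, 8, 64))
out = BiasLayer(shots * cathegories, mult_bias=0)(inp)  # output keeps the input's shape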