Skip to content

Commit

Permalink
- More flexible shapes in BiasLayer
Browse files Browse the repository at this point in the history
  • Loading branch information
dsmic committed Aug 23, 2019
1 parent e8b54ae commit dea5ff3
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions few_shot_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ def __init__(self, proto_num, mult_bias = 1, **kwargs):
def build(self, input_shape):
    """Create the layer's trainable per-prototype bias variable.

    The bias gets one slot per prototype along its first axis, followed by
    the input's trailing dimensions from axis 2 onward.
    # assumes input_shape is a plain tuple/list of ints (Keras-1-style
    # build argument) — TODO confirm against the Keras version in use.
    """
    # BUG FIX: the diff's new line used `(self.proto_num) + input_shape[2:]`.
    # `(self.proto_num)` is a parenthesized int, NOT a one-element tuple, so
    # `int + tuple` raises TypeError. The trailing comma makes it a tuple;
    # tuple(...) also tolerates a TensorShape-like slice.
    self.bias = self.add_weight(name='bias',
                                shape=(self.proto_num,) + tuple(input_shape[2:]),
                                initializer='zeros',
                                trainable=True)
    super(BiasLayer, self).build(input_shape)  # Be sure to call this at the end
Expand Down Expand Up @@ -223,10 +223,11 @@ def get_config(self):
# Wire the siamese comparison graph: both inputs go through the shared
# image encoder; the right branch additionally passes through BiasLayer
# (mult_bias = 0) before and after encoding.
input2b = BiasLayer(shots * cathegories, mult_bias = 0)(input2)
encoded_l = model_img(input1)
encoded_r = model_img(input2b)

encoded_rb = BiasLayer(shots * cathegories, mult_bias = 0)(encoded_r)

# Add a customized layer to compute the absolute difference between the encodings.
L1_layer = Lambda(lambda tensors:K.abs(tensors[0] - tensors[1]))
# NOTE: the pre-commit line `L1_distance = L1_layer([encoded_l, encoded_r])`
# was stale diff residue — it built an unused graph node and was immediately
# overwritten, so it is removed; only the biased right encoding is compared.
L1_distance = L1_layer([encoded_l, encoded_rb])

# Add a dense layer with a sigmoid unit to generate the similarity score.
prediction = Dense(1)(L1_distance)
Expand Down

0 comments on commit dea5ff3

Please sign in to comment.