customlayers.py
import numpy as np
from keras.layers.core import Lambda
from keras.layers import merge
from keras.layers.convolutional import Convolution2D
from keras import backend as K
from keras.engine import Layer
import theano.tensor as T

# def crosschannelnormalization(alpha=1e-4, k=2, beta=0.75, n=5, **kwargs):
#     """
#     This is the function used for cross channel normalization in the
#     original AlexNet.
#     """
#     def f(X):
#         b, ch, r, c = X.shape
#         half = n // 2
#         square = K.square(X)
#         extra_channels = K.spatial_2d_padding(
#             K.permute_dimensions(square, (0, 2, 3, 1)), (0, half))
#         extra_channels = K.permute_dimensions(extra_channels, (0, 3, 1, 2))
#         scale = k
#         for i in range(n):
#             scale += alpha * extra_channels[:, i:i+ch, :, :]
#         scale = scale ** beta
#         return X / scale
#     return Lambda(f, output_shape=lambda input_shape: input_shape, **kwargs)


def crosschannelnormalization(alpha=1e-4, k=2, beta=0.75, n=5, **kwargs):
    """
    This is the function used for cross channel normalization in the
    original AlexNet, combining the convnets-keras and pylearn2 functions.
    erralves
    """
    def f(X):
        # Input is expected in pylearn2's c01b layout:
        # (channels, rows, cols, batch).
        ch, r, c, b = X.shape
        half = n // 2
        sq = T.sqr(X)
        # Zero-pad the channel axis by `half` on each side so every channel
        # has a full window of n neighbours.
        extra_channels = T.alloc(0., ch + 2 * half, r, c, b)
        sq = T.set_subtensor(extra_channels[half:half + ch, :, :, :], sq)
        scale = k
        for i in range(n):
            scale += alpha * sq[i:i + ch, :, :, :]
        scale = scale ** beta
        return X / scale
    return Lambda(f, output_shape=lambda input_shape: input_shape, **kwargs)
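
# Usage sketch (illustrative, not part of the original file): the layer drops
# into a Keras 1 graph like any other Lambda, e.g.
#
#     conv_2 = crosschannelnormalization(name="lrn_1")(conv_2)
#
# Unlike the commented-out version above, which expects (batch, ch, row, col)
# tensors, this variant indexes the channel axis first, so its input must
# already be in pylearn2-style c01b layout: (ch, row, col, batch).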


def splittensor(axis=1, ratio_split=1, id_split=0, **kwargs):
    """
    Return a Lambda layer that keeps slice number `id_split` out of
    `ratio_split` equal slices of its input along `axis`.
    """
    def f(X):
        div = X.shape[axis] // ratio_split
        if axis == 0:
            output = X[id_split*div:(id_split+1)*div, :, :, :]
        elif axis == 1:
            output = X[:, id_split*div:(id_split+1)*div, :, :]
        elif axis == 2:
            output = X[:, :, id_split*div:(id_split+1)*div, :]
        elif axis == 3:
            output = X[:, :, :, id_split*div:(id_split+1)*div]
        else:
            raise ValueError("This axis is not possible")
        return output

    def g(input_shape):
        output_shape = list(input_shape)
        output_shape[axis] = output_shape[axis] // ratio_split
        return tuple(output_shape)

    return Lambda(f, output_shape=lambda input_shape: g(input_shape), **kwargs)
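
# Usage sketch (illustrative; assumes Keras 1.x with 'th' dim ordering, so
# axis=1 is the channel axis): keep the second half of the channels of a
# hypothetical 256-channel tensor `x`,
#
#     upper = splittensor(axis=1, ratio_split=2, id_split=1)(x)
#
# The result has 128 channels and unchanged batch/spatial dimensions.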


def convolution2Dgroup(n_group, nb_filter, nb_row, nb_col, **kwargs):
    """
    AlexNet-style grouped convolution: split the input channels into
    `n_group` groups, convolve each group separately, then concatenate the
    results along the channel axis. Uses the Keras 1 functional `merge`
    (the `Merge` layer class does not accept a list of tensors).
    """
    def f(input):
        return merge([
            Convolution2D(nb_filter // n_group, nb_row, nb_col)(
                splittensor(axis=1,
                            ratio_split=n_group,
                            id_split=i)(input))
            for i in range(n_group)
        ], mode='concat', concat_axis=1)
    return f
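
# Usage sketch (illustrative): AlexNet's two-tower layers can be expressed as
# a 2-group convolution, e.g. a grouped 5x5 producing 256 feature maps from a
# hypothetical tensor `conv_2`:
#
#     conv_2 = convolution2Dgroup(2, 256, 5, 5)(conv_2)
#
# Each group sees 1/n_group of the input channels and learns
# nb_filter // n_group filters; the group outputs are concatenated back along
# the channel axis.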


class Softmax4D(Layer):
    """
    Softmax over an arbitrary axis of a 4D tensor, e.g. the channel axis of
    a convolutional feature map.
    """
    def __init__(self, axis=-1, **kwargs):
        self.axis = axis
        super(Softmax4D, self).__init__(**kwargs)

    def build(self, input_shape):
        pass

    def call(self, x, mask=None):
        # Subtract the max along `axis` for numerical stability before
        # exponentiating, then normalize.
        e = K.exp(x - K.max(x, axis=self.axis, keepdims=True))
        s = K.sum(e, axis=self.axis, keepdims=True)
        return e / s

    def get_output_shape_for(self, input_shape):
        return input_shape
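
# Usage sketch (illustrative; assumes 'th' ordering with channels on axis 1):
# per-location class probabilities for a fully convolutional output, applied
# to a hypothetical feature map `heatmap`:
#
#     probs = Softmax4D(axis=1, name="softmax")(heatmap)
#
# Keras 1's Activation('softmax') assumes a 2D (batch, features) tensor;
# this layer instead normalizes across the chosen axis independently at
# every spatial location.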