Commit

Followed contribution guidelines.
namish800 committed Nov 1, 2018
1 parent 005f691 commit d4896eb
Showing 9 changed files with 157 additions and 135 deletions.
97 changes: 49 additions & 48 deletions keras_app/custom_layers/capsule_layer.py
@@ -3,58 +3,59 @@
import keras


def squash_activation(vectors, axis=-1):
    s_squared_norm = K.sum(K.square(vectors), axis, keepdims=True)
    scale = s_squared_norm / (1 + s_squared_norm) / K.sqrt(s_squared_norm + 1e-7)
    return scale * vectors


class CapsuleLayer(Layer):
    def __init__(self, num_capsule, dim_capsule, num_routing=3, **kwargs):
        super(CapsuleLayer, self).__init__(**kwargs)
        self.num_capsule = num_capsule
        self.dim_capsule = dim_capsule
        self.num_routing = num_routing
        self.kernel_initializer = keras.initializers.random_uniform(-1, 1)
        self.bias_initializer = keras.initializers.Zeros()

    def build(self, input_shape):
        # Expects (batch, input_num_capsule, input_dim_capsule).
        assert len(input_shape) >= 3
        self.W = self.add_weight(shape=[input_shape[1], self.num_capsule, input_shape[2], self.dim_capsule],
                                 initializer=self.kernel_initializer,
                                 name='W')
        self.b = self.add_weight(shape=[input_shape[1], self.num_capsule],
                                 initializer=self.bias_initializer,
                                 name='b')
        super(CapsuleLayer, self).build(input_shape)

    def call(self, inputs, training=None):
        inputs_expand = K.expand_dims(inputs, 2)
        inputs_tiled = K.repeat_elements(inputs_expand, self.num_capsule, axis=2)
        # Prediction vectors u_hat: (batch, input_num_capsule, num_capsule, dim_capsule).
        inputs_hat = K.map_fn(lambda x: K.batch_dot(x, self.W, [2, 2]), inputs_tiled)
        input_shape = K.shape(inputs_hat)
        b = self.b
        b = K.expand_dims(b, axis=0)
        assert self.num_routing > 0
        for i in range(self.num_routing):
            # Coupling coefficients from the routing logits.
            c = K.softmax(b)
            c = K.expand_dims(c, axis=-1)
            c = K.repeat_elements(c, rep=self.dim_capsule, axis=-1)
            S = K.sum(c * inputs_hat, axis=1)
            V = squash_activation(S)
            if i != self.num_routing - 1:
                # Agreement update: logits grow where predictions align with the output.
                V_expanded = K.expand_dims(V, axis=1)
                V_expanded = K.tile(V_expanded, [1, input_shape[1], 1, 1])
                b = b + K.sum(inputs_hat * V_expanded, axis=-1)
        return V

    def compute_output_shape(self, input_shape):
        return tuple([None, self.num_capsule, self.dim_capsule])

    def get_config(self):
        base_config = super(CapsuleLayer, self).get_config()
        base_config['num_capsule'] = self.num_capsule
        base_config['num_routing'] = self.num_routing
        base_config['dim_capsule'] = self.dim_capsule
        return base_config
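
For orientation, here is a minimal sketch of how a CapsuleLayer is typically attached to a primary-capsule tensor; the shapes and capsule counts below are illustrative assumptions, not values taken from this commit.

# Illustrative usage sketch -- shapes and counts are assumed, not from this commit.
from keras import layers, models

# Suppose a primary-capsule stage emits 1152 capsules of 8 dimensions each.
x = layers.Input(shape=(1152, 8))
# Route them into 10 output capsules of 16 dimensions over 3 routing iterations.
caps = CapsuleLayer(num_capsule=10, dim_capsule=16, num_routing=3)(x)
model = models.Model(x, caps)  # output shape: (None, 10, 16)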

18 changes: 9 additions & 9 deletions keras_app/custom_layers/config.py
@@ -6,19 +6,19 @@
        'url': '/media/lrn.py'
    },
    'CapsuleLayer': {
        'filename': 'capsule_layer.py',
        'url': '/media/capsule_layer.py'
    },
    'Length': {
        'filename': 'length.py',
        'url': 'media/length.py'
    },
    'MaskCapsule': {
        'filename': 'mask_capsule.py',
        'url': 'media/mask_capsule.py'
    },
    'Squash': {
        'filename': 'squash.py',
        'url': 'media/squash.py'
    }
}
24 changes: 12 additions & 12 deletions keras_app/custom_layers/length.py
@@ -2,20 +2,20 @@
from keras import backend as K


class Length(Layer):
    def __init__(self, **kwargs):
        super(Length, self).__init__(**kwargs)

    def build(self, input_shape):
        super(Length, self).build(input_shape)

    def call(self, input):
        return K.sqrt(K.sum(K.square(input), -1))

    def compute_output_shape(self, input_shape):
        return input_shape[:-1]

    def get_config(self):
        base_config = super(Length, self).get_config()
        return base_config
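
Length reduces the last axis to the Euclidean norm of each capsule vector; in a capsule network that norm is read as the probability that the capsule's entity is present. A quick numeric check of the shape contract (example values assumed for illustration):

# Numeric check of Length's reduction -- example values are assumed.
import numpy as np
from keras import backend as K

caps = K.constant(np.array([[[3.0, 4.0], [0.0, 1.0]]]))  # shape (1, 2, 2)
norms = K.eval(K.sqrt(K.sum(K.square(caps), -1)))
print(norms)  # [[5. 1.]] -- shape (1, 2), i.e. input_shape[:-1]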

40 changes: 20 additions & 20 deletions keras_app/custom_layers/mask_capsule.py
@@ -3,28 +3,28 @@


class MaskCapsule(Layer):
    def __init__(self, **kwargs):
        super(MaskCapsule, self).__init__(**kwargs)

    def build(self, input_shape):
        super(MaskCapsule, self).build(input_shape)

    def call(self, inputs):
        if type(inputs) == list:
            # Training path: a true-label mask is supplied alongside the capsules.
            assert len(inputs) == 2
            inputs, mask = inputs
        else:
            # Inference path: build a one-hot mask on the longest capsule.
            x = K.sqrt(K.sum(K.square(inputs), -1))
            mask = K.one_hot(indices=K.argmax(x, 1), num_classes=x.get_shape().as_list()[1])
        masked = K.batch_flatten(inputs * K.expand_dims(mask, -1))
        return masked

    def compute_output_shape(self, input_shape):
        if type(input_shape[0]) is tuple:
            return tuple([None, input_shape[0][1] * input_shape[0][2]])
        else:
            return tuple([None, input_shape[1] * input_shape[2]])

    def get_config(self):
        return super(MaskCapsule, self).get_config()
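
In effect, MaskCapsule zeroes every capsule except one, either the capsule selected by the supplied mask during training or the longest capsule at inference, then flattens the result for a downstream decoder. A small numeric sketch of the inference path (example values assumed):

# Sketch of MaskCapsule's inference-time path -- example values are assumed.
import numpy as np
from keras import backend as K

caps = K.constant(np.array([[[3.0, 4.0], [0.0, 1.0]]]))  # (1, 2, 2): two 2-D capsules
x = K.sqrt(K.sum(K.square(caps), -1))                    # capsule lengths [[5. 1.]]
mask = K.one_hot(K.argmax(x, 1), 2)                      # one-hot on the longest capsule
masked = K.batch_flatten(caps * K.expand_dims(mask, -1))
print(K.eval(masked))  # [[3. 4. 0. 0.]] -- the shorter capsule is zeroed out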

28 changes: 17 additions & 11 deletions keras_app/custom_layers/squash.py
@@ -1,19 +1,25 @@
from keras.layers.core import Layer
from keras import backend as K


class Squash(Layer):
    def __init__(self, axis=-1, **kwargs):
        self.axis = axis
        super(Squash, self).__init__(**kwargs)

    def build(self, input_shape):
        super(Squash, self).build(input_shape)

    def call(self, inputs):
        s_squared_norm = K.sum(K.square(inputs), self.axis, keepdims=True)
        scale = s_squared_norm / (1 + s_squared_norm) / K.sqrt(s_squared_norm + 1e-7)
        return scale * inputs

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        base_config = super(Squash, self).get_config()
        base_config['axis'] = self.axis
        return base_config
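
Squash (like squash_activation in capsule_layer.py) implements the squashing nonlinearity from Sabour et al.'s dynamic-routing paper, v = ||s||^2 / (1 + ||s||^2) * s / ||s||: direction is preserved while length is compressed into [0, 1), and the 1e-7 term guards the division when the norm is zero. A quick numeric check (example vector assumed):

# Numeric check of the squash nonlinearity -- example vector is assumed.
import numpy as np

s = np.array([0.0, 3.0, 4.0])                      # ||s|| = 5
norm_sq = np.sum(s ** 2)
v = norm_sq / (1 + norm_sq) * s / np.sqrt(norm_sq + 1e-7)
print(np.linalg.norm(v))  # ~0.9615 (= 25/26): length squashed below 1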

10 changes: 5 additions & 5 deletions keras_app/views/export_json.py
@@ -10,7 +10,7 @@
from layers_export import data, convolution, deconvolution, pooling, dense, dropout, embed,\
    recurrent, batch_norm, activation, flatten, reshape, eltwise, concat, upsample, locally_connected,\
    permute, repeat_vector, regularization, masking, gaussian_noise, gaussian_dropout, alpha_dropout, \
    bidirectional, time_distributed, lrn, depthwiseConv, capsule_layer, length, mask_capsule, squash
from ..custom_layers import config as custom_layers_config

@@ -82,10 +82,10 @@ def export_json(request, is_tf=False):

    custom_layers_map = {
        'LRN': lrn,
        'CapsuleLayer': capsule_layer,
        'Length': length,
        'MaskCapsule': mask_capsule,
        'Squash': squash
    }

    # Remove any duplicate activation layers (timedistributed and bidirectional layers)
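
custom_layers_map presumably lets the exporter dispatch on a layer's type string the way the built-in layer map does; a hedged sketch of that lookup (the surrounding exporter loop is an assumption, since this diff only shows the map itself):

# Hedged dispatch sketch -- the surrounding exporter loop is assumed, not shown here.
layer = {'params': {'num_capsule': 10, 'dim_capsule': 16, 'num_routing': 3}}
layer_type = 'CapsuleLayer'
if layer_type in custom_layers_map:
    export_fn = custom_layers_map[layer_type]  # here: the capsule_layer helper
    # export_fn(layer, layer_in, layerId) would return {layerId: keras_tensor}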
14 changes: 8 additions & 6 deletions keras_app/views/import_json.py
@@ -10,11 +10,11 @@
    Recurrent, BatchNorm, Activation, LeakyReLU, PReLU, ELU, Scale, Flatten, Reshape, Concat, \
    Eltwise, Padding, Upsample, LocallyConnected, ThresholdedReLU, Permute, RepeatVector,\
    ActivityRegularization, Masking, GaussianNoise, GaussianDropout, AlphaDropout, \
    TimeDistributed, Bidirectional, DepthwiseConv, lrn, capsule_layer, length, mask_capsule, squash
from keras.models import model_from_json, Sequential
from keras.layers import deserialize
from ..custom_layers.lrn import LRN
from ..custom_layers.capsule_layer import CapsuleLayer
from ..custom_layers.length import Length
from ..custom_layers.mask_capsule import MaskCapsule
from ..custom_layers.squash import Squash

@@ -53,7 +53,9 @@ def import_json(request):
    except Exception:
        return JsonResponse({'result': 'error', 'error': 'Invalid JSON'})

    model = model_from_json(json.dumps(model),
                            custom_objects={'LRN': LRN, 'Length': Length, 'MaskCapsule': MaskCapsule,
                                            'Squash': Squash, 'CapsuleLayer': CapsuleLayer})
    layer_map = {
        'InputLayer': Input,
        'Dense': Dense,

@@ -119,9 +121,9 @@ def import_json(request):
        'Bidirectional': Bidirectional,
        'LRN': lrn,
        'CapsuleLayer': capsule_layer,
        'Length': length,
        'MaskCapsule': mask_capsule,
        'Squash': squash
    }

    hasActivation = ['Conv1D', 'Conv2D', 'Conv3D', 'Conv2DTranspose', 'Dense', 'LocallyConnected1D',
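
Passing the custom classes via custom_objects is what lets model_from_json resolve non-built-in layer names during deserialization; without the entry, Keras raises ValueError: Unknown layer: CapsuleLayer. A minimal round-trip sketch (architecture assumed for illustration):

# Minimal serialize/deserialize round trip -- architecture is assumed.
from keras.layers import Input
from keras.models import Model, model_from_json

inp = Input(shape=(1152, 8))
out = CapsuleLayer(num_capsule=10, dim_capsule=16, num_routing=3)(inp)
json_str = Model(inp, out).to_json()

restored = model_from_json(json_str,
                           custom_objects={'CapsuleLayer': CapsuleLayer})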
37 changes: 22 additions & 15 deletions keras_app/views/layers_export.py
@@ -18,7 +18,7 @@
from keras.layers import TimeDistributed, Bidirectional
from keras import regularizers
from ..custom_layers.lrn import LRN
from ..custom_layers.capsule_layer import CapsuleLayer
from ..custom_layers.length import Length
from ..custom_layers.mask_capsule import MaskCapsule
from ..custom_layers.squash import Squash

@@ -132,9 +132,9 @@ def activation(layer, layer_in, layerId, tensor=True):
    return out


def squash(layer, layer_in, layerId):
    axis = layer['params']['axis']
    out = {}
    out[layerId] = Squash(axis=axis)(*layer_in)
    return out

@@ -189,15 +189,19 @@ def masking(layer, layer_in, layerId, tensor=True):
        out[layerId] = out[layerId](*layer_in)
    return out


def length(layer, layer_in, layerId):
    out = {}
    out[layerId] = Length()(*layer_in)
    return out


def mask_capsule(layer, layer_in, layerId):
    out = {}
    out[layerId] = MaskCapsule()(*layer_in)
    return out


# ********** Convolution Layers **********
def convolution(layer, layer_in, layerId, tensor=True):
    convMap = {

@@ -383,14 +387,17 @@ def upsample(layer, layer_in, layerId, tensor=True):
        out[layerId] = out[layerId](*layer_in)
    return out


# ********** Capsule Layers **********
def capsule_layer(layer, layer_in, layerId):
    num_capsule = layer['params']['num_capsule']
    dim_capsule = layer['params']['dim_capsule']
    num_routing = layer['params']['num_routing']
    out = {}
    out[layerId] = CapsuleLayer(num_capsule=num_capsule, dim_capsule=dim_capsule, num_routing=num_routing)(*layer_in)
    return out


# ********** Pooling Layers **********
def pooling(layer, layer_in, layerId, tensor=True):
    poolMap = {