model.py
import tensorflow as tf

tf.config.run_functions_eagerly(True)


def backbone():
    '''
    RETURNS THE BACKBONE FEATURE ENCODER NETWORK
    XCEPTION IS USED IN THIS CASE
    '''
    mod = tf.keras.applications.Xception(weights='imagenet')
    # TRUNCATE XCEPTION AT AN INTERMEDIATE LAYER SO SPATIAL FEATURE MAPS ARE KEPT
    mod = tf.keras.Model(mod.input, mod.layers[-13].output)
    return mod

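# NOTE: with Xception's default 299x299 input, the truncated backbone is expected
# to emit a (19, 19, 1024) feature map; this is an assumption inferred from the
# dim=19 / 1024-filter comments in model() below, and can be checked with:
#     print(backbone().output_shape)
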
class ModifiedBranch(tf.keras.layers.Layer):
    '''
    COMPUTES THE MODIFIED BRANCH USED IN THE ATTENTION TECHNIQUE
    INPUT : a_vec_size
    a_vec_size = number of hidden nodes used in the attention mechanism
    '''
    def __init__(self, a_vec_size):
        super(ModifiedBranch, self).__init__()
        self.a_vec_size = a_vec_size

    def build(self, input_shape):
        # CREATE THE SUB-LAYER ONCE SO ITS WEIGHTS ARE TRACKED AND TRAINED
        self.dense = tf.keras.layers.Dense(self.a_vec_size, activation='tanh')

    def call(self, input):
        af = tf.keras.backend.mean(input, axis=2)  # AVERAGE OVER THE SPATIAL AXIS -> (batch, a_vec_size)
        hs = self.dense(af)                        # (batch, a_vec_size)
        return hs

class MainBranch(tf.keras.layers.Layer):
    '''
    COMPUTES THE MAIN BRANCH USED IN THE ATTENTION TECHNIQUE
    INPUT : a_vec_size, dim
    a_vec_size = number of hidden nodes used in the attention mechanism
    dim = output feature map dimension of the backbone network
    '''
    def __init__(self, a_vec_size, dim):
        super(MainBranch, self).__init__()
        self.a_vec_size = a_vec_size
        self.dim = dim

    def build(self, input_shape):
        # CREATE SUB-LAYERS ONCE SO THE LAYER IS REUSABLE ACROSS CALLS
        self.reshape_flat = tf.keras.layers.Reshape((-1, self.a_vec_size))
        self.reshape_back = tf.keras.layers.Reshape((self.dim ** 2, self.a_vec_size))
        self.dropout = tf.keras.layers.Dropout(0.5)

    def call(self, input):
        e = tf.transpose(input, perm=[0, 2, 1])  # (batch, dim**2, a_vec_size)
        e = self.reshape_flat(e)
        e = tf.keras.activations.relu(e)
        e = self.dropout(e)
        e = self.reshape_back(e)
        e = tf.transpose(e, perm=[0, 2, 1])      # (batch, a_vec_size, dim**2)
        return e

class Attention(tf.keras.layers.Layer):
    '''
    IMPLEMENTATION OF THE ATTENTION TECHNIQUE ON THE TWO BRANCHES
    INPUT : dim, a_vec_size, input
    a_vec_size = number of hidden nodes used in the attention mechanism
    dim = output feature map dimension of the backbone network
    input = list containing the feature maps of the modified branch and the main branch, in that order
    '''
    def __init__(self, dim, a_vec_size):
        super(Attention, self).__init__()
        self.dim = dim
        self.a_vec_size = a_vec_size

    def build(self, input_shape):
        # CREATE SUB-LAYERS ONCE SO THEIR WEIGHTS ARE TRACKED AND TRAINED
        self.dense_proj = tf.keras.layers.Dense(self.dim ** 2)
        self.dense_score = tf.keras.layers.Dense(1, use_bias=False)
        self.reshape_broadcast = tf.keras.layers.Reshape((1, self.dim ** 2))
        self.reshape_flat = tf.keras.layers.Reshape((-1, self.a_vec_size))
        self.reshape_scores = tf.keras.layers.Reshape((-1, self.dim ** 2))
        self.add_layer = tf.keras.layers.Add()
        self.dropout = tf.keras.layers.Dropout(0.5)

    def call(self, input):
        eh = self.dense_proj(input[0])          # PROJECT THE MODIFIED BRANCH TO (batch, dim**2)
        eh = self.reshape_broadcast(eh)         # (batch, 1, dim**2)
        eh = self.add_layer([input[1], eh])     # BROADCAST-ADD ONTO THE MAIN BRANCH
        eh = tf.keras.activations.relu(eh)
        eh = self.dropout(eh)
        eh = tf.transpose(eh, perm=[0, 2, 1])   # (batch, dim**2, a_vec_size)
        eh = self.reshape_flat(eh)
        eh = self.dense_score(eh)               # ONE ATTENTION SCORE PER SPATIAL LOCATION
        eh = self.reshape_scores(eh)            # (batch, 1, dim**2)
        eh = tf.keras.activations.relu(eh)
        return eh

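# A minimal shape sanity check for the two branches and the attention block,
# assuming a_vec_size=1024 and dim=19 (values implied by the comments in model()
# below); the helper name and the random input are illustrative only.
def _attention_shape_check(a_vec_size=1024, dim=19):
    x = tf.random.normal((2, a_vec_size, dim ** 2))     # STAND-IN FOR THE RESHAPED BACKBONE FEATURES
    modified = ModifiedBranch(a_vec_size)(x)             # EXPECTED: (2, a_vec_size)
    main = MainBranch(a_vec_size, dim)(x)                # EXPECTED: (2, a_vec_size, dim**2)
    att = Attention(dim, a_vec_size)([modified, main])   # EXPECTED: (2, 1, dim**2)
    return modified.shape, main.shape, att.shape
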
def model(a_vec_size, dim):
    '''
    THIS FUNCTION BUILDS THE ENTIRE MODEL
    INPUT : a_vec_size, dim
    a_vec_size = number of hidden nodes used in the attention mechanism
    dim = output feature map dimension of the backbone network
    OUTPUT : mod
    mod = built model
    '''
    back = backbone()  # CALLING THE BACKBONE NETWORK
    backbone_feature = back.output
    out = tf.keras.layers.Conv2D(filters=a_vec_size, kernel_size=(1, 1), strides=(1, 1), padding='valid', use_bias=True)(backbone_feature)  # APPLYING 1X1 CONVOLUTION
    out = tf.keras.layers.BatchNormalization(axis=-1)(out)
    out = tf.keras.activations.relu(out)
    out = tf.keras.layers.Dropout(0.8)(out)
    out = tf.keras.layers.Reshape((a_vec_size, dim ** 2))(out)  # RESHAPED TO (a_vec_size, dim**2), E.G. (1024, 361) FOR a_vec_size=1024, dim=19
    # THIS OUTPUT IS PASSED THROUGH TWO BRANCHES
    modified = ModifiedBranch(a_vec_size)(out)  # FIRST BRANCH, WHICH TRANSFORMS THE FEATURE MAPS GENERATED BY THE BACKBONE
    main = MainBranch(a_vec_size, dim)(out)  # SECOND BRANCH
    att = Attention(dim, a_vec_size)([modified, main])  # USING ATTENTION BETWEEN THE TWO BRANCHES
    fin = tf.keras.layers.Dense(2, activation='softmax')(att)  # CLASSIFICATION LAYER
    fin = tf.keras.layers.Flatten()(fin)
    mod = tf.keras.Model(inputs=back.input, outputs=fin)  # MODEL BUILT
    return mod
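
# A minimal usage sketch, assuming a_vec_size=1024 and dim=19 (the feature map
# size implied by the inline comments above); these hyperparameters are
# illustrative, not confirmed defaults.
if __name__ == '__main__':
    net = model(a_vec_size=1024, dim=19)
    net.summary()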