@@ -55,7 +55,7 @@ def make_layers(self, cfg, in_channels, batch_norm = True):
i += 1
return nn.Sequential(OrderedDict(layers))

- def make_layers_MLP(self, cfg_MLP, cfg, single_output = False):
+ def make_layers_MLP(self, cfg_MLP, cfg, regression = False):
"""
Create sequential models layers according to the chosen configuration provided in
cfg for the MLP.
@@ -77,13 +77,13 @@ def make_layers_MLP(self, cfg_MLP, cfg, single_output = False):
for out_features in cfg_MLP:
if out_features == "M":
layers += [(f"MLP_relu{i}", nn.ReLU(True)), (f"MLP_dropout{i}", nn.Dropout())]
- i += 1
+ i += 1
else:
linear = (f"MLP_linear{i}", nn.Linear(in_features, out_features))
layers += [linear]
in_features = out_features

- if single_output:
+ if regression: # if regression is True, make the final layer a single output
linear = (f"MLP_linear_final", nn.Linear(in_features, 1))
layers += [linear]
else:
@@ -112,7 +112,7 @@ class VGG1(VGGBase):
Instance of VGGBase with the model architecture 1.
"""
def __init__(self,
- cfg = "B",
+ cfg = "B", # default configuration
cfg_MLP = "A",
dimensions = 196,
in_channels = 1,
@@ -167,17 +167,15 @@ class VGG2_regression(VGGBase):
"""
def __init__(self,
cfg = "B",
- cfg_MLP = "B",
- dimensions = 196,
- in_channels = 1,
- num_classes = 2,
+ cfg_MLP = "A",
+ in_channels = 1,
):

super(VGG2_regression, self).__init__()

self.norm = nn.BatchNorm2d(in_channels)
self.features = self.make_layers(self.cfgs[cfg], in_channels)
- self.classifier = self.make_layers_MLP(self.cfgs_MLP[cfg_MLP], self.cfgs[cfg], single_output = True)
+ self.classifier = self.make_layers_MLP(self.cfgs_MLP[cfg_MLP], self.cfgs[cfg], regression = True) # regression is set to True to make the final layer a single output

def vgg(cfg, in_channels, **kwargs):
model = VGG2_regression(self.make_layers(self.cfgs[cfg], in_channels), **kwargs)
@@ -186,7 +184,9 @@ def vgg(cfg, in_channels, **kwargs):
def forward(self, x):
x = self.norm(x)
x = self.features(x)
+
x = torch.flatten(x, 1)
+
x = self.classifier(x)
return x
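For context, a minimal usage sketch of the changed class, not part of the commit: it assumes the surrounding VGGBase definitions from this file (cfgs, cfgs_MLP, make_layers, make_layers_MLP) and an input size chosen only for illustration, which must match what the selected cfg and cfg_MLP expect. With regression = True the classifier ends in nn.Linear(in_features, 1), so the forward pass returns one value per sample.

import torch

# Hypothetical example; VGG2_regression is the class defined in the diff above.
model = VGG2_regression(cfg="B", cfg_MLP="A", in_channels=1)

x = torch.randn(8, 1, 224, 224)  # batch of 8 single-channel images; spatial size is a placeholder
out = model(x)                   # norm -> features -> flatten -> classifier
print(out.shape)                 # expected torch.Size([8, 1]): one regression output per sample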