Commit aac3fc2b authored by Felix Bragman's avatar Felix Bragman

fixed bug in VGG where activation function was used on last FC layer

parent 40ea559a
Pipeline #12593 failed with stages
in 9 seconds
......@@ -19,6 +19,8 @@ class VGG16Net(BaseNet):
- No batch-norm in original paper and no drop-out
- Batch-norm is default in this implementation
- FC layers: FC + bias + activation function
- Last FC layer: FC + bias --> loss
- Preprocessing in paper: RGB image de-meaned based on training data
e.g. I[:, :, 0] = I[:, :, 0] - meanRed
......@@ -63,8 +65,6 @@ class VGG16Net(BaseNet):
{'name': 'fc_3', 'n_features': num_classes}]
def layer_op(self, images, is_training=True, layer_id=-1, **unused_kwargs):
#assert layer_util.check_spatial_dims(
# images, lambda x: x % 224 == 0)
layer_instances = []
for layer_iter, layer in enumerate(self.layers):
......@@ -93,7 +93,6 @@ class VGG16Net(BaseNet):
elif layer_iter == len(self.layers)-1:
fc_layer = FullyConnectedLayer(
n_output_chns=layer['n_features'],
acti_func=self.acti_func,
w_initializer=self.initializers['w'],
w_regularizer=self.regularizers['w'],
)
......@@ -147,7 +146,7 @@ class VGG16Net(BaseNet):
w_initializer=self.initializers['w'],
w_regularizer=self.regularizers['w'],
)
flow = fc_layer(flow, keep_prob=0.5)
flow = fc_layer(flow)
layer_instances.append((fc_layer, flow))
if is_training:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment