Commit bd71aa4

apply relu after batchnorm and dropout

1 parent 3cff315 commit bd71aa4

File tree

1 file changed: +3 −3 lines changed

assignment2/nn/cnn.py

Lines changed: 3 additions & 3 deletions
@@ -10,9 +10,6 @@ def cnn(idx, X, filters, kernel_size, is_training,
     bconv = tf.get_variable(f'bconv{idx}', shape=[filters])
     out = tf.nn.conv2d(X, filter=Wconv, strides=strides, padding=padding) + bconv
 
-    # ReLU Activation Layer
-    out = tf.nn.relu(out)
-
     # Spatial Batch Normalization Layer (trainable parameters, with scale and centering)
     # axis=3 channel axis
     if use_batchnorm:
@@ -21,4 +18,7 @@ def cnn(idx, X, filters, kernel_size, is_training,
     if dropout is not None:
         out = tf.layers.dropout(out, rate=dropout, training=is_training)
 
+    # ReLU Activation Layer
+    out = tf.nn.relu(out)
+
     return out, [Wconv, bconv]
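
For context, here is a minimal sketch of how the full cnn helper might read after this commit. Only the two hunks above are visible in the diff, so the Wconv definition, the batch-normalization call, and the default argument values are assumptions reconstructed from the surrounding lines (TF 1.x API):

import tensorflow as tf  # TF 1.x API, matching the diff

def cnn(idx, X, filters, kernel_size, is_training,
        strides=(1, 1, 1, 1), padding='SAME',
        use_batchnorm=True, dropout=None):
    # Convolution weights; shape assumed [k, k, in_channels, filters]
    in_channels = X.get_shape().as_list()[-1]
    Wconv = tf.get_variable(f'Wconv{idx}',
                            shape=[kernel_size, kernel_size, in_channels, filters])
    bconv = tf.get_variable(f'bconv{idx}', shape=[filters])
    out = tf.nn.conv2d(X, filter=Wconv, strides=strides, padding=padding) + bconv

    # Spatial Batch Normalization Layer (trainable parameters, with scale and centering)
    # axis=3 channel axis; exact call not shown in the diff, assumed here
    if use_batchnorm:
        out = tf.layers.batch_normalization(out, axis=3, training=is_training)

    if dropout is not None:
        out = tf.layers.dropout(out, rate=dropout, training=is_training)

    # ReLU Activation Layer -- applied after batchnorm and dropout, per this commit
    out = tf.nn.relu(out)

    return out, [Wconv, bconv]

The substantive effect of the reordering is on batch normalization, which now normalizes the raw conv pre-activation (the conv -> BN -> ReLU placement from the original batch-norm paper). For the ReLU/dropout pair the order matters little, since ReLU(dropout(x)) and dropout(ReLU(x)) coincide under inverted dropout's nonnegative mask-and-scale.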
