@@ -22,8 +22,8 @@ module nf_rnn_layer
2222 real , allocatable :: weights(:,:)
2323 real , allocatable :: recurrent(:,:)
2424 real , allocatable :: biases(:)
25- real , allocatable :: state(:)
2625 real , allocatable :: z(:) ! matmul(x, w) + b
26+ real , allocatable :: state(:)
2727 real , allocatable :: output(:) ! activation(z)
2828 real , allocatable :: gradient(:) ! matmul(w, db)
2929 real , allocatable :: dw(:,:) ! weight gradients
@@ -33,9 +33,9 @@ module nf_rnn_layer
3333
3434 contains
3535
36- ! procedure :: backward
36+ procedure :: backward
3737 procedure :: forward
38- ! procedure :: get_gradients
38+ procedure :: get_gradients
3939 procedure :: get_num_params
4040 procedure :: get_params
4141 procedure :: init
@@ -46,14 +46,14 @@ module nf_rnn_layer
4646 interface rnn_layer
4747 elemental module function rnn_layer_cons(output_size, activation) &
4848 result(res)
49- ! ! This function returns the `dense_layer ` instance.
49+ ! ! This function returns the `rnn_layer ` instance.
5050 integer , intent (in ) :: output_size
5151 ! ! Number of neurons in this layer
5252 class(activation_function), intent (in ) :: activation
5353 ! ! Instance of the activation_function to use;
5454 ! ! See nf_activation.f90 for available functions.
5555 type (rnn_layer) :: res
56- ! ! dense_layer instance
56+ ! ! rnn_layer instance
5757 end function rnn_layer_cons
5858 end interface rnn_layer
5959
@@ -74,7 +74,7 @@ end subroutine backward
7474 pure module subroutine forward(self, input)
7575 ! ! Propagate forward the layer.
7676 ! ! Calling this subroutine updates the values of a few data components
77- ! ! of `dense_layer ` that are needed for the backward pass.
77+ ! ! of `rnn_layer ` that are needed for the backward pass.
7878 class(rnn_layer), intent (in out ) :: self
7979 ! ! RNN layer instance
8080 real , intent (in ) :: input(:)
0 commit comments