@@ -4,7 +4,6 @@ module mod_layer
 
   use mod_activation
   use mod_kinds, only: ik, rk
-  use mod_random, only: randn
 
   implicit none
 
@@ -32,126 +31,67 @@ module mod_layer
   end type array2d
 
   interface layer_type
-    module procedure :: constructor
+    type(layer_type) module function constructor(this_size, next_size) result(layer)
+      !! Layer class constructor. this_size is the number of neurons in the layer.
+      !! next_size is the number of neurons in the next layer, used to allocate
+      !! the weights.
+      implicit none
+      integer(ik), intent(in) :: this_size, next_size
+    end function constructor
  end interface layer_type
 
   interface array1d
-    module procedure :: array1d_constructor
+    pure type(array1d) module function array1d_constructor(length) result(a)
+      !! Overloads the default type constructor.
+      implicit none
+      integer(ik), intent(in) :: length
+    end function array1d_constructor
   end interface array1d
 
-  interface array2d
-    module procedure :: array2d_constructor
+  interface array2d
+    pure type(array2d) module function array2d_constructor(dims) result(a)
+      !! Overloads the default type constructor.
+      integer(ik), intent(in) :: dims(2)
+    end function array2d_constructor
   end interface array2d
-
-contains
-
-  type(layer_type) function constructor(this_size, next_size) result(layer)
-    !! Layer class constructor. this_size is the number of neurons in the layer.
-    !! next_size is the number of neurons in the next layer, used to allocate
-    !! the weights.
-    integer(ik), intent(in) :: this_size, next_size
-    allocate(layer % a(this_size))
-    allocate(layer % z(this_size))
-    layer % a = 0
-    layer % z = 0
-    layer % w = randn(this_size, next_size) / this_size
-    layer % b = randn(this_size)
-  end function constructor
-
-  pure type(array1d) function array1d_constructor(length) result(a)
-    !! Overloads the default type constructor.
-    integer(ik), intent(in) :: length
-    allocate(a % array(length))
-    a % array = 0
-  end function array1d_constructor
-
-  pure type(array2d) function array2d_constructor(dims) result(a)
-    !! Overloads the default type constructor.
-    integer(ik), intent(in) :: dims(2)
-    allocate(a % array(dims(1), dims(2)))
-    a % array = 0
-  end function array2d_constructor
-
-  pure subroutine db_init(db, dims)
-    !! Initialises biases structure.
-    type(array1d), allocatable, intent(in out) :: db(:)
-    integer(ik), intent(in) :: dims(:)
-    integer(ik) :: n, nm
-    nm = size(dims)
-    allocate(db(nm))
-    do n = 1, nm - 1
-      db(n) = array1d(dims(n))
-    end do
-    db(n) = array1d(dims(n))
-  end subroutine db_init
-
-  pure subroutine dw_init(dw, dims)
-    !! Initialises weights structure.
-    type(array2d), allocatable, intent(in out) :: dw(:)
-    integer(ik), intent(in) :: dims(:)
-    integer(ik) :: n, nm
-    nm = size(dims)
-    allocate(dw(nm))
-    do n = 1, nm - 1
-      dw(n) = array2d(dims(n:n+1))
-    end do
-    dw(n) = array2d([dims(n), 1])
-  end subroutine dw_init
-
-  subroutine db_co_sum(db)
-    !! Performs a collective sum of bias tendencies.
-    type(array1d), allocatable, intent(in out) :: db(:)
-    integer(ik) :: n
-    do n = 2, size(db)
-#ifdef CAF
-      call co_sum(db(n) % array)
-#endif
-    end do
-  end subroutine db_co_sum
-
-  subroutine dw_co_sum(dw)
-    !! Performs a collective sum of weights tendencies.
-    type(array2d), allocatable, intent(in out) :: dw(:)
-    integer(ik) :: n
-    do n = 1, size(dw) - 1
-#ifdef CAF
-      call co_sum(dw(n) % array)
-#endif
-    end do
-  end subroutine dw_co_sum
-
-  pure elemental subroutine set_activation(self, activation)
-    !! Sets the activation function. Input string must match one of
-    !! provided activation functions, otherwise it defaults to sigmoid.
-    !! If activation not present, defaults to sigmoid.
-    class(layer_type), intent(in out) :: self
-    character(len=*), intent(in) :: activation
-    select case(trim(activation))
-      case('gaussian')
-        self % activation => gaussian
-        self % activation_prime => gaussian_prime
-        self % activation_str = 'gaussian'
-      case('relu')
-        self % activation => relu
-        self % activation_prime => relu_prime
-        self % activation_str = 'relu'
-      case('sigmoid')
-        self % activation => sigmoid
-        self % activation_prime => sigmoid_prime
-        self % activation_str = 'sigmoid'
-      case('step')
-        self % activation => step
-        self % activation_prime => step_prime
-        self % activation_str = 'step'
-      case('tanh')
-        self % activation => tanhf
-        self % activation_prime => tanh_prime
-        self % activation_str = 'tanh'
-      case default
-        self % activation => sigmoid
-        self % activation_prime => sigmoid_prime
-        self % activation_str = 'sigmoid'
-    end select
-  end subroutine set_activation
+
+  interface
+
+    pure module subroutine db_init(db, dims)
+      !! Initialises biases structure.
+      implicit none
+      type(array1d), allocatable, intent(in out) :: db(:)
+      integer(ik), intent(in) :: dims(:)
+    end subroutine db_init
+
+    pure module subroutine dw_init(dw, dims)
+      !! Initialises weights structure.
+      implicit none
+      type(array2d), allocatable, intent(in out) :: dw(:)
+      integer(ik), intent(in) :: dims(:)
+    end subroutine dw_init
+
+    module subroutine db_co_sum(db)
+      !! Performs a collective sum of bias tendencies.
+      implicit none
+      type(array1d), allocatable, intent(in out) :: db(:)
+    end subroutine db_co_sum
+
+    module subroutine dw_co_sum(dw)
+      !! Performs a collective sum of weights tendencies.
+      implicit none
+      type(array2d), allocatable, intent(in out) :: dw(:)
+    end subroutine dw_co_sum
+
+    pure elemental module subroutine set_activation(self, activation)
+      !! Sets the activation function. Input string must match one of
+      !! provided activation functions, otherwise it defaults to sigmoid.
+      !! If activation not present, defaults to sigmoid.
+      implicit none
+      class(layer_type), intent(in out) :: self
+      character(len=*), intent(in) :: activation
+    end subroutine set_activation
+
+  end interface
 
 end module mod_layer
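
A change like this leaves only interface declarations in mod_layer; the removed procedure bodies would then live in a companion submodule that implements those interfaces. Below is a minimal sketch of what that submodule could look like for the constructor alone. The submodule name mod_layer_submodule and the relocation of the randn import from mod_random are assumptions for illustration; neither is shown in this diff, and the body simply reuses the code removed above.

submodule (mod_layer) mod_layer_submodule

  ! Assumed: randn is still needed by the constructor body, so the
  ! import removed from the parent module moves here.
  use mod_random, only: randn

  implicit none

contains

  module function constructor(this_size, next_size) result(layer)
    ! Allocate activations and pre-activations, zero them, and draw
    ! random initial weights and biases, as in the removed body.
    integer(ik), intent(in) :: this_size, next_size
    type(layer_type) :: layer
    allocate(layer % a(this_size))
    allocate(layer % z(this_size))
    layer % a = 0
    layer % z = 0
    layer % w = randn(this_size, next_size) / this_size
    layer % b = randn(this_size)
  end function constructor

  ! The remaining procedures (array1d_constructor, array2d_constructor,
  ! db_init, dw_init, db_co_sum, dw_co_sum, set_activation) would follow
  ! the same pattern, reusing the bodies removed in this diff.

end submodule mod_layer_submodule

The shorter form "module procedure constructor" / "end procedure constructor" would also work and avoids repeating the argument declarations from the interface body.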