@@ -61,6 +61,7 @@ type(network_type) function net_constructor(dims, activation) result(net)
     call net % sync(1)
   end function net_constructor
 
+
   pure real(rk) function accuracy(self, x, y)
     ! Given input x and output y, evaluates the position of the
     ! maximum value of the output and returns the number of matches
@@ -77,6 +78,7 @@ pure real(rk) function accuracy(self, x, y)
     accuracy = real(good) / size(x, dim=2)
   end function accuracy
 
+
   pure subroutine backprop(self, y, dw, db)
     ! Applies a backward propagation through the network
     ! and returns the weight and bias gradients.
@@ -107,6 +109,7 @@ pure subroutine backprop(self, y, dw, db)
 
   end subroutine backprop
 
+
   pure subroutine fwdprop(self, x)
     ! Performs the forward propagation and stores arguments to activation
     ! functions and activations themselves for use in backprop.
@@ -122,6 +125,7 @@ pure subroutine fwdprop(self, x)
     end associate
   end subroutine fwdprop
 
+
   subroutine init(self, dims)
     ! Allocates and initializes the layers with given dimensions dims.
     class(network_type), intent(in out) :: self
@@ -137,6 +141,7 @@ subroutine init(self, dims)
     self % layers(size(dims)) % w = 0
   end subroutine init
 
+
   subroutine load(self, filename)
     ! Loads the network from file.
     class(network_type), intent(in out) :: self
@@ -145,30 +150,32 @@ subroutine load(self, filename)
     integer(ik), allocatable :: dims(:)
     character(len=100) :: buffer ! activation string
     open(newunit=fileunit, file=filename, status='old', action='read')
-    read(fileunit, fmt=*) num_layers
+    read(fileunit, *) num_layers
     allocate(dims(num_layers))
-    read(fileunit, fmt=*) dims
+    read(fileunit, *) dims
     call self % init(dims)
     do n = 1, num_layers
-      read(fileunit, fmt=*) layer_idx, buffer
+      read(fileunit, *) layer_idx, buffer
       call self % layers(layer_idx) % set_activation(trim(buffer))
     end do
     do n = 2, size(self % dims)
-      read(fileunit, fmt=*) self % layers(n) % b
+      read(fileunit, *) self % layers(n) % b
     end do
     do n = 1, size(self % dims) - 1
-      read(fileunit, fmt=*) self % layers(n) % w
+      read(fileunit, *) self % layers(n) % w
     end do
     close(fileunit)
   end subroutine load
 
+
   pure real(rk) function loss(self, x, y)
     ! Given input x and expected output y, returns the loss of the network.
     class(network_type), intent(in) :: self
     real(rk), intent(in) :: x(:), y(:)
     loss = 0.5 * sum((y - self % output(x))**2) / size(x)
   end function loss
 
+
   pure function output_single(self, x) result(a)
     ! Use forward propagation to compute the output of the network.
     ! This specific procedure is for a single sample of 1-d input data.
@@ -184,6 +191,7 @@ pure function output_single(self, x) result(a)
     end associate
   end function output_single
 
+
   pure function output_batch(self, x) result(a)
     ! Use forward propagation to compute the output of the network.
     ! This specific procedure is for a batch of 1-d input data.
@@ -197,6 +205,7 @@ pure function output_batch(self, x) result(a)
     end do
   end function output_batch
 
+
   subroutine save(self, filename)
     ! Saves the network to a file.
     class(network_type), intent(in out) :: self
@@ -217,6 +226,7 @@ subroutine save(self, filename)
     close(fileunit)
   end subroutine save
 
+
   pure subroutine set_activation_equal(self, activation)
     ! A thin wrapper around layer % set_activation().
     ! This method can be used to set an activation function
@@ -226,6 +236,7 @@ pure subroutine set_activation_equal(self, activation)
     call self % layers(:) % set_activation(activation)
   end subroutine set_activation_equal
 
+
   pure subroutine set_activation_layers(self, activation)
     ! A thin wrapper around layer % set_activation().
     ! This method can be used to set different activation functions
@@ -250,6 +261,7 @@ subroutine sync(self, image)
     end do layers
   end subroutine sync
 
+
   subroutine train_batch(self, x, y, eta)
     ! Trains a network using input data x and output data y,
     ! and learning rate eta. The learning rate is normalized
@@ -290,6 +302,7 @@ subroutine train_batch(self, x, y, eta)
 
   end subroutine train_batch
 
+
   subroutine train_epochs(self, x, y, eta, num_epochs, batch_size)
     ! Trains for num_epochs epochs with mini-batches of size equal to batch_size.
     class(network_type), intent(in out) :: self
@@ -320,6 +333,7 @@ subroutine train_epochs(self, x, y, eta, num_epochs, batch_size)
 
   end subroutine train_epochs
 
+
   pure subroutine train_single(self, x, y, eta)
     ! Trains a network using a single set of input data x and output data y,
     ! and learning rate eta.
@@ -332,6 +346,7 @@ pure subroutine train_single(self, x, y, eta)
     call self % update(dw, db, eta)
   end subroutine train_single
 
+
   pure subroutine update(self, dw, db, eta)
     ! Updates network weights and biases with gradients dw and db,
     ! scaled by learning rate eta.
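For reference, a minimal usage sketch of the network_type procedures touched above. This is a sketch under stated assumptions, not part of the diff: the module names (mod_kinds, mod_network), the availability of the network_type structure constructor wrapping net_constructor(dims, activation), and the binding names of the type-bound procedures are not visible in this hunk and are assumed from the procedure names; the one-sample-per-column data layout follows size(x, dim=2) in accuracy.

program network_example
  ! Assumed module/kind names; only the procedure names appear in the diff above.
  use mod_kinds, only: ik, rk
  use mod_network, only: network_type
  implicit none
  type(network_type) :: net
  integer(ik) :: dims(3)
  real(rk) :: x(3, 100), y(2, 100)

  call random_number(x)
  call random_number(y)

  ! Assumed to invoke net_constructor(dims, activation), which ends with
  ! call net % sync(1) as shown in the first hunk.
  dims = [3, 5, 2]
  net = network_type(dims, 'sigmoid')

  ! train_epochs(x, y, eta, num_epochs, batch_size): mini-batch training.
  call net % train_epochs(x, y, 0.1_rk, num_epochs=10, batch_size=20)

  ! loss(x, y) on a single sample: 0.5 * sum((y - output(x))**2) / size(x).
  print *, 'loss     =', net % loss(x(:, 1), y(:, 1))
  ! accuracy(x, y) counts matching argmax positions over size(x, dim=2) samples.
  print *, 'accuracy =', net % accuracy(x, y)

  ! save/load use the list-directed reads simplified in this commit
  ! (read(fileunit, *) instead of read(fileunit, fmt=*)).
  call net % save('net.txt')
  call net % load('net.txt')
end program network_example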