 module nf_conv2d_layer
 
-  !! This is a placeholder module that will later define a concrete conv2d
-  !! layer type.
+  !! This module provides a 2-d convolutional `conv2d_layer` type.
 
   use nf_base_layer, only: base_layer
   implicit none
@@ -14,12 +13,12 @@ module nf_conv2d_layer
     integer :: width
     integer :: height
     integer :: channels
-    integer :: window_size
+    integer :: kernel_size
     integer :: filters
 
-    real, allocatable :: biases(:) ! as many as there are filters
-    real, allocatable :: kernel(:,:,:,:)
-    real, allocatable :: output(:,:,:)
+    real, allocatable :: biases(:) ! size(filters)
+    real, allocatable :: kernel(:,:,:,:) ! filters x channels x window x window
+    real, allocatable :: output(:,:,:) ! filters x output_width x output_height
 
   contains
 
@@ -30,55 +29,46 @@ module nf_conv2d_layer
   end type conv2d_layer
 
   interface conv2d_layer
-    module procedure :: conv2d_layer_cons
+    pure module function conv2d_layer_cons(filters, kernel_size, activation) &
+      result(res)
+      !! `conv2d_layer` constructor function
+      integer, intent(in) :: filters
+      integer, intent(in) :: kernel_size
+      character(*), intent(in) :: activation
+      type(conv2d_layer) :: res
+    end function conv2d_layer_cons
   end interface conv2d_layer
 
-contains
-
-  pure function conv2d_layer_cons(window_size, filters, activation) result(res)
-    integer, intent(in) :: window_size
-    integer, intent(in) :: filters
-    character(*), intent(in) :: activation
-    type(conv2d_layer) :: res
-    res % window_size = window_size
-    res % filters = filters
-    call res % set_activation(activation)
-  end function conv2d_layer_cons
-
-
-  subroutine init(self, input_shape)
-    class(conv2d_layer), intent(in out) :: self
-    integer, intent(in) :: input_shape(:)
-
-    self % width = input_shape(1) - self % window_size + 1
-    self % height = input_shape(2) - self % window_size + 1
-    self % channels = input_shape(3)
-
-    allocate(self % output(self % width, self % height, self % filters))
-    self % output = 0
-
-    allocate(self % kernel(self % window_size, self % window_size, &
-      self % channels, self % filters))
-    self % kernel = 0 ! TODO 4-d randn
-
-    allocate(self % biases(self % filters))
-    self % biases = 0
-
-  end subroutine init
-
-
-  subroutine forward(self, input)
-    class(conv2d_layer), intent(in out) :: self
-    real, intent(in) :: input(:,:,:)
-    print *, 'Warning: conv2d forward pass not implemented'
-  end subroutine forward
-
-
-  subroutine backward(self, input, gradient)
-    class(conv2d_layer), intent(in out) :: self
-    real, intent(in) :: input(:,:,:)
-    real, intent(in) :: gradient(:,:,:)
-    print *, 'Warning: conv2d backward pass not implemented'
-  end subroutine backward
+  interface
+
+    module subroutine init(self, input_shape)
+      !! Initialize the layer data structures.
+      !!
+      !! This is a deferred procedure from the `base_layer` abstract type.
+      class(conv2d_layer), intent(in out) :: self
+        !! A `conv2d_layer` instance
+      integer, intent(in) :: input_shape(:)
+        !! Input layer dimensions
+    end subroutine init
+
+    pure module subroutine forward(self, input)
+      !! Apply a forward pass on the `conv2d` layer.
+      class(conv2d_layer), intent(in out) :: self
+        !! A `conv2d_layer` instance
+      real, intent(in) :: input(:,:,:)
+        !! Input data
+    end subroutine forward
+
+    module subroutine backward(self, input, gradient)
+      !! Apply a backward pass on the `conv2d` layer.
+      class(conv2d_layer), intent(in out) :: self
+        !! A `conv2d_layer` instance
+      real, intent(in) :: input(:,:,:)
+        !! Input data (previous layer)
+      real, intent(in) :: gradient(:,:,:)
+        !! Gradient (next layer)
+    end subroutine backward
+
+  end interface
 
 end module nf_conv2d_layer
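
For context, a minimal usage sketch of the constructor and `init` interfaces introduced by this diff. The program name, the 'relu' activation string, the filter and kernel values, and the [width, height, channels] ordering of `input_shape` (taken from the removed implementation) are illustrative assumptions; the actual procedure bodies are expected to live in a separate submodule and are not part of this change.

  program conv2d_demo
    use nf_conv2d_layer, only: conv2d_layer
    implicit none
    type(conv2d_layer) :: conv
    ! Construct a layer with 32 filters and a 3 x 3 kernel via the generic
    ! constructor interface declared above (argument values are hypothetical).
    conv = conv2d_layer(filters=32, kernel_size=3, activation='relu')
    ! Initialize for a 28 x 28 single-channel input; with a valid (no padding)
    ! convolution the output spatial extent would be 28 - kernel_size + 1 = 26.
    call conv % init([28, 28, 1])
  end program conv2d_demo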