Commit b716099 (merge, 2 parents: 326a612 + 674fcfa)

resolve conflicts from master

6 files changed (+74, -11 lines)

CMakeLists.txt

Lines changed: 14 additions & 1 deletion
@@ -31,6 +31,13 @@ else()
   add_definitions(-DREAL32)
 endif()
 
+if(SERIAL)
+  message(STATUS "Configuring build for serial execution")
+else()
+  message(STATUS "Configuring build for parallel execution")
+  add_definitions(-DCAF)
+endif()
+
 # compiler flags for gfortran
 if(CMAKE_Fortran_COMPILER_ID MATCHES GNU)
 
@@ -52,9 +59,15 @@ endif()
 
 # compiler flags for ifort
 if(CMAKE_Fortran_COMPILER_ID MATCHES Intel)
-  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -coarray=shared -fpp -assume byterecl,realloc_lhs -heap-arrays")
+
+  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fpp -assume byterecl,realloc_lhs -heap-arrays")
   set(CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -C -traceback")
   set(CMAKE_Fortran_FLAGS_RELEASE "-O3")
+
+  if(NOT SERIAL)
+    set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -coarray=shared")
+  endif()
+
 endif()
 
 # compiler flags for Cray ftn
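The CAF symbol defined here for the parallel build is consumed by the Fortran sources through the preprocessor (see mod_layer.f90 and mod_network.f90 below): coarray collectives are compiled only when CAF is defined, so a serial build needs no coarray support. A minimal sketch of that pattern, assuming preprocessing is enabled (-fpp for ifort, -cpp for gfortran); the module and subroutine names are illustrative only, not part of this commit:

module demo_reduce
  implicit none
contains
  subroutine sum_across_images(x)
    ! In a parallel (coarray) build, sum x over all images in place;
    ! in a serial build CAF is undefined and this becomes a no-op.
    real, intent(in out) :: x(:)
#ifdef CAF
    call co_sum(x)
#endif
  end subroutine sum_across_images
end module demo_reduce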

CONTRIBUTORS.md

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+# Contributors
+
+* @ivan-pi
+* @jvdp1
+* @milancurcic
+* @pirpyn
+* @scivision

LICENSE

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018 Milan Curcic
+Copyright (c) 2018-2019 Milan Curcic and neural-fortran contributors
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

src/lib/mod_layer.f90

Lines changed: 12 additions & 1 deletion
@@ -18,6 +18,7 @@ module mod_layer
     real(rk), allocatable :: z(:) ! arg. to activation function
     procedure(activation_function), pointer, nopass :: activation => null()
     procedure(activation_function), pointer, nopass :: activation_prime => null()
+    character(len=:), allocatable :: activation_str ! activation character string
   contains
     procedure, public, pass(self) :: set_activation
   end type layer_type
@@ -102,7 +103,9 @@ subroutine db_co_sum(db)
     type(array1d), allocatable, intent(in out) :: db(:)
     integer(ik) :: n
     do n = 2, size(db)
+#ifdef CAF
      call co_sum(db(n) % array)
+#endif
     end do
   end subroutine db_co_sum
 
@@ -111,11 +114,13 @@ subroutine dw_co_sum(dw)
     type(array2d), allocatable, intent(in out) :: dw(:)
     integer(ik) :: n
     do n = 1, size(dw) - 1
+#ifdef CAF
      call co_sum(dw(n) % array)
+#endif
     end do
   end subroutine dw_co_sum
 
-  pure subroutine set_activation(self, activation)
+  pure elemental subroutine set_activation(self, activation)
     ! Sets the activation function. Input string must match one of
     ! provided activation functions, otherwise it defaults to sigmoid.
     ! If activation not present, defaults to sigmoid.
@@ -125,21 +130,27 @@ pure subroutine set_activation(self, activation)
       case('gaussian')
         self % activation => gaussian
         self % activation_prime => gaussian_prime
+        self % activation_str = 'gaussian'
       case('relu')
         self % activation => relu
         self % activation_prime => relu_prime
+        self % activation_str = 'relu'
       case('sigmoid')
         self % activation => sigmoid
         self % activation_prime => sigmoid_prime
+        self % activation_str = 'sigmoid'
       case('step')
         self % activation => step
         self % activation_prime => step_prime
+        self % activation_str = 'step'
       case('tanh')
         self % activation => tanhf
         self % activation_prime => tanh_prime
+        self % activation_str = 'tanh'
       case default
         self % activation => sigmoid
         self % activation_prime => sigmoid_prime
+        self % activation_str = 'sigmoid'
     end select
   end subroutine set_activation
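Declaring set_activation elemental is what lets the network-level wrappers in mod_network.f90 below apply it to the whole layers(:) array in one statement, with either a single name broadcast to every layer or a conforming array of names. A short sketch under that assumption; the layer array and activation names here are illustrative only:

program demo_layer_activation
  use mod_layer, only: layer_type
  implicit none
  type(layer_type) :: layers(3)
  integer :: n
  ! Elemental call: the scalar name is broadcast to all three layers.
  call layers(:) % set_activation('tanh')
  ! Elemental call with a conforming array: one name per layer.
  call layers(:) % set_activation([character(len=10) :: 'relu', 'tanh', 'gaussian'])
  do n = 1, size(layers)
    print *, 'layer', n, 'uses ', layers(n) % activation_str
  end do
end program demo_layer_activation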

src/lib/mod_network.f90

Lines changed: 26 additions & 8 deletions
@@ -26,14 +26,16 @@ module mod_network
     procedure, public, pass(self) :: output_batch
     procedure, public, pass(self) :: output_single
     procedure, public, pass(self) :: save
-    procedure, public, pass(self) :: set_activation
+    procedure, public, pass(self) :: set_activation_equal
+    procedure, public, pass(self) :: set_activation_layers
     procedure, public, pass(self) :: sync
     procedure, public, pass(self) :: train_batch
     procedure, public, pass(self) :: train_epochs
     procedure, public, pass(self) :: train_single
     procedure, public, pass(self) :: update
 
     generic, public :: output => output_batch, output_single
+    generic, public :: set_activation => set_activation_equal, set_activation_layers
     generic, public :: train => train_batch, train_epochs, train_single
 
   end type network_type
@@ -139,13 +141,18 @@ subroutine load(self, filename)
     ! Loads the network from file.
     class(network_type), intent(in out) :: self
     character(len=*), intent(in) :: filename
-    integer(ik) :: fileunit, n, num_layers
+    integer(ik) :: fileunit, n, num_layers, layer_idx
     integer(ik), allocatable :: dims(:)
+    character(len=100) :: buffer ! activation string
     open(newunit=fileunit, file=filename, status='old', action='read')
     read(fileunit, fmt=*) num_layers
     allocate(dims(num_layers))
     read(fileunit, fmt=*) dims
     call self % init(dims)
+    do n = 1, num_layers
+      read(fileunit, fmt=*) layer_idx, buffer
+      call self % layers(layer_idx) % set_activation(trim(buffer))
+    end do
     do n = 2, size(self % dims)
       read(fileunit, fmt=*) self % layers(n) % b
     end do
@@ -198,6 +205,9 @@ subroutine save(self, filename)
     open(newunit=fileunit, file=filename)
     write(fileunit, fmt=*) size(self % dims)
     write(fileunit, fmt=*) self % dims
+    do n = 1, size(self % dims)
+      write(fileunit, fmt=*) n, self % layers(n) % activation_str
+    end do
     do n = 2, size(self % dims)
       write(fileunit, fmt=*) self % layers(n) % b
     end do
@@ -207,17 +217,23 @@ subroutine save(self, filename)
     close(fileunit)
   end subroutine save
 
-  pure subroutine set_activation(self, activation)
+  pure subroutine set_activation_equal(self, activation)
     ! A thin wrapper around layer % set_activation().
     ! This method can be used to set an activation function
     ! for all layers at once.
     class(network_type), intent(in out) :: self
     character(len=*), intent(in) :: activation
-    integer :: n
-    do concurrent(n = 1:size(self % layers))
-      call self % layers(n) % set_activation(activation)
-    end do
-  end subroutine set_activation
+    call self % layers(:) % set_activation(activation)
+  end subroutine set_activation_equal
+
+  pure subroutine set_activation_layers(self, activation)
+    ! A thin wrapper around layer % set_activation().
+    ! This method can be used to set different activation functions
+    ! for each layer separately.
+    class(network_type), intent(in out) :: self
+    character(len=*), intent(in) :: activation(size(self % layers))
+    call self % layers(:) % set_activation(activation)
+  end subroutine set_activation_layers
 
   subroutine sync(self, image)
     ! Broadcasts network weights and biases from
@@ -227,8 +243,10 @@ subroutine sync(self, image)
     integer(ik) :: n
     if (num_images() == 1) return
     layers: do n = 1, size(self % dims)
+#ifdef CAF
       call co_broadcast(self % layers(n) % b, image)
      call co_broadcast(self % layers(n) % w, image)
+#endif
     end do layers
   end subroutine sync
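With the new generic binding, the same set_activation name accepts either one activation for every layer or one per layer, and because the activation strings are now written and read alongside the biases and weights, the per-layer choice survives a save/load round trip (exercised by the updated test below). A usage sketch; the network shape and file name are arbitrary examples, not part of this commit:

program demo_network_activation
  use mod_network, only: network_type
  implicit none
  type(network_type) :: net
  net = network_type([3, 5, 2])
  ! Resolves to set_activation_equal: one function for all layers.
  call net % set_activation('tanh')
  ! Resolves to set_activation_layers: one function per layer.
  call net % set_activation([character(len=10) :: 'relu', 'tanh', 'sigmoid'])
  ! The activation names are stored in the file together with the
  ! biases and weights, so load() restores them as well.
  call net % save('demo_network.dat')
end program demo_network_activation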

src/tests/test_network_save.f90

Lines changed: 14 additions & 0 deletions
@@ -6,6 +6,7 @@ program test_network_save
   print *, 'Initializing 2 networks with random weights and biases'
   net1 = network_type([768, 30, 10])
   net2 = network_type([768, 30, 10])
+
   print *, 'Save network 1 into file'
   call net1 % save('test_network.dat')
   call net2 % load('test_network.dat')
@@ -15,4 +16,17 @@ program test_network_save
       all(net1 % layers(n) % w == net2 % layers(n) % w),&
       ', biases equal:', all(net1 % layers(n) % b == net2 % layers(n) % b)
   end do
+  print *, ''
+
+  print *, 'Setting different activation functions for each layer of network 1'
+  call net1 % set_activation([character(len=10) :: 'sigmoid', 'tanh', 'gaussian'])
+  print *, 'Save network 1 into file'
+  call net1 % save('test_network.dat')
+  call net2 % load('test_network.dat')
+  print *, 'Load network 2 from file'
+  do n = 1, size(net1 % layers)
+    print *, 'Layer ', n, ', activation functions equal:',&
+      associated(net1 % layers(n) % activation, net2 % layers(n) % activation),&
+      '(network 1: ', net1 % layers(n) % activation_str, ', network 2: ', net2 % layers(n) % activation_str,')'
+  end do
 end program test_network_save
