Skip to content

Commit 472164e

Browse files
committed
Add softmax function
1 parent 8013d0d commit 472164e

File tree

2 files changed

+24
-1
lines changed

2 files changed

+24
-1
lines changed

src/nf/nf_activation.f90

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ module nf_activation
1212
public :: gaussian, gaussian_prime
1313
public :: relu, relu_prime
1414
public :: sigmoid, sigmoid_prime
15+
public :: softmax, softmax_prime
1516
public :: softplus, softplus_prime
1617
public :: step, step_prime
1718
public :: tanhf, tanh_prime
@@ -103,6 +104,21 @@ pure function sigmoid_prime(x) result(res)
103104
res = sigmoid(x) * (1 - sigmoid(x))
104105
end function sigmoid_prime
105106

107+
pure function softmax(x) result(res)
  !! Softmax activation function.
  !! Subtracting maxval(x) before exponentiation is the standard
  !! max-shift trick: it leaves the result unchanged but prevents
  !! overflow in exp() for large inputs.
  real, intent(in) :: x(:)
  real :: res(size(x))
  real :: shifted_exp(size(x))
  shifted_exp = exp(x - maxval(x))
  res = shifted_exp / sum(shifted_exp)
end function softmax
114+
115+
pure function softmax_prime(x) result(res)
  !! Derivative of the softmax activation function.
  !!
  !! NOTE: returns only the diagonal of the softmax Jacobian,
  !! d softmax(x)_i / d x_i = s_i * (1 - s_i); the off-diagonal
  !! terms -s_i * s_j are omitted. This keeps the elementwise
  !! interface shared by the other *_prime activation functions,
  !! but is exact only when paired with a matching loss
  !! (e.g. cross-entropy applied directly to the softmax output).
  real, intent(in) :: x(:)
  real :: res(size(x))
  ! Evaluate softmax once instead of twice; the original computed
  ! softmax(x) * (1 - softmax(x)), repeating the exp/sum/maxval work.
  res = softmax(x)
  res = res * (1 - res)
end function softmax_prime
121+
106122
pure function softplus(x) result(res)
107123
! Softplus activation function.
108124
real, intent(in) :: x(:)

src/nf/nf_base_layer_submodule.f90

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
gaussian, gaussian_prime, &
77
relu, relu_prime, &
88
sigmoid, sigmoid_prime, &
9+
softmax, softmax_prime, &
910
softplus, softplus_prime, &
1011
step, step_prime, &
1112
tanhf, tanh_prime
@@ -46,6 +47,11 @@ elemental module subroutine set_activation(self, activation)
4647
self % activation_prime => sigmoid_prime
4748
self % activation_name = 'sigmoid'
4849

50+
case('softmax')
51+
self % activation => softmax
52+
self % activation_prime => softmax_prime
53+
self % activation_name = 'softmax'
54+
4955
case('softplus')
5056
self % activation => softplus
5157
self % activation_prime => softplus_prime
@@ -64,7 +70,8 @@ elemental module subroutine set_activation(self, activation)
6470
case default
6571
error stop 'Activation must be one of: ' // &
6672
'"elu", "exponential", "gaussian", "relu", ' // &
67-
'"sigmoid", "softplus", "step", or "tanh".'
73+
'"sigmoid", "softmax", "softplus", "step", ' // &
74+
'or "tanh".'
6875

6976
end select
7077

0 commit comments

Comments
 (0)