Added Leaky ReLU activation function (1D & 3D) (#123)
* Added Leaky ReLU activation function (1D & 3D)

* Added Leaky ReLU prime & respective cases

* Missing comma

* activation functions with optional alpha

* activation functions with optional alpha

* suggested changes

* Error out if leaky ReLU is requested

---------

Co-authored-by: Milan Curcic <[email protected]>
Spnetic-5 and milancurcic authored Mar 17, 2023
1 parent 7039276 commit f328c8d
Showing 6 changed files with 149 additions and 57 deletions.
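
For context, leaky ReLU passes positive inputs through unchanged and scales negative inputs by a small slope alpha, which keeps a nonzero gradient for negative inputs. A minimal standalone sketch of the behavior (not part of this commit; the program name and the slope value 0.3 are illustrative assumptions):

program demo_leaky_relu
  implicit none
  real :: x(5), y(5)
  x = [-2., -1., 0., 1., 2.]
  ! For 0 < alpha < 1, max(alpha*x, x) returns x where x > 0
  ! and alpha*x elsewhere; here alpha = 0.3.
  y = max(0.3 * x, x)
  print *, y   ! prints -0.6 -0.3 0.0 1.0 2.0
end program demo_leaky_relu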
79 changes: 59 additions & 20 deletions src/nf/nf_activation_1d.f90
@@ -12,15 +12,17 @@ module nf_activation_1d
public :: gaussian, gaussian_prime
public :: linear, linear_prime
public :: relu, relu_prime
+public :: leaky_relu, leaky_relu_prime
public :: sigmoid, sigmoid_prime
public :: softmax, softmax_prime
public :: softplus, softplus_prime
public :: step, step_prime
public :: tanhf, tanh_prime

interface
-pure function activation_function(x)
+pure function activation_function(x, alpha)
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: activation_function(size(x))
end function activation_function
end interface
@@ -30,7 +32,7 @@ end function activation_function
pure function elu(x, alpha) result(res)
! Exponential Linear Unit (ELU) activation function.
real, intent(in) :: x(:)
-real, intent(in) :: alpha
+real, intent(in), optional :: alpha
real :: res(size(x))
where (x >= 0)
res = x
@@ -43,7 +45,7 @@ pure function elu_prime(x, alpha) result(res)
! First derivative of the Exponential Linear Unit (ELU)
! activation function.
real, intent(in) :: x(:)
-real, intent(in) :: alpha
+real, intent(in), optional :: alpha
real :: res(size(x))
where (x >= 0)
res = 1
@@ -52,51 +54,58 @@ pure function elu_prime(x, alpha) result(res)
end where
end function elu_prime

-pure function exponential(x) result(res)
+pure function exponential(x, alpha) result(res)
! Exponential activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(x)
end function exponential

-pure function gaussian(x) result(res)
+pure function gaussian(x, alpha) result(res)
! Gaussian activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(-x**2)
end function gaussian

-pure function gaussian_prime(x) result(res)
+pure function gaussian_prime(x, alpha) result(res)
! First derivative of the Gaussian activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = -2 * x * gaussian(x)
end function gaussian_prime

-pure function linear(x) result(res)
+pure function linear(x, alpha) result(res)
! Linear activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = x
end function linear

-pure function linear_prime(x) result(res)
+pure function linear_prime(x, alpha) result(res)
! First derivative of the linear activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = 1
end function linear_prime

-pure function relu(x) result(res)
+pure function relu(x, alpha) result(res)
!! Rectified Linear Unit (ReLU) activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = max(0., x)
end function relu

-pure function relu_prime(x) result(res)
+pure function relu_prime(x, alpha) result(res)
! First derivative of the Rectified Linear Unit (ReLU) activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
where (x > 0)
res = 1
@@ -105,52 +114,79 @@ pure function relu_prime(x) result(res)
end where
end function relu_prime

-pure function sigmoid(x) result(res)
+pure function leaky_relu(x, alpha) result(res)
+!! Leaky Rectified Linear Unit (Leaky ReLU) activation function.
+real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
+real :: res(size(x))
+res = max(alpha*x, x)
+end function leaky_relu
+
+pure function leaky_relu_prime(x, alpha) result(res)
+! First derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
+real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
+real :: res(size(x))
+where (x > 0)
+res = 1
+elsewhere
+res = alpha
+end where
+end function leaky_relu_prime
+
+pure function sigmoid(x, alpha) result(res)
! Sigmoid activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = 1 / (1 + exp(-x))
end function sigmoid

-pure function sigmoid_prime(x) result(res)
+pure function sigmoid_prime(x, alpha) result(res)
! First derivative of the sigmoid activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = sigmoid(x) * (1 - sigmoid(x))
end function sigmoid_prime

-pure function softmax(x) result(res)
+pure function softmax(x, alpha) result(res)
!! Softmax activation function
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(x - maxval(x))
res = res / sum(res)
end function softmax

-pure function softmax_prime(x) result(res)
+pure function softmax_prime(x, alpha) result(res)
!! Derivative of the softmax activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = softmax(x) * (1 - softmax(x))
end function softmax_prime

-pure function softplus(x) result(res)
+pure function softplus(x, alpha) result(res)
! Softplus activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = log(exp(x) + 1)
end function softplus

-pure function softplus_prime(x) result(res)
+pure function softplus_prime(x, alpha) result(res)
! First derivative of the softplus activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(x) / (exp(x) + 1)
end function softplus_prime

-pure function step(x) result(res)
+pure function step(x, alpha) result(res)
! Step activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
where (x > 0)
res = 1
@@ -159,26 +195,29 @@ pure function step(x) result(res)
end where
end function step

-pure function step_prime(x) result(res)
+pure function step_prime(x, alpha) result(res)
! First derivative of the step activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = 0
end function step_prime

-pure function tanhf(x) result(res)
+pure function tanhf(x, alpha) result(res)
! Tangent hyperbolic activation function.
! Same as the intrinsic tanh, but must be
! defined here so that we can use procedure
! pointer with it.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = tanh(x)
end function tanhf

-pure function tanh_prime(x) result(res)
+pure function tanh_prime(x, alpha) result(res)
! First derivative of the tanh activation function.
real, intent(in) :: x(:)
+real, intent(in), optional :: alpha
real :: res(size(x))
res = 1 - tanh(x)**2
end function tanh_prime
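A note for reviewers: as committed, leaky_relu and leaky_relu_prime declare alpha as optional but reference it unconditionally, and referencing an absent optional dummy argument is invalid Fortran. The last commit message entry ("Error out if leaky ReLU is requested") suggests the library rejects this activation elsewhere rather than defaulting the slope. A guarded variant would look like the following sketch (hypothetical, not part of this commit; the name leaky_relu_safe and the default slope 0.3 are assumptions):

pure function leaky_relu_safe(x, alpha) result(res)
  ! Leaky ReLU with a guarded optional slope.
  real, intent(in) :: x(:)
  real, intent(in), optional :: alpha
  real :: res(size(x))
  real :: a
  a = 0.3                    ! assumed default slope when alpha is absent
  if (present(alpha)) a = alpha
  res = max(a*x, x)
end function leaky_relu_safe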
