Initializers stub #151

Open · wants to merge 2 commits into main
44 changes: 44 additions & 0 deletions src/nf/nf_initializers.f90
@@ -0,0 +1,44 @@
module nf_initializers

  !! This module provides the abstract initializer type and the concrete
  !! Glorot and He initializer stubs.

  implicit none

  private
  public :: initializer_type, glorot, he

  type, abstract :: initializer_type
  contains
    procedure(init), deferred :: init
  end type initializer_type

  abstract interface
    subroutine init(self, x)
      !! Initialize the array of weights x.
      import :: initializer_type
      class(initializer_type), intent(in) :: self
      real, intent(inout) :: x(:)
    end subroutine init
  end interface

  type, extends(initializer_type) :: glorot
  contains
    procedure :: init => init_glorot
  end type glorot

  type, extends(initializer_type) :: he
  contains
    procedure :: init => init_he
  end type he

contains

  subroutine init_glorot(self, x)
    class(glorot), intent(in) :: self
    real, intent(inout) :: x(:)
    error stop 'Not implemented'
  end subroutine init_glorot

  subroutine init_he(self, x)
    class(he), intent(in) :: self
    real, intent(inout) :: x(:)
    error stop 'Not implemented'
  end subroutine init_he

end module nf_initializers
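For orientation, a concrete `init_glorot` might eventually look like the sketch below. This is not part of the diff: it assumes Glorot (Xavier) uniform sampling and, because the `init` interface receives only the flat weight array, it approximates `fan_in + fan_out` by `size(x)`.

```
subroutine init_glorot(self, x)
  class(glorot), intent(in) :: self
  real, intent(inout) :: x(:)
  real :: limit
  ! Glorot uniform draws from U(-limit, limit) with
  ! limit = sqrt(6 / (fan_in + fan_out)); size(x) stands in for
  ! fan_in + fan_out since the interface passes only the flat array.
  limit = sqrt(6. / size(x))
  call random_number(x)      ! fill x with U[0, 1) samples
  x = 2 * limit * x - limit  ! rescale to [-limit, limit)
end subroutine init_glorot
```

Note that `random_number` is an impure intrinsic, so an implementation like this could not be called from the `pure` layer constructors further down; either the constructors lose `pure` or initialization happens later, at network construction time.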
2 changes: 2 additions & 0 deletions src/nf/nf_layer.f90
@@ -4,6 +4,7 @@ module nf_layer
  !! user-facing API.

  use nf_base_layer, only: base_layer
  use nf_initializers, only: initializer_type
  use nf_optimizers, only: optimizer_base_type

  implicit none
@@ -17,6 +18,7 @@ module nf_layer
    !! nf_layer_constructors.f90 to create `layer` instances.

    class(base_layer), allocatable :: p
    class(initializer_type), allocatable :: initializer
    character(:), allocatable :: name
    character(:), allocatable :: activation
    integer, allocatable :: layer_shape(:)
13 changes: 9 additions & 4 deletions src/nf/nf_layer_constructors.f90
@@ -3,7 +3,8 @@ module nf_layer_constructors
  !! This module provides the functions to instantiate specific layers.

  use nf_layer, only: layer
  use nf_initializers, only: initializer_type
  use nf_activation, only: activation_function

  implicit none

@@ -62,7 +63,7 @@ end function input3d

  interface

    pure module function dense(layer_size, activation, initializer) result(res)
      !! Dense (fully-connected) layer constructor.
      !!
      !! This layer is a building block for dense, fully-connected networks,
@@ -81,6 +82,8 @@ pure module function dense(layer_size, activation) result(res)
      !! The number of neurons in a dense layer
      class(activation_function), intent(in), optional :: activation
      !! Activation function instance (default sigmoid)
      class(initializer_type), intent(in), optional :: initializer
      !! Algorithm used to initialize the weights
      type(layer) :: res
      !! Resulting layer instance
    end function dense
@@ -106,7 +109,7 @@ pure module function flatten() result(res)
      !! Resulting layer instance
    end function flatten

    pure module function conv2d(filters, kernel_size, activation, initializer) result(res)
      !! 2-d convolutional layer constructor.
      !!
      !! This layer is for building 2-d convolutional networks.
@@ -121,14 +124,16 @@ pure module function conv2d(filters, kernel_size, activation) result(res)
      !! use nf, only: conv2d, layer, relu
      !! type(layer) :: conv2d_layer
      !! conv2d_layer = conv2d(filters=32, kernel_size=3)
      !! conv2d_layer = conv2d(filters=32, kernel_size=3, activation=relu())
      !! ```
      integer, intent(in) :: filters
      !! Number of filters in the output of the layer
      integer, intent(in) :: kernel_size
      !! Width of the convolution window, commonly 3 or 5
      class(activation_function), intent(in), optional :: activation
      !! Activation function (default sigmoid)
      class(initializer_type), intent(in), optional :: initializer
      !! Algorithm used to initialize the weights
      type(layer) :: res
      !! Resulting layer instance
    end function conv2d
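Once implemented, the new optional argument would be passed alongside `activation`, as in this hypothetical sketch. With the PR as-is, passing an initializer hits the `error stop` in the submodule below; the sketch also assumes that `nf` re-exports the constructors and `relu`, and that the initializer types are built with their default structure constructors:

```
program initializer_example
  use nf, only: conv2d, dense, layer, relu
  use nf_initializers, only: glorot, he
  implicit none
  type(layer) :: dense_layer, conv_layer

  ! The initializer types carry no components, so the default
  ! structure constructors glorot() and he() suffice here.
  dense_layer = dense(10, activation=relu(), initializer=glorot())
  conv_layer = conv2d(filters=32, kernel_size=3, initializer=he())
end program initializer_example
```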
11 changes: 9 additions & 2 deletions src/nf/nf_layer_constructors_submodule.f90
@@ -9,15 +9,17 @@
  use nf_maxpool2d_layer, only: maxpool2d_layer
  use nf_reshape_layer, only: reshape3d_layer
  use nf_activation, only: activation_function, sigmoid
  use nf_initializers, only: initializer_type

  implicit none

contains

  pure module function conv2d(filters, kernel_size, activation, initializer) result(res)
    integer, intent(in) :: filters
    integer, intent(in) :: kernel_size
    class(activation_function), intent(in), optional :: activation
    class(initializer_type), intent(in), optional :: initializer
    type(layer) :: res

    class(activation_function), allocatable :: activation_tmp
@@ -32,6 +34,8 @@ pure module function conv2d(filters, kernel_size, activation) result(res)

    res % activation = activation_tmp % get_name()

    if (present(initializer)) error stop 'Initializers not yet implemented'

    allocate( &
      res % p, &
      source=conv2d_layer(filters, kernel_size, activation_tmp) &
@@ -40,9 +44,10 @@
  end function conv2d


  pure module function dense(layer_size, activation, initializer) result(res)
    integer, intent(in) :: layer_size
    class(activation_function), intent(in), optional :: activation
    class(initializer_type), intent(in), optional :: initializer
    type(layer) :: res

    class(activation_function), allocatable :: activation_tmp
@@ -58,6 +63,8 @@ pure module function dense(layer_size, activation) result(res)

    res % activation = activation_tmp % get_name()

    if (present(initializer)) error stop 'Initializers not yet implemented'

    allocate(res % p, source=dense_layer(layer_size, activation_tmp))

  end function dense
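The two `error stop` guards mark where the real wiring would land. Given that `layer` now carries an `initializer` component, one plausible follow-up mirrors how `activation_tmp` is handled above. This is only a sketch: the choice of Glorot as the default is an assumption, not something the PR specifies.

```
! Hypothetical replacement for the error stop guard; requires
! use nf_initializers, only: initializer_type, glorot
if (present(initializer)) then
  allocate(res % initializer, source=initializer)
else
  allocate(res % initializer, source=glorot())  ! assumed default
end if
```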