Just creating the batchnorm layer for now; actual tests TODO
milancurcic committed Aug 24, 2023
1 parent de67a88 commit b1e0d39
Showing 1 changed file with 21 additions and 45 deletions.
test/test_batchnorm_layer.f90
@@ -1,13 +1,12 @@
 program test_batchnorm_layer
 
   use iso_fortran_env, only: stderr => error_unit
-  use nf, only: batchnorm, input, layer
-  use nf_input3d_layer, only: input3d_layer
+  use nf, only: batchnorm, layer
   use nf_batchnorm_layer, only: batchnorm_layer
 
   implicit none
 
-  type(layer) :: bn_layer, input_layer
+  type(layer) :: bn_layer
   integer, parameter :: num_features = 64
   real, allocatable :: sample_input(:,:)
   real, allocatable :: output(:,:)
@@ -29,54 +28,31 @@ program test_batchnorm_layer
     write(stderr, '(a)') 'batchnorm layer should not be marked as initialized yet.. failed'
   end if
 
-  input_layer = input(input_shape)
-  call bn_layer % init(input_layer)
-
-  if (.not. bn_layer % initialized) then
-    ok = .false.
-    write(stderr, '(a)') 'batchnorm layer should now be marked as initialized.. failed'
-  end if
-
-  if (.not. all(bn_layer % input_layer_shape == [num_features])) then
-    ok = .false.
-    write(stderr, '(a)') 'batchnorm layer input layer shape should be correct.. failed'
-  end if
-
   ! Initialize sample input and gradient
   allocate(sample_input(num_features, 1))
   allocate(gradient(num_features, 1))
   sample_input = 1.0
   gradient = 2.0
 
-  ! Set input for the input layer
-  select type(this_layer => input_layer % p); type is(input3d_layer)
-    call this_layer % set(sample_input)
-  end select
-
-  ! Initialize the batch normalization layer
-  bn_layer = batchnorm(num_features)
-  call bn_layer % init(input_layer)
-
-  ! Perform forward and backward passes
-  call bn_layer % forward(input_layer)
-  call bn_layer % backward(input_layer, gradient)
-
-  ! Retrieve output and check normalization
-  call bn_layer % get_output(output)
-  if (.not. all(abs(output - sample_input) < tolerance)) then
-    ok = .false.
-    write(stderr, '(a)') 'batchnorm layer output should be close to input.. failed'
-  end if
-
-  ! Retrieve gamma and beta gradients
-  allocate(gamma_grad(num_features))
-  allocate(beta_grad(num_features))
-  call bn_layer % get_gradients(gamma_grad, beta_grad)
-
-  if (.not. all(beta_grad == sum(gradient))) then
-    ok = .false.
-    write(stderr, '(a)') 'batchnorm layer beta gradients are incorrect.. failed'
-  end if
+  !TODO run forward and backward passes directly on the batchnorm_layer instance,
+  !TODO since we don't yet support tying in with the input layer.
+
+  !TODO Retrieve output and check normalization
+  !call bn_layer % get_output(output)
+  !if (.not. all(abs(output - sample_input) < tolerance)) then
+  !  ok = .false.
+  !  write(stderr, '(a)') 'batchnorm layer output should be close to input.. failed'
+  !end if
+
+  !TODO Retrieve gamma and beta gradients
+  !allocate(gamma_grad(num_features))
+  !allocate(beta_grad(num_features))
+  !call bn_layer % get_gradients(gamma_grad, beta_grad)
+
+  !if (.not. all(beta_grad == sum(gradient))) then
+  !  ok = .false.
+  !  write(stderr, '(a)') 'batchnorm layer beta gradients are incorrect.. failed'
+  !end if
 
   ! Report test results
   if (ok) then
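The TODO block above is the heart of this commit: the forward and backward tests are deferred until they can run directly on the concrete batchnorm_layer instance, rather than through an input layer as the deleted code attempted. For illustration, here is a minimal sketch of what that direct test could look like, assuming the concrete type can be constructed as batchnorm_layer(num_features) and exposes forward and backward procedures that take plain arrays; these names and signatures are assumptions, not an API confirmed by this commit.

! A minimal sketch of the deferred test (illustrative only): it assumes
! the concrete type can be constructed as batchnorm_layer(num_features)
! and that it exposes forward and backward procedures on plain arrays.
program test_batchnorm_direct
  use nf_batchnorm_layer, only: batchnorm_layer
  implicit none
  type(batchnorm_layer) :: bn
  integer, parameter :: num_features = 64
  real :: x(num_features, 1), grad(num_features, 1)

  bn = batchnorm_layer(num_features)
  x = 1.
  grad = 2.

  ! Forward pass: y = gamma * (x - mean) / sqrt(variance + eps) + beta,
  ! with mean and variance computed per feature over the batch dimension.
  call bn % forward(x)

  ! Backward pass: accumulates parameter gradients; in particular,
  ! dL/dbeta is the upstream gradient summed over the batch dimension,
  ! which is what the commented-out beta_grad check is getting at.
  call bn % backward(x, grad)

end program test_batchnorm_direct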
