Skip to content

Commit cf2caf6

Browse files
committed
added tests; note that they don't work yet
1 parent b5e7f74 commit cf2caf6

File tree

5 files changed

+169
-6
lines changed

5 files changed

+169
-6
lines changed

src/nf/nf_layer_submodule.f90

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -340,6 +340,10 @@ impure elemental module subroutine init(self, input)
340340
self % layer_shape = shape(this_layer % output)
341341
type is(maxpool2d_layer)
342342
self % layer_shape = shape(this_layer % output)
343+
type is(locally_connected_1d_layer)
344+
self % layer_shape = shape(this_layer % output)
345+
type is(maxpool1d_layer)
346+
self % layer_shape = shape(this_layer % output)
343347
type is(flatten_layer)
344348
self % layer_shape = shape(this_layer % output)
345349
end select

src/nf/nf_network_submodule.f90

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -73,18 +73,21 @@ module function network_from_layers(layers) result(res)
7373
type is(conv2d_layer)
7474
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
7575
n = n + 1
76+
!type is(locally_connected_1d_layer)
77+
!res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
78+
!n = n + 1
7679
type is(maxpool2d_layer)
7780
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
7881
n = n + 1
7982
type is(reshape3d_layer)
8083
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
8184
n = n + 1
82-
type is(maxpool1d_layer)
83-
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
84-
n = n + 1
85-
type is(reshape2d_layer)
86-
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
87-
n = n + 1
85+
!type is(maxpool1d_layer)
86+
! res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
87+
! n = n + 1
88+
!type is(reshape2d_layer)
89+
! res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
90+
! n = n + 1
8891
class default
8992
n = n + 1
9093
end select

test/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,12 @@ foreach(execid
55
parametric_activation
66
dense_layer
77
conv2d_layer
8+
maxpool1d_layer
89
maxpool2d_layer
910
flatten_layer
1011
insert_flatten
1112
reshape_layer
13+
reshape2d_layer
1214
dense_network
1315
get_set_network_params
1416
conv2d_network

test/test_maxpool1d_layer.f90

Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
program test_maxpool1d_layer

  ! Exercises the maxpool1d layer: construction, initialization against an
  ! input layer, the forward pass (max selection within each pool window),
  ! and the backward pass (gradient routed only to the max locations).

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: maxpool1d, input, layer
  use nf_input2d_layer, only: input2d_layer
  use nf_maxpool1d_layer, only: maxpool1d_layer

  implicit none

  type(layer) :: maxpool_layer, input_layer
  integer, parameter :: pool_size = 2, stride = 2
  integer, parameter :: channels = 3, length = 32
  integer, parameter :: input_shape(2) = [channels, length]
  integer, parameter :: output_shape(2) = [channels, length / 2]
  real, allocatable :: sample_input(:,:), output(:,:), gradient(:,:)
  integer :: pos
  logical :: ok = .true., gradient_ok = .true.

  maxpool_layer = maxpool1d(pool_size)

  if (maxpool_layer % name /= 'maxpool1d') then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer has its name set correctly.. failed'
  end if

  ! A freshly constructed layer must not report itself as initialized.
  if (maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer should not be marked as initialized yet.. failed'
  end if

  input_layer = input(channels, length)
  call maxpool_layer % init(input_layer)

  if (.not. maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer should now be marked as initialized.. failed'
  end if

  if (.not. all(maxpool_layer % input_layer_shape == input_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer input layer shape should be correct.. failed'
  end if

  if (.not. all(maxpool_layer % layer_shape == output_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer output layer shape should be correct.. failed'
  end if

  ! Fill every position with its own index so the max of each pool window
  ! is known in advance.
  allocate(sample_input(channels, length))
  do pos = 1, length
    sample_input(:,pos) = pos
  end do

  select type(this_layer => input_layer % p); type is(input2d_layer)
    call this_layer % set(sample_input)
  end select

  call maxpool_layer % forward(input_layer)
  call maxpool_layer % get_output(output)

  do pos = 1, length / 2
    ! Input values equal their index, so each pooled max is stride * pos.
    if (any(output(:,pos) /= stride * pos)) then
      ok = .false.
      write(stderr, '(a)') 'maxpool1d layer forward pass correctly propagates the max value.. failed'
    end if
  end do

  ! Backward pass: seed the downstream gradient with the forward output.
  allocate(gradient, source=output)

  call maxpool_layer % backward(input_layer, gradient)

  ! The gradient must land only on the max positions (the even indices for
  ! this monotonically increasing input) and be zero everywhere else.
  select type(this_layer => maxpool_layer % p); type is(maxpool1d_layer)
    do pos = 1, length
      if (mod(pos, 2) == 0) then
        if (any(this_layer % gradient(:,pos) /= sample_input(:,pos))) gradient_ok = .false.
      else
        if (any(this_layer % gradient(:,pos) /= 0)) gradient_ok = .false.
      end if
    end do
  end select

  if (.not. gradient_ok) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer backward pass produces the correct dL/dx.. failed'
  end if

  if (ok) then
    print '(a)', 'test_maxpool1d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_maxpool1d_layer: One or more tests failed.'
    stop 1
  end if

end program test_maxpool1d_layer

test/test_reshape2d_layer.f90

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
program test_reshape2d_layer

  ! Tests that a reshape2d layer reshapes a flat input vector into the
  ! requested 2-d shape, both structurally (output shape) and by value.
  !
  ! Fix: removed dead leftovers copied from the 3-d reshape test —
  ! `use nf_datasets, only: download_and_unpack, keras_reshape_url` was
  ! never called, and the locals `keras_reshape_path` and `file_exists`
  ! were never used.

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: input, network, reshape2d_layer => reshape2d

  implicit none

  type(network) :: net
  real, allocatable :: sample_input(:), output(:,:)
  integer, parameter :: output_shape(2) = [32, 32]
  integer, parameter :: input_size = product(output_shape)
  logical :: ok = .true.

  ! Create the network: a flat input followed by a 2-d reshape.
  net = network([ &
    input(input_size), &
    reshape2d_layer(output_shape) &
  ])

  if (.not. size(net % layers) == 2) then
    write(stderr, '(a)') 'the network should have 2 layers.. failed'
    ok = .false.
  end if

  ! Initialize test data with random values.
  allocate(sample_input(input_size))
  call random_number(sample_input)

  ! Propagate forward and read back the reshape layer's output.
  call net % forward(sample_input)
  call net % layers(2) % get_output(output)

  if (.not. all(shape(output) == output_shape)) then
    write(stderr, '(a)') 'the reshape layer produces expected output shape.. failed'
    ok = .false.
  end if

  ! Reshaping is pure data movement (column-major), so the layer output
  ! must match the intrinsic reshape bit-for-bit; exact comparison is
  ! appropriate here despite the values being reals.
  if (.not. all(reshape(sample_input, output_shape) == output)) then
    write(stderr, '(a)') 'the reshape layer produces expected output values.. failed'
    ok = .false.
  end if

  if (ok) then
    print '(a)', 'test_reshape2d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_reshape2d_layer: One or more tests failed.'
    stop 1
  end if

end program test_reshape2d_layer

0 commit comments

Comments
 (0)