heat_mod.F90
! Field metadata for heat equation solver
module heat
  use mpi_f08
  use iso_fortran_env, only : REAL64

  implicit none

  integer, parameter :: dp = REAL64
  real(dp), parameter :: DX = 0.01_dp, DY = 0.01_dp   ! Fixed grid spacing

  type :: field
     integer :: nx           ! local dimension of the field
     integer :: ny
     integer :: nx_full      ! global dimension of the field
     integer :: ny_full
     real(dp) :: dx
     real(dp) :: dy
     real(dp), dimension(:,:), allocatable :: data
  end type field

  type :: parallel_data
     integer :: size
     integer :: rank
     integer :: nleft, nright   ! Ranks of neighbouring MPI tasks
  end type parallel_data

contains
  ! Initialize the field type metadata
  ! Arguments:
  !   field0 (type(field)): output field whose metadata is set
  !   nx, ny: global dimensions of the field
  !   parallel (type(parallel_data)): domain decomposition information
  subroutine set_field_dimensions(field0, nx, ny, parallel)
    implicit none

    type(field), intent(out) :: field0
    integer, intent(in) :: nx, ny
    type(parallel_data), intent(in) :: parallel

    integer :: nx_local, ny_local

    ! One-dimensional slab decomposition: the grid is split along y,
    ! each task holds the full extent in x
    nx_local = nx
    ny_local = ny / parallel%size

    field0%dx = DX
    field0%dy = DY
    field0%nx = nx_local
    field0%ny = ny_local
    field0%nx_full = nx
    field0%ny_full = ny

  end subroutine set_field_dimensions
  ! Query the MPI environment and set up the domain decomposition
  subroutine parallel_setup(parallel, nx, ny)
    implicit none

    type(parallel_data), intent(out) :: parallel
    integer, intent(in), optional :: nx, ny

    integer :: ny_local
    integer :: ierr

    ! Query the number of MPI tasks and store it in parallel%size
    call mpi_comm_size(MPI_COMM_WORLD, parallel%size, ierr)

    if (present(ny)) then
       ny_local = ny / parallel%size
       if (ny_local * parallel%size /= ny) then
          write(*,*) 'Cannot divide grid evenly to processors'
          call mpi_abort(MPI_COMM_WORLD, -2, ierr)
       end if
    end if

    ! Query the MPI rank of this task and determine the left and right
    ! neighbours of this domain; the boundary domains have no neighbour
    ! on one side, which is signalled with MPI_PROC_NULL so that
    ! communication calls towards that side become no-ops
    call mpi_comm_rank(MPI_COMM_WORLD, parallel%rank, ierr)

    parallel%nleft = parallel%rank - 1
    if (parallel%nleft < 0) parallel%nleft = MPI_PROC_NULL
    parallel%nright = parallel%rank + 1
    if (parallel%nright > parallel%size - 1) parallel%nright = MPI_PROC_NULL

  end subroutine parallel_setup
end module heat
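
! What follows is a minimal usage sketch, not part of the original file:
! a hypothetical driver that initializes MPI, sets up the decomposition,
! fills in the field metadata, and performs one direction of a halo
! exchange. The grid size (2000 x 2000), the program name, the message
! tag, and the ghost-layer allocation are illustrative assumptions, not
! defined by this module.
program heat_setup_demo
  use heat
  implicit none

  type(parallel_data) :: parallelization
  type(field) :: current
  integer :: ierr

  call mpi_init(ierr)
  call parallel_setup(parallelization, 2000, 2000)
  call set_field_dimensions(current, 2000, 2000, parallelization)

  ! Assumed layout: one ghost column/row on each side of the local block
  allocate(current%data(0:current%nx + 1, 0:current%ny + 1))
  current%data = 0.0_dp

  ! Send the last interior column to the right neighbour and receive the
  ! left ghost column; with MPI_PROC_NULL neighbours these calls are
  ! harmless no-ops at the outer boundaries
  call mpi_sendrecv(current%data(:, current%ny), current%nx + 2, &
       MPI_DOUBLE_PRECISION, parallelization%nright, 11, &
       current%data(:, 0), current%nx + 2, &
       MPI_DOUBLE_PRECISION, parallelization%nleft, 11, &
       MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr)

  if (parallelization%rank == 0) then
     write(*,*) 'tasks:', parallelization%size, &
          ' local grid:', current%nx, 'x', current%ny
  end if

  call mpi_finalize(ierr)
end program heat_setup_demo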