DISPATCH
mpi_coords_mod.f90
!===============================================================================
!> $Id: 5537d0e6a4f17e3fbcc8d04daec765c27fdfb16d $
!> MPI calls related to Cartesian MPI coordinate systems
!>
!> To make use of this module, make sure it is compiled together with your code,
!> using either the Makefile and the 'make' command, or compiling manually, with
!>
!> mpi = ../../../../../mpi            # or wherever
!> mpifort -c $mpi/mpi_mod.f90         # compile mpi_mod.f90
!> mpifort -c $mpi/mpi_coords.f90      # add any other modules you need
!> mpifort -c your_code.f90            # compile your code
!> mpifort *.o -o your_code.x          # link together into your_code.x
!>
!> In your_code.f90 you add lines such as:
!>
!> USE mpi_mod                         ! makes the mpi% object available
!> USE mpi_coords_mod                  ! makes the mpi_coords% object available
!> implicit none
!> integer:: dims(3)                   ! Cartesian MPI dimensions
!> ...
!> call mpi%init                       ! initializes the mpi% object
!> call mpi_coords%init (dims=dims)    ! initializes the mpi_coords% object
!> call mpi_coords%print               ! prints an overview of the geometry
!> ...
!> call mpi%end                        ! closes MPI
!>
!> To see which variables and procedures are inside mpi%, just look below!
!>
!===============================================================================
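!===============================================================================
!> As a complete minimal driver, the lines above might be combined as follows
!> (an illustrative sketch; the program name and mesh size are placeholders):
!>
!>   program your_code
!>     USE mpi_mod                        ! makes the mpi% object available
!>     USE mpi_coords_mod                 ! makes the mpi_coords% object available
!>     implicit none
!>     integer:: dims(3) = [64,64,64]     ! global mesh size (example value)
!>     call mpi%init                      ! initialize MPI
!>     call mpi_coords%init (dims=dims)   ! derive mpi_dims from the mesh
!>     call mpi_coords%print              ! print an overview of the geometry
!>     call mpi_coords%finalize           ! free the Cartesian communicators
!>     call mpi%end                       ! close MPI
!>   end program your_code
!===============================================================================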
MODULE mpi_coords_mod
 USE mpi_mod
 USE timer_mod
 implicit none
 private
#ifdef MPI
 include 'mpif.h'
#endif
 logical:: cart_created=.false. ! set to true when Cartesian MPI coordinates are set up
 logical:: mpi_reorder          ! reordering of ranks allowed, or not
 logical:: mpi_periodic(3)      ! periodic MPI dimensions, or not
 integer:: mpi_dim3(3)          ! Cartesian MPI dimensions
 integer:: mpi_cord3(3)         ! our coordinates in Cartesian MPI space
 integer:: mpi_plane(3)         ! communicators between ranks in planes
 integer:: mpi_beam(3)          ! communicators between ranks along axis directions
 integer:: mpi_dn(3)            ! ranks downwards (circular)
 integer:: mpi_up(3)            ! ranks upwards (circular)
 integer:: mpi_comm_cart        ! communicator with Cartesian ordering
 logical:: ok
 integer:: mpi_err
 type, public, extends(mpi_t):: mpi_coords_t
  integer:: coords(3)    ! our coordinates in Cartesian MPI space
  integer:: dims(3)      ! Cartesian MPI dimensions
  integer:: odims(3)
  integer:: npatch(3)
  integer:: plane(3)     ! plane communicators
  integer:: beam(3)      ! beam (axis) communicators
  integer:: dn(3)        ! neighbor ranks downwards (circular)
  integer:: up(3)        ! neighbor ranks upwards (circular)
  integer:: cart_comm    ! Cartesian communicator
 contains
  procedure:: init
  procedure:: print
  procedure:: coords_to_rank
  procedure:: rank_to_coords
  procedure:: finalize
 end type
 type(mpi_coords_t), public:: mpi_coords
CONTAINS

!===============================================================================
!> Compute the Cartesian MPI coordinates corresponding to a rank
!===============================================================================
FUNCTION rank_to_coords (self, rank) result (coords)
 class(mpi_coords_t):: self
 integer:: rank, coords(3)
 !.............................................................................
#ifdef MPI
 if (self%size == 1) then
  coords = 0
 else
  call mpi_cart_coords (self%cart_comm, rank, 3, coords, mpi_err)
  call mpi%assert ('MPI_CART_COORDS', mpi_err)
 end if
#else
 coords = 0
#endif
END FUNCTION rank_to_coords

!===============================================================================
!> Compute the rank corresponding to a set of Cartesian MPI coordinates
!===============================================================================
FUNCTION coords_to_rank (self, coords) result (rank)
 class(mpi_coords_t):: self
 integer:: coords(3), rank
 !.............................................................................
#ifdef MPI
 if (self%size == 1) then
  rank = 0
 else
  call mpi_cart_rank (self%cart_comm, coords, rank, mpi_err)
  call mpi%assert ('MPI_CART_RANK', mpi_err)
 end if
#else
 rank = 0
#endif
END FUNCTION coords_to_rank

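!===============================================================================
!> Illustrative sketch (a hypothetical helper, not part of the original API):
!> verify that the two conversions above are mutually inverse for the calling
!> rank.
!===============================================================================
SUBROUTINE check_roundtrip (self)
 class(mpi_coords_t):: self
 integer:: coords(3), rank
 !.............................................................................
 coords = self%rank_to_coords (self%rank) ! rank -> Cartesian coordinates
 rank = self%coords_to_rank (coords)      ! and back to a rank again
 if (rank /= self%rank) &
  print *, 'ERROR: rank/coords round trip failed on rank', self%rank
END SUBROUTINE check_roundtrip
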
!===============================================================================
!> Extend the basic MPI object with Cartesian MPI coordinate info
!===============================================================================
SUBROUTINE init (self, mpi_dims, dims)
 class(mpi_coords_t):: self
 integer, optional:: mpi_dims(3), dims(3)
 !.............................................................................
 call self%mpi_t%init
 call cart_create_mpi (self, mpi_dims, dims=dims)
 self%coords = mpi_cord3
 self%dims = mpi_dim3
 self%plane = mpi_plane
 self%beam = mpi_beam
 self%dn = mpi_dn
 self%up = mpi_up
 self%ok = ok
 ! Mirror the values into the public mpi_coords object
 mpi_coords%cart_comm = self%cart_comm
 mpi_coords%coords = self%coords
 mpi_coords%dims = self%dims
 mpi_coords%plane = self%plane
 mpi_coords%beam = self%beam
 mpi_coords%dn = self%dn
 mpi_coords%up = self%up
 mpi_coords%ok = self%ok
 if (mpi%size /= product(mpi_dim3)) then
  print *, 'inconsistent mpi%size:', mpi%size, mpi_dim3
  stop
 end if
END SUBROUTINE init

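!===============================================================================
!> Usage note (illustrative; the numbers are examples only): one may either let
!> init derive the layout from the global mesh size,
!>
!>   call mpi_coords%init (dims=[128,128,64])
!>
!> or force an explicit layout, here for 16 ranks:
!>
!>   call mpi_coords%init (mpi_dims=[4,2,2])
!===============================================================================
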
!===============================================================================
!> Print an overview of the Cartesian geometry, one rank at a time
!===============================================================================
SUBROUTINE print (self)
 class(mpi_coords_t):: self
 integer:: rank
 !.............................................................................
 if (self%ok) then
  call self%mpi_t%print
  do rank=0,self%size-1
   if (rank==self%rank) then
    print *, '...............................................'
    print *, 'mpi%rank   =', self%rank
    print *, 'mpi%dims   =', self%dims
    print *, 'mpi%coords =', self%coords
    print *, 'mpi%beam   =', self%beam
    print *, 'mpi%plane  =', self%plane
    print *, 'mpi%dn     =', self%dn
    print *, 'mpi%up     =', self%up
   end if
   call mpi%barrier (delay=0.25)
  end do
 else if (self%master) then
  print *, 'WARNING: no MPI Cartesian coordinates'
 end if
END SUBROUTINE print

!===============================================================================
!> Create a Cartesian arrangement of MPI ranks, with dimensions mpi_dims(3). If
!> dims(3) is present, it is taken as the global number of mesh points, and an
!> attempt is made to choose the MPI dimensions such that they divide dims(3)
!> evenly, with product(mpi_dims) = mpi%size. If mpi_dims is present, the
!> dims(3) information is ignored, even if present.
!===============================================================================
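!-------------------------------------------------------------------------------
! Worked example (illustrative numbers, not from the original source): with
! mpi%size=64 and dims=(512,512,512), the cube-root guess below gives
! mpi_dims=(4,4,4), which divides the mesh evenly and has product 64, so it is
! accepted. With mpi%size=8 and dims=(256,256,1), the cube guess (2,2,2) fails
! the even-division test in the third dimension, 8 is not a perfect square, and
! the first 1-D fallback (8,1,1) is accepted.
!-------------------------------------------------------------------------------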
SUBROUTINE cart_create_mpi (self, mpi_dims, dims)
 implicit none
 class(mpi_coords_t):: self
 integer, dimension(3), optional:: mpi_dims, dims
 integer:: i, m, color, n(3)
 integer:: mpi_err
 character(len=120), save:: id= &
  'mpi_coords.f90 $Id: 5537d0e6a4f17e3fbcc8d04daec765c27fdfb16d $'
 integer, save:: itimer=0
!-------------------------------------------------------------------------------
#ifndef MPI
 mpi_err = 0
#endif
 mpi_reorder = .true.
 mpi_periodic = .true.
 !-----------------------------------------------------------------------------
 ! Create a communicator (cart) with Cartesian ordering
 !-----------------------------------------------------------------------------
 ok = .false.
 if (present(mpi_dims)) then
  mpi_dim3 = mpi_dims
  if (present(dims)) then
   ok = test_is_ok()
  else
   ok = product(mpi_dims)==self%size
  end if
  if (self%rank==0) print '(a,i7,2x,3i5)', ' mpi%size, mpi_dims =', self%size, mpi_dims
 else
  if (present(dims)) then
   if (product(dims)/self%size*self%size == product(dims)) then
    do while (.true.)
     ! Try a cubic layout (the exponent is biased slightly above 1/3,
     ! to guard against roundoff in the integer truncation)
     m = self%size**0.333334
     if (m**3 == self%size) then
      mpi_dim3 = [m,m,m]; if (test_is_ok()) exit
     end if
     ! Try square layouts, in each orientation
     m = self%size**0.500001
     if (m**2 == self%size) then
      mpi_dim3 = [m,m, 1]; if (test_is_ok()) exit
      mpi_dim3 = [m, 1,m]; if (test_is_ok()) exit
      mpi_dim3 = [ 1,m,m]; if (test_is_ok()) exit
     end if
     ! Fall back to one-dimensional layouts
     m = self%size
     mpi_dim3 = [m,1,1]; if (test_is_ok()) exit
     mpi_dim3 = [1,m,1]; if (test_is_ok()) exit
     mpi_dim3 = [1,1,m]; if (test_is_ok()) exit
     call fail ('WARNING: none of the simple choices worked')
     exit
    end do
   else
    call fail ('WARNING: mpi%size must be a divisor of product(dims) =', product(dims))
   end if
  else
   call fail ('WARNING: either mpi_dims(3) or dims(3) must be present')
  end if
 end if
 if (self%size /= product(mpi_dim3)) then
  call fail ('WARNING: mpi%size must be equal to product(mpi_dims) =', &
   product(mpi_dim3))
 end if
 if (.not. ok) return
#ifdef MPI
 call mpi_cart_create (mpi_comm_world, 3, mpi_dim3, mpi_periodic, mpi_reorder, &
                       mpi_comm_cart, mpi_err)
#endif
 self%cart_comm = mpi_comm_cart
 if (self%rank==0) print '(13x,a,3i5)', ' Using mpi_dims =', mpi_dim3
 call mpi%assert ('cart_create_mpi: cart', mpi_err)
 !-----------------------------------------------------------------------------
 ! Compute mpi_cord3; our coordinates in Cartesian MPI space
 !-----------------------------------------------------------------------------
#ifdef MPI
 call mpi_cart_coords (mpi_comm_cart, self%rank, 3, mpi_cord3, mpi_err) ! mpi_cord3(i) = mpi_[xyz]
#endif
 call mpi%assert ('cart_create_mpi: mpi_cord3', mpi_err)
 do i=1,3
  !---------------------------------------------------------------------------
  ! Compute mpi_up and mpi_dn; the ranks of the nearest neighbors
  !---------------------------------------------------------------------------
#ifdef MPI
  call mpi_cart_shift (mpi_comm_cart, i-1, 1, mpi_dn(i), mpi_up(i), mpi_err)
#endif
  call mpi%assert ('cart_create_mpi: mpi_dn/up', mpi_err)
  !---------------------------------------------------------------------------
  ! Compute mpi_plane; the communicators for planes in MPI space. All ranks
  ! with the same coordinate along axis i share a plane communicator.
  !---------------------------------------------------------------------------
#ifdef MPI
  call mpi_comm_split (mpi_comm_world, mpi_cord3(i), self%rank, mpi_plane(i), &
                       mpi_err)
#endif
  call mpi%assert ('cart_create_mpi: mpi_plane', mpi_err)
  !---------------------------------------------------------------------------
  ! Compute mpi_beam; the communicators along the MPI coordinate axes. The
  ! color is the linear index of the beam's foot point, with the coordinate
  ! along axis i zeroed out, so ranks that differ only along axis i share a
  ! beam communicator.
  !---------------------------------------------------------------------------
  color = merge(0,mpi_cord3(1),i==1) &
        + merge(0,mpi_cord3(2),i==2)*mpi_dim3(1) &
        + merge(0,mpi_cord3(3),i==3)*mpi_dim3(1)*mpi_dim3(2)
#ifdef MPI
  call mpi_comm_split (mpi_comm_world, color, mpi_cord3(i), mpi_beam(i), mpi_err)
#endif
  call mpi%assert ('cart_create_mpi: mpi_beam', mpi_err)
 end do
 cart_created = .true.
CONTAINS
 !-----------------------------------------------------------------------------
 ! Check that mpi_dim3 divides dims evenly and matches the number of ranks
 !-----------------------------------------------------------------------------
 logical function test_is_ok ()
  n = dims/mpi_dim3
  test_is_ok = all(n*mpi_dim3 == dims) .and. product(mpi_dim3)==self%size
  ok = test_is_ok
  if (self%rank==0) print '(13x,a,3i5,l5)', 'trying mpi_dims =', mpi_dim3, ok
 end function
 !-----------------------------------------------------------------------------
 ! Print a warning on the master rank only
 !-----------------------------------------------------------------------------
 subroutine fail (label, i)
  character(len=*):: label
  integer, optional:: i
  if (self%rank==0) then
   if (present(i)) then
    print *, label, i
   else
    print *, label
   end if
  end if
 end subroutine
END SUBROUTINE cart_create_mpi
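!===============================================================================
!> Illustrative sketch (a hypothetical helper, not part of the original API):
!> reduce a value over one beam communicator; here the sum over all ranks that
!> share this rank's y- and z-coordinates. The subroutine name and the use of
!> MPI_REAL are assumptions made for the example.
!===============================================================================
SUBROUTINE beam_sum_demo (self, v, vsum)
 class(mpi_coords_t):: self
 real:: v, vsum
 integer:: mpi_err
!-------------------------------------------------------------------------------
#ifdef MPI
 ! Sum v across the x-beam; every rank in the beam receives the result
 call mpi_allreduce (v, vsum, 1, MPI_REAL, MPI_SUM, self%beam(1), mpi_err)
 call mpi%assert ('beam_sum_demo', mpi_err)
#else
 vsum = v
#endif
END SUBROUTINE beam_sum_demo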
!
!===============================================================================
!> Free the Cartesian communicator and the derived plane and beam communicators
!===============================================================================
SUBROUTINE finalize (self)
 implicit none
 class(mpi_coords_t):: self
 integer:: mpi_err, i
!-------------------------------------------------------------------------------
#ifdef MPI
 if (self%ok) then
  call mpi_comm_free (mpi_comm_cart, mpi_err)
  call mpi%assert ('finalize_cart: mpi_comm_cart', mpi_err)
  do i=1,3
   call mpi_comm_free (mpi_plane(i), mpi_err)
   call mpi%assert ('finalize_cart: mpi_plane', mpi_err)
   call mpi_comm_free (mpi_beam(i), mpi_err)
   call mpi%assert ('finalize_cart: mpi_beam', mpi_err)
  end do
 end if
#endif
END SUBROUTINE finalize
END MODULE mpi_coords_mod