MONC
decomposition.F90
!> Parallel decomposition to determine the grid points and data columns that are located on this process
module decomposition_mod
  use datadefn_mod, only : string_length
  use state_mod, only : model_state_type
  use monc_component_mod, only : component_descriptor_type
  use collections_mod, only : map_type
  use conversions_mod, only : conv_to_string
  use grids_mod, only : local_grid_type, x_index, y_index, z_index
  use logging_mod, only : log_log, log_master_log, log_get_logging_level, log_debug, log_info, log_warn
  use optionsdatabase_mod, only : options_get_integer, options_get_string
  use mpi
  implicit none

#ifndef TEST_MODE
  private
#endif

  public decomposition_get_descriptor

contains

  !> Provides the component descriptor for the core to register
  type(component_descriptor_type) function decomposition_get_descriptor()
    decomposition_get_descriptor%name="decomposition"
    decomposition_get_descriptor%initialisation=>init_callback
  end function decomposition_get_descriptor

  !> The initialisation hook. Will set up the appropriate decomposition
  subroutine init_callback(current_state)
    type(model_state_type), target, intent(inout) :: current_state

    character(len=STRING_LENGTH) :: method

    method=options_get_string(current_state%options_database, "decomposition_method")

    if (method .eq. "onedim") then
      current_state%parallel%decomposition_procedure => one_dim_decomposition
    else if (method .eq. "twodim") then
      current_state%parallel%decomposition_procedure => two_dim_decomposition
    else
      current_state%parallel%decomposition_procedure => serial_decomposition
      if (method .ne. "serial") call log_log(log_warn, "Decomposition method "//trim(method)//&
           " not recognised so defaulting to serial")
    end if
  end subroutine init_callback

  !> Decomposition into two dimensions
  subroutine two_dim_decomposition(current_state)
    type(model_state_type), intent(inout) :: current_state

    integer :: ierr
    integer, dimension(2) :: coords, distributed_dims

    current_state%local_grid%active = current_state%global_grid%active
    current_state%local_grid%dimensions = current_state%global_grid%dimensions

    if (current_state%global_grid%dimensions .ne. 3) then
      call log_master_log(log_warn, "Two dimension decomposition selected with only "// &
           trim(conv_to_string(current_state%global_grid%dimensions))//" dimensions so defaulting to one dimension instead")
      call one_dim_decomposition(current_state)
      return
    end if

    distributed_dims=(/0,0/)
    call mpi_dims_create(current_state%parallel%processes, 2, distributed_dims, ierr)
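    ! mpi_dims_create fills in the zeroed entries with a balanced factorisation of the
    ! process count, e.g. 6 processes become a 3x2 grid and 16 processes a 4x4 grid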
75 
76  current_state%parallel%dim_sizes(z_index)=1
77  current_state%parallel%dim_sizes(y_index) = distributed_dims(1)
78  current_state%parallel%dim_sizes(x_index) = distributed_dims(2)
79 
80  if (.not. does_two_dim_work_with_domain_size(current_state, current_state%parallel%dim_sizes(y_index), &
81  current_state%parallel%dim_sizes(x_index))) then
82  call log_master_log(log_warn, "Defaulting to one dimension decomposition due to solution size too small")
83  call one_dim_decomposition(current_state)
84  return
85  end if
86 
87  call mpi_cart_create(current_state%parallel%monc_communicator, 2, (/distributed_dims(1), distributed_dims(2)/),&
88  (/.false., .false./), .false., current_state%parallel%neighbour_comm, ierr)
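    ! The Cartesian topology above is created non-periodic, so mpi_cart_shift returns
    ! MPI_PROC_NULL (a negative rank) at the domain edges; the periodic wrap-around
    ! neighbours are filled in explicitly by apply_two_dim_neighbour_information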
    call mpi_cart_coords(current_state%parallel%neighbour_comm, current_state%parallel%my_rank, 2, coords, ierr)

    call apply_z_dimension_information(current_state)

    ! Sets the limits for each rank in X and Y
    call apply_dimension_bounds(current_state, y_index, merge(current_state%global_grid%size(y_index), 1, &
         current_state%global_grid%active(y_index)), distributed_dims(1), coords(1))
    call apply_dimension_bounds(current_state, x_index, merge(current_state%global_grid%size(x_index), 1, &
         current_state%global_grid%active(x_index)), distributed_dims(2), coords(2))

    ! Get all information about the neighbours and corner neighbours
    call apply_two_dim_neighbour_information(current_state, distributed_dims(1), distributed_dims(2), coords)

    call apply_data_start_end_bounds(current_state%local_grid)

    if (log_get_logging_level() .ge. log_debug) then
      call log_log(log_debug, "PID "//trim(conv_to_string(current_state%parallel%my_rank))//": y="//&
           trim(conv_to_string(current_state%parallel%my_coords(y_index)))//&
           " x="//trim(conv_to_string(current_state%parallel%my_coords(x_index)))//" ny-1="//&
           trim(conv_to_string(current_state%local_grid%neighbours(y_index,1)))//&
           " ny+1="//trim(conv_to_string(current_state%local_grid%neighbours(y_index,3)))//" nx-1="//&
           trim(conv_to_string(current_state%local_grid%neighbours(x_index,1)))//" nx+1="//&
           trim(conv_to_string(current_state%local_grid%neighbours(x_index,3))))
    end if
    call display_decomposition_information(current_state, "TwoDim")
  end subroutine two_dim_decomposition

  !> Will apply the two dimensional neighbour information, implementing the wrap-around
  !! aspect at the edges of the process grid
  subroutine apply_two_dim_neighbour_information(current_state, y_procs, x_procs, coords)
    type(model_state_type), intent(inout) :: current_state
    integer, intent(in) :: y_procs, x_procs, coords(2)

    integer :: ierr, i, halo_depth

    current_state%parallel%wrapped_around=.false.

    ! Find out who is above and below in Y (dimension 0 of the Cartesian communicator)
    call mpi_cart_shift(current_state%parallel%neighbour_comm, 0, 1, current_state%local_grid%neighbours(y_index,1), &
         current_state%local_grid%neighbours(y_index,3), ierr)
    if (current_state%local_grid%neighbours(y_index,1) .lt. 0) then
      call mpi_cart_rank(current_state%parallel%neighbour_comm,&
           (/y_procs-1, coords(2)/), current_state%local_grid%neighbours(y_index,1), ierr)
      current_state%parallel%wrapped_around(y_index, 1)=.true.
    end if

    if (current_state%local_grid%neighbours(y_index,3) .lt. 0) then
      call mpi_cart_rank(current_state%parallel%neighbour_comm, &
           (/0, coords(2)/), current_state%local_grid%neighbours(y_index,3), ierr)
      current_state%parallel%wrapped_around(y_index, 2)=.true.
    end if

    ! Find out who is on the left and on the right in X (dimension 1)
    call mpi_cart_shift(current_state%parallel%neighbour_comm, 1, 1, &
         current_state%local_grid%neighbours(x_index,1), &
         current_state%local_grid%neighbours(x_index,3), ierr)
    if (current_state%local_grid%neighbours(x_index,1) .lt. 0) then
      call mpi_cart_rank(current_state%parallel%neighbour_comm, &
           (/coords(1), x_procs-1/), current_state%local_grid%neighbours(x_index,1), ierr)
      current_state%parallel%wrapped_around(x_index, 1)=.true.
    end if
    if (current_state%local_grid%neighbours(x_index,3) .lt. 0) then
      call mpi_cart_rank(current_state%parallel%neighbour_comm, &
           (/coords(1), 0/), current_state%local_grid%neighbours(x_index,3), ierr)
      current_state%parallel%wrapped_around(x_index, 2)=.true.
    end if

    ! Each additional halo column refers to the same neighbouring rank as the first one
    halo_depth = options_get_integer(current_state%options_database, "halo_depth")

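    ! With a halo depth of 2, columns 1-2 of the neighbours array hold the rank in the
    ! negative direction and columns 3-4 the rank in the positive direction, so the loop
    ! below copies column 1 into column 2 and column 3 into column 4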
    do i = 1, halo_depth - 1
      current_state%local_grid%neighbours(y_index,i+1) = &
           current_state%local_grid%neighbours(y_index,i)
      current_state%local_grid%neighbours(y_index,i+halo_depth+1) = &
           current_state%local_grid%neighbours(y_index,i+halo_depth)
      current_state%local_grid%neighbours(x_index,i+1) = &
           current_state%local_grid%neighbours(x_index,i)
      current_state%local_grid%neighbours(x_index,i+halo_depth+1) = &
           current_state%local_grid%neighbours(x_index,i+halo_depth)
    end do
    ! Obtain the rank of the corner neighbours
    call mpi_cart_rank(current_state%parallel%neighbour_comm, &
         (/merge(coords(1)-1, y_procs-1, coords(1) .ge. 1), &
         merge(coords(2)-1, x_procs-1, coords(2) .ge. 1)/), &
         current_state%local_grid%corner_neighbours(1,1), ierr)
    call mpi_cart_rank(current_state%parallel%neighbour_comm, &
         (/merge(coords(1)-1, y_procs-1, coords(1) .ge. 1), &
         merge(coords(2)+1, 0, coords(2) .lt. x_procs-1)/), &
         current_state%local_grid%corner_neighbours(2,1), ierr)
    call mpi_cart_rank(current_state%parallel%neighbour_comm, &
         (/merge(coords(1)+1, 0, coords(1) .lt. y_procs-1), &
         merge(coords(2)-1, x_procs-1, coords(2) .ge. 1)/), &
         current_state%local_grid%corner_neighbours(3,1), ierr)
    call mpi_cart_rank(current_state%parallel%neighbour_comm, &
         (/merge(coords(1)+1, 0, coords(1) .lt. y_procs-1), &
         merge(coords(2)+1, 0, coords(2) .lt. x_procs-1)/), &
         current_state%local_grid%corner_neighbours(4,1), ierr)

    !! TODO: hardcoded for halo depth two?
    current_state%local_grid%corner_neighbours(:,2)=current_state%local_grid%corner_neighbours(:,1)
  end subroutine apply_two_dim_neighbour_information

  !> Determines whether or not the planned two dimensional decomposition will fit into the
  !! X and Y global domain sizes
  logical function does_two_dim_work_with_domain_size(current_state, y_dims, x_dims)
    type(model_state_type), intent(inout) :: current_state
    integer, intent(in) :: y_dims, x_dims

    if (current_state%global_grid%active(y_index)) then
      if (floor(real(current_state%global_grid%size(Y_INDEX)) / y_dims) .lt. 2) then
        ! Fewer than two Y points per process, so a two dimensional decomposition will not fit
        does_two_dim_work_with_domain_size=.false.
        return
      end if
    end if
    if (current_state%global_grid%active(x_index)) then
      if (floor(real(current_state%global_grid%size(X_INDEX)) / x_dims) .lt. 2) then
        does_two_dim_work_with_domain_size=.false.
        return
      end if
    end if
    does_two_dim_work_with_domain_size=.true.
  end function does_two_dim_work_with_domain_size

  !> Applies the local bounds (start, end and size) for a specific dimension
  subroutine apply_dimension_bounds(current_state, dim, dim_size, dim_processes, dim_my_rank)
    type(model_state_type), intent(inout) :: current_state
    integer, intent(in) :: dim, dim_size, dim_my_rank, dim_processes

    integer :: dimension_division, dimension_extra

    dimension_division = dim_size / dim_processes
    dimension_extra = dim_size - (dimension_division * dim_processes)

    current_state%local_grid%start(dim) = dimension_division*dim_my_rank + merge(dimension_extra, dim_my_rank, &
         dimension_extra .lt. dim_my_rank) + 1
    current_state%local_grid%end(dim) = (current_state%local_grid%start(dim)-1) + dimension_division + &
         merge(1, 0, dim_my_rank .lt. dimension_extra)
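    ! For example, dim_size=10 over dim_processes=4 gives dimension_division=2 and
    ! dimension_extra=2, so ranks 0 and 1 hold three points each (1-3 and 4-6) while
    ! ranks 2 and 3 hold two points each (7-8 and 9-10); the remainder is absorbed by
    ! the lowest numbered ranks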
    current_state%local_grid%size(dim)=(current_state%local_grid%end(dim) - current_state%local_grid%start(dim)) + 1
    current_state%parallel%my_coords(dim) = dim_my_rank
  end subroutine apply_dimension_bounds

  !> Applies Z dimension information. As we always decompose into columns, Z is never split
  subroutine apply_z_dimension_information(current_state)
    type(model_state_type), intent(inout) :: current_state

    current_state%local_grid%start(z_index) = 1
    current_state%local_grid%end(z_index) = current_state%global_grid%size(z_index)
    current_state%local_grid%size(z_index) = current_state%global_grid%size(z_index)
    current_state%parallel%my_coords(z_index) = 0
    current_state%parallel%dim_sizes(z_index) = 1
  end subroutine apply_z_dimension_information

  !> One dimension decomposition
  subroutine one_dim_decomposition(current_state)
    type(model_state_type), intent(inout) :: current_state

    integer :: x_size, y_size, split_size, dimension_division, dimension_extra

    current_state%parallel%wrapped_around=.false.

    x_size = merge(current_state%global_grid%size(x_index), 1, current_state%global_grid%active(x_index))
    y_size = merge(current_state%global_grid%size(y_index), 1, current_state%global_grid%active(y_index))

    split_size = merge(x_size, y_size, x_size .gt. y_size)
    dimension_division = split_size / current_state%parallel%processes
    dimension_extra = split_size - (dimension_division * current_state%parallel%processes)
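    ! The larger of the two horizontal dimensions is split; for example a global grid of
    ! x_size=100 and y_size=60 on 4 processes decomposes in X with dimension_division=25
    ! and dimension_extra=0, so every rank holds 25 X points and the full Y extent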

    current_state%local_grid%active = current_state%global_grid%active
    current_state%local_grid%dimensions = current_state%global_grid%dimensions
    current_state%parallel%neighbour_comm = current_state%parallel%monc_communicator
    call apply_z_dimension_information(current_state)

    if (x_size .gt. y_size) then
      ! Decompose in X
      current_state%local_grid%start(y_index)=1
      current_state%local_grid%end(y_index)=y_size
      current_state%local_grid%size(y_index)=y_size
      current_state%parallel%my_coords(y_index)=0
      current_state%parallel%dim_sizes(y_index)=1
      current_state%local_grid%start(x_index)=dimension_division*current_state%parallel%my_rank+merge(&
           dimension_extra, current_state%parallel%my_rank, dimension_extra .lt. current_state%parallel%my_rank) + 1
      current_state%local_grid%end(x_index)=(current_state%local_grid%start(x_index)-1) + dimension_division + merge(&
           1, 0, current_state%parallel%my_rank .lt. dimension_extra)
      current_state%local_grid%size(x_index)=(current_state%local_grid%end(x_index) - current_state%local_grid%start(x_index)) + 1
      current_state%parallel%my_coords(x_index) = current_state%parallel%my_rank
      current_state%parallel%dim_sizes(x_index) = current_state%parallel%processes
      current_state%local_grid%neighbours(y_index,:) = current_state%parallel%my_rank
      ! Currently assume same PID in single direction (TODO: relax this restriction)
      ! Periodic wrap-around: rank 0's lower X neighbour is the last rank and the last
      ! rank's upper X neighbour is rank 0
      current_state%local_grid%neighbours(x_index,1:2) = merge(current_state%parallel%my_rank, current_state%parallel%processes, &
           current_state%parallel%my_rank .gt. 0) - 1
      current_state%parallel%wrapped_around(x_index, 1)=&
           current_state%local_grid%neighbours(x_index,1)==current_state%parallel%processes-1
      current_state%local_grid%neighbours(x_index,3:4) = merge(current_state%parallel%my_rank+1, 0, &
           current_state%parallel%my_rank .lt. current_state%parallel%processes-1)
      current_state%parallel%wrapped_around(x_index, 2)=current_state%local_grid%neighbours(x_index,3)==0
      current_state%local_grid%corner_neighbours(1,:)=current_state%local_grid%neighbours(x_index,1)
      current_state%local_grid%corner_neighbours(3,:)=current_state%local_grid%neighbours(x_index,1)
      current_state%local_grid%corner_neighbours(2,:)=current_state%local_grid%neighbours(x_index,3)
      current_state%local_grid%corner_neighbours(4,:)=current_state%local_grid%neighbours(x_index,3)
      current_state%parallel%wrapped_around(y_index, :)=.true.
    else
      ! Decompose in Y
      current_state%local_grid%start(x_index)=1
      current_state%local_grid%end(x_index)=x_size
      current_state%local_grid%size(x_index)=x_size
      current_state%parallel%my_coords(x_index)=0
      current_state%parallel%dim_sizes(x_index)=1
      current_state%local_grid%start(y_index)=dimension_division*current_state%parallel%my_rank+merge(&
           dimension_extra, current_state%parallel%my_rank, dimension_extra .lt. current_state%parallel%my_rank) + 1
      current_state%local_grid%end(y_index)=(current_state%local_grid%start(y_index)-1) + dimension_division + merge(&
           1, 0, current_state%parallel%my_rank .lt. dimension_extra)
      current_state%local_grid%size(y_index)=(current_state%local_grid%end(y_index) - current_state%local_grid%start(y_index)) + 1
      current_state%parallel%my_coords(y_index)=current_state%parallel%my_rank
      current_state%parallel%dim_sizes(y_index) = current_state%parallel%processes
      current_state%local_grid%neighbours(x_index,:) = current_state%parallel%my_rank
      current_state%local_grid%neighbours(y_index,1:2) = merge(current_state%parallel%my_rank, current_state%parallel%processes, &
           current_state%parallel%my_rank .gt. 0) - 1
      current_state%parallel%wrapped_around(y_index, 1)=&
           current_state%local_grid%neighbours(y_index,1)==current_state%parallel%processes-1
      current_state%local_grid%neighbours(y_index,3:4) = merge(current_state%parallel%my_rank+1, 0, &
           current_state%parallel%my_rank .lt. current_state%parallel%processes-1)
      current_state%parallel%wrapped_around(y_index, 2)=current_state%local_grid%neighbours(y_index,3)==0
      current_state%local_grid%corner_neighbours(1:2,:)=current_state%local_grid%neighbours(y_index,1)
      current_state%local_grid%corner_neighbours(3:4,:)=current_state%local_grid%neighbours(y_index,3)
      current_state%parallel%wrapped_around(x_index, :)=.true.
    end if

    call apply_data_start_end_bounds(current_state%local_grid)
    call display_decomposition_information(current_state, "OneDim")
  end subroutine one_dim_decomposition


  !> Serial decomposition
  subroutine serial_decomposition(current_state)
    type(model_state_type), intent(inout) :: current_state

    current_state%local_grid%active = current_state%global_grid%active
    current_state%local_grid%dimensions = current_state%global_grid%dimensions
    current_state%parallel%neighbour_comm = current_state%parallel%monc_communicator
    call apply_z_dimension_information(current_state)

    current_state%local_grid%start(y_index)=1
    current_state%local_grid%start(x_index)=1
    current_state%parallel%my_coords(y_index)=0
    current_state%parallel%my_coords(x_index)=0
    current_state%parallel%dim_sizes(y_index)=0
    current_state%parallel%dim_sizes(x_index)=0

    current_state%local_grid%end(x_index)= &
         merge(current_state%global_grid%size(x_index), 1, current_state%global_grid%active(x_index))
    current_state%local_grid%size(x_index)=current_state%local_grid%end(x_index)

    current_state%local_grid%end(y_index)=&
         merge(current_state%global_grid%size(y_index), 1, current_state%global_grid%active(y_index))
    current_state%local_grid%size(y_index)=current_state%local_grid%end(y_index)

    current_state%local_grid%neighbours(x_index,:) = current_state%parallel%my_rank
    current_state%local_grid%neighbours(y_index,:) = current_state%parallel%my_rank
    current_state%local_grid%corner_neighbours=current_state%parallel%my_rank

    current_state%parallel%wrapped_around=.true.

    call apply_data_start_end_bounds(current_state%local_grid)
    call display_decomposition_information(current_state, "Serial")
  end subroutine serial_decomposition

  !> Dumps out information about the decomposition at DEBUG level
  subroutine display_decomposition_information(current_state, decomp_name)
    type(model_state_type), intent(inout) :: current_state
    character(len=*), intent(in) :: decomp_name

    if (log_get_logging_level() .le. log_debug) then
      call log_log(log_debug, decomp_name//" rank="//trim(conv_to_string(current_state%parallel%my_rank))//&
           ": W=("//trim(conv_to_string(current_state%local_grid%start(z_index)))//"->"&
           //trim(conv_to_string(current_state%local_grid%end(z_index)))//") V=("//trim(conv_to_string(&
           current_state%local_grid%start(y_index)))//"->"//trim(conv_to_string(current_state%local_grid%end(&
           y_index)))//") U=("//trim(conv_to_string(current_state%local_grid%start(x_index)))//&
           "->"//trim(conv_to_string(current_state%local_grid%end(x_index)))//")")
      call log_log(log_debug, "Neighbours of "//trim(conv_to_string(current_state%parallel%my_rank))//": y=(["//&
           trim(conv_to_string(current_state%local_grid%neighbours(y_index,1)))//"],["//&
           trim(conv_to_string(current_state%local_grid%neighbours(y_index,2)))//"]) x=(["//&
           trim(conv_to_string(current_state%local_grid%neighbours(x_index,1)))//","//&
           trim(conv_to_string(current_state%local_grid%neighbours(x_index,2)))//"],["//&
           trim(conv_to_string(current_state%local_grid%neighbours(x_index,3)))//","//&
           trim(conv_to_string(current_state%local_grid%neighbours(x_index,4)))//"])")
      call log_master_log(log_debug, "Halo size z="//&
           trim(conv_to_string(current_state%local_grid%halo_size(z_index)))//&
           " y="//trim(conv_to_string(current_state%local_grid%halo_size(y_index)))//" x="//&
           trim(conv_to_string(current_state%local_grid%halo_size(x_index))))
    end if
    call log_master_log(log_info, "Decomposed "//trim(conv_to_string(current_state%parallel%processes))//&
         " processes via '"//decomp_name//"' into z="//trim(conv_to_string(current_state%parallel%dim_sizes(&
         z_index)))//" y="//trim(conv_to_string(current_state%parallel%dim_sizes(y_index)))//" x="//&
         trim(conv_to_string(current_state%parallel%dim_sizes(x_index))))
  end subroutine display_decomposition_information

  !> Calculates and applies the start and end bounds of the local data; held locally are
  !! the halo and the local data
  subroutine apply_data_start_end_bounds(local_grid)
    type(local_grid_type), intent(inout) :: local_grid

    integer :: i, n_dim

    ! Total number of dimensions
    n_dim = 3

    do i = 1, n_dim
      local_grid%local_domain_start_index(i) = local_grid%halo_size(i) + 1
      local_grid%local_domain_end_index(i) = local_grid%halo_size(i) + local_grid%size(i)
    end do
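    ! For example, with a halo_size of 2 and a local size of 50 in a dimension the owned
    ! points occupy indices 3 to 52 of the locally held data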
    ! call log_master_log(LOG_DEBUG,"start_index(1)= "//&
    !      trim(conv_to_string(local_grid%local_domain_start_index(1)))//" end_index(1)="//&
    !      trim(conv_to_string(local_grid%local_domain_end_index(1)))//" start_index(2)="//&
    !      trim(conv_to_string(local_grid%local_domain_start_index(2)))//" end_index(2)="//&
    !      trim(conv_to_string(local_grid%local_domain_end_index(2)))//" start_index(3)="//&
    !      trim(conv_to_string(local_grid%local_domain_start_index(3)))//" end_index(3)="//&
    !      trim(conv_to_string(local_grid%local_domain_end_index(3))))

  end subroutine apply_data_start_end_bounds

  !> Will apply halo size information and allocate the neighbour array data
  subroutine apply_halo_information_and_allocate_neighbours(current_state)
    type(model_state_type), target, intent(inout) :: current_state
    integer :: n_dim, n_corners, total_halo_size_xy_dim
    integer :: halo_depth

    ! Read the halo_depth value from the options database
    halo_depth = options_get_integer(current_state%options_database, "halo_depth")

    ! There is no halo in the Z direction
    current_state%local_grid%halo_size(z_index) = 0
    current_state%local_grid%halo_size(y_index) = halo_depth
    current_state%local_grid%halo_size(x_index) = halo_depth

    ! Left and right, or up and down
    total_halo_size_xy_dim = current_state%local_grid%halo_size(x_index)*2
    ! Total number of dimensions
    n_dim = 3
    ! Total number of corners
    n_corners = 4
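    ! With halo_depth=2 the allocation below gives neighbours(3,4), i.e. two halo layers
    ! per side in X and Y, and corner_neighbours(4,2), one entry per corner per halo layer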
    allocate(current_state%local_grid%neighbours(n_dim, total_halo_size_xy_dim), &
         current_state%local_grid%corner_neighbours(n_corners, halo_depth))
  end subroutine apply_halo_information_and_allocate_neighbours
end module decomposition_mod