MFC
Exascale flow solver
Loading...
Searching...
No Matches
m_data_input.f90
Go to the documentation of this file.
1!>
2!! @file
3!> @brief Contains module m_data_input
4
5!> @brief Reads raw simulation grid and conservative-variable data for a given time-step and fills buffer regions
7
8#ifdef MFC_MPI
9 use mpi
10#endif
11
14 use m_mpi_proxy
15 use m_mpi_common
18 use m_helper
19
20 implicit none
21
24
25 abstract interface
26
27 !> Subroutine for reading data files
! Abstract signature shared by the serial and parallel readers so that
! s_read_data_files (below) can be pointed at either implementation.
28 impure subroutine s_read_abstract_data_files(t_step)
29
30 implicit none
31
32 integer, intent(in) :: t_step
33
34 end subroutine s_read_abstract_data_files
35 end interface
36
37 type(scalar_field), allocatable, dimension(:), public :: q_cons_vf !< Conservative variables
! q_cons_temp: staging buffer used on down-sampled restarts — it is copied
! into q_cons_vf after the MPI read (see s_read_parallel_conservative_data).
38 type(scalar_field), allocatable, dimension(:), public :: q_cons_temp
39 type(scalar_field), allocatable, dimension(:), public :: q_prim_vf !< Primitive variables
40 type(integer_field), allocatable, dimension(:,:), public :: bc_type !< Boundary condition identifiers
41 type(scalar_field), public :: q_t_sf !< Temperature field
42 ! type(scalar_field), public :: ib_markers !<
! Immersed-boundary marker field, read as integer data in s_read_ib_data_files.
43 type(integer_field), public :: ib_markers
44
! Dispatch pointer: set to the serial or parallel reader at module setup.
45 procedure(s_read_abstract_data_files), pointer :: s_read_data_files => null()
46
47contains
48
49 !> Helper subroutine to read grid data files for a given direction
50 impure subroutine s_read_grid_data_direction(t_step_dir, direction, cb_array, d_array, cc_array, size_dim)
51
52 character(len=*), intent(in) :: t_step_dir
53 character(len=1), intent(in) :: direction
54 real(wp), dimension(-1:), intent(out) :: cb_array
55 real(wp), dimension(0:), intent(out) :: d_array
56 real(wp), dimension(0:), intent(out) :: cc_array
57 integer, intent(in) :: size_dim
58 character(LEN=len_trim(t_step_dir) + 10) :: file_loc
59 logical :: file_check
60
61 file_loc = trim(t_step_dir) // '/' // direction // '_cb.dat'
62 inquire (file=trim(file_loc), exist=file_check)
63
64 if (file_check) then
65 open (1, file=trim(file_loc), form='unformatted', status='old', action='read')
66 read (1) cb_array(-1:size_dim)
67 close (1)
68 else
69 call s_mpi_abort('File ' // direction // '_cb.dat is missing in ' // trim(t_step_dir) // '. Exiting.')
70 end if
71
72 d_array(0:size_dim) = cb_array(0:size_dim) - cb_array(-1:size_dim - 1)
73 cc_array(0:size_dim) = cb_array(-1:size_dim - 1) + d_array(0:size_dim)/2._wp
74
75 end subroutine s_read_grid_data_direction
76
77#ifdef MFC_MPI
78 !> Helper subroutine to setup MPI data I/O parameters
! Computes the local data size and the MPI_OFFSET_KIND quantities used to
! build file displacements/views for parallel restart I/O.
79 impure subroutine s_setup_mpi_io_params(data_size, m_MOK, n_MOK, p_MOK, WP_MOK, MOK, str_MOK, NVARS_MOK)
80
! data_size: number of cells owned by this rank; the *_mok arguments are
! extents/sizes promoted to MPI_OFFSET_KIND for offset arithmetic.
81 integer, intent(out) :: data_size
82 integer(KIND=MPI_OFFSET_KIND), intent(out) :: m_mok, n_mok, p_mok
83 integer(KIND=MPI_OFFSET_KIND), intent(out) :: wp_mok, mok, str_mok, nvars_mok
84
! NOTE(review): the bodies of both branches below (original lines 86 and 88)
! are absent from this listing — presumably the MPI I/O data views are
! initialized with or without IB markers; confirm against the repository.
85 if (ib) then
87 else
89 end if
90
! Local (per-rank) cell count
91 data_size = (m + 1)*(n + 1)*(p + 1)
92
! Global grid extents promoted to MPI_OFFSET_KIND
93 m_mok = int(m_glb + 1, mpi_offset_kind)
94 n_mok = int(n_glb + 1, mpi_offset_kind)
95 p_mok = int(p_glb + 1, mpi_offset_kind)
! Bytes per stored real (storage_size returns bits)
96 wp_mok = int(storage_size(0._stp)/8, mpi_offset_kind)
97 mok = int(1._wp, mpi_offset_kind)
98 str_mok = int(name_len, mpi_offset_kind)
99 nvars_mok = int(sys_size, mpi_offset_kind)
100
101 end subroutine s_setup_mpi_io_params
102#endif
103
104 !> Helper subroutine to read IB data files
! Reads the immersed-boundary (IB) marker field for this rank. No-op unless
! IB support is enabled. In parallel-I/O mode the data is read with MPI-IO
! from <file_loc_base>ib.dat at a computed displacement; otherwise a per-rank
! unformatted file <file_loc_base>/ib_data.dat is read into ib_markers%sf.
! NOTE(review): t_step is optional but is dereferenced unguarded below when
! parallel_io is true (save_index, disp). Callers on that path must pass it;
! consider adding a present(t_step) guard.
105 impure subroutine s_read_ib_data_files(file_loc_base, t_step)
106
107 character(len=*), intent(in) :: file_loc_base
108 integer, intent(in), optional :: t_step
109 character(LEN=len_trim(file_loc_base) + 20) :: file_loc
110 logical :: file_exist
111 integer :: ifile, ierr, data_size, var_mok
112
113#ifdef MFC_MPI
114 integer, dimension(MPI_STATUS_SIZE) :: status
115 integer(KIND=MPI_OFFSET_KIND) :: disp
116 integer :: m_mok, n_mok, p_mok, mok, wp_mok, save_index
117#endif
118
! Nothing to do when immersed boundaries are disabled
119 if (.not. ib) return
120
! File naming differs between the MPI-IO layout and the serial layout
121 if (parallel_io) then
122 write (file_loc, '(A)') trim(file_loc_base) // 'ib.dat'
123 else
124 write (file_loc, '(A)') trim(file_loc_base) // '/ib_data.dat'
125 end if
126 inquire (file=trim(file_loc), exist=file_exist)
127
128 if (file_exist) then
129 if (parallel_io) then
130#ifdef MFC_MPI
131 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
132
133 m_mok = int(m_glb + 1, mpi_offset_kind)
134 n_mok = int(n_glb + 1, mpi_offset_kind)
135 p_mok = int(p_glb + 1, mpi_offset_kind)
136 mok = int(1._wp, mpi_offset_kind)
137 wp_mok = int(storage_size(0._stp)/8, mpi_offset_kind)
138 save_index = t_step/t_step_save ! get the number of saves done to this point
139
140 data_size = (m + 1)*(n + 1)*(p + 1)
141 var_mok = int(sys_size + 1, mpi_offset_kind)
! Displacement presumably skips the conservative-variable records already in
! the file, scaled by the number of saves — confirm against the writer side.
142 if (t_step == 0) then
143 disp = 0
144 else
145 disp = m_mok*max(mok, n_mok)*max(mok, p_mok)*wp_mok*(var_mok - 1 + int(save_index, mpi_offset_kind))
146 end if
147
148 call mpi_file_set_view(ifile, disp, mpi_integer, mpi_io_ib_data%view, 'native', mpi_info_int, ierr)
149 call mpi_file_read(ifile, mpi_io_ib_data%var%sf, data_size, mpi_integer, status, ierr)
150
151 call mpi_file_close(ifile, ierr)
152#endif
153 else
! Serial path. NOTE(review): hard-coded unit 2 — newunit= would be safer.
154 open (2, file=trim(file_loc), form='unformatted', action='read', status='old')
155 read (2) ib_markers%sf(0:m,0:n,0:p)
156 close (2)
157 end if
158 else
159 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
160 end if
161
162 end subroutine s_read_ib_data_files
163
164 !> Helper subroutine to allocate field arrays for given dimensionality
165 impure subroutine s_allocate_field_arrays(local_start_idx, end_x, end_y, end_z)
166
167 integer, intent(in) :: local_start_idx, end_x, end_y, end_z
168 integer :: i
169
170 do i = 1, sys_size
171 allocate (q_cons_vf(i)%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
172 allocate (q_prim_vf(i)%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
173 end do
174
175 if (ib) then
176 allocate (ib_markers%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
177 end if
178
179 if (chemistry) then
180 allocate (q_t_sf%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
181 end if
182
183 end subroutine s_allocate_field_arrays
184
185 !> Read the raw data files present in the corresponding time-step directory and to populate the associated grid and conservative
186 !! variables.
! Serial reader: each rank reads its own unformatted files under
! <case_dir>/p_all/p<rank>/<t_step>/ — grid first, then one file per
! conservative variable, then the IB markers.
187 impure subroutine s_read_serial_data_files(t_step)
188
189 integer, intent(in) :: t_step
190 character(LEN=len_trim(case_dir) + 2*name_len) :: t_step_dir
191 character(LEN=len_trim(case_dir) + 3*name_len) :: file_loc
! file_num is sized to hold the widest variable index (digits of sys_size)
192 character(len=int(floor(log10(real(sys_size, wp)))) + 1) :: file_num
193 character(LEN=len_trim(case_dir) + 2*name_len) :: t_step_ib_dir
194 logical :: dir_check
195 logical :: file_check
196 integer :: i
197
! Per-rank time-step directory: <case_dir>/p_all/p<rank>/<t_step>
198 write (t_step_dir, '(A,I0,A,I0)') '/p_all/p', proc_rank, '/', t_step
199 t_step_dir = trim(case_dir) // trim(t_step_dir)
200
201 file_loc = trim(t_step_dir) // '/.'
202 call my_inquire(file_loc, dir_check)
203
204 if (dir_check .neqv. .true.) then
205 call s_mpi_abort('Time-step folder ' // trim(t_step_dir) // ' is missing. Exiting.')
206 end if
207
! NOTE(review): the bodies of this bc_io branch (original lines 209 and 211)
! are absent from this listing — presumably the serial boundary-condition
! files are read here; confirm against the repository source.
208 if (bc_io) then
210 else
212 end if
213
! Grid data: x always; y when 2D/3D; z only when 3D
214 call s_read_grid_data_direction(t_step_dir, 'x', x_cb, dx, x_cc, m)
215
216 if (n > 0) then
217 call s_read_grid_data_direction(t_step_dir, 'y', y_cb, dy, y_cc, n)
218
219 if (p > 0) then
220 call s_read_grid_data_direction(t_step_dir, 'z', z_cb, dz, z_cc, p)
221 end if
222 end if
223
! One unformatted file per conservative variable.
! NOTE(review): hard-coded unit 1 — newunit= would be safer.
224 do i = 1, sys_size
225 write (file_num, '(I0)') i
226 file_loc = trim(t_step_dir) // '/q_cons_vf' // trim(file_num) // '.dat'
227 inquire (file=trim(file_loc), exist=file_check)
228
229 if (file_check) then
230 open (1, file=trim(file_loc), form='unformatted', status='old', action='read')
231 read (1) q_cons_vf(i)%sf(0:m,0:n,0:p)
232 close (1)
233 else if (bubbles_lagrange .and. i == beta_idx) then
234 ! beta (Lagrangian void fraction) is not written by pre_process for t_step_start; initialize to zero.
235 q_cons_vf(i)%sf(0:m,0:n,0:p) = 0._wp
236 else
237 call s_mpi_abort('File q_cons_vf' // trim(file_num) // '.dat is missing in ' // trim(t_step_dir) // '. Exiting.')
238 end if
239 end do
240
! t_step is not forwarded: the serial IB path does not need it
241 call s_read_ib_data_files(t_step_dir)
242
243 end subroutine s_read_serial_data_files
244
245 !> Parallel-read the raw data files present in the corresponding time-step directory and to populate the associated grid and
246 !! conservative variables.
! MPI-IO reader: all ranks read the shared global cell-boundary files
! (x/y/z_cb.dat), slice out their local portion via start_idx, derive cell
! widths/centers, then read the conservative variables and (optionally) the
! boundary-condition data.
247 impure subroutine s_read_parallel_data_files(t_step)
248
249 integer, intent(in) :: t_step
250
251#ifdef MFC_MPI
252 real(wp), allocatable, dimension(:) :: x_cb_glb, y_cb_glb, z_cb_glb
253 integer :: ifile, ierr, data_size, filetype, stride
254 integer, dimension(MPI_STATUS_SIZE) :: status
255 integer(KIND=MPI_OFFSET_KIND) :: disp
256 integer(KIND=MPI_OFFSET_KIND) :: m_mok, n_mok, p_mok
257 integer(KIND=MPI_OFFSET_KIND) :: wp_mok, var_mok, str_mok
258 integer(KIND=MPI_OFFSET_KIND) :: nvars_mok
259 integer(KIND=MPI_OFFSET_KIND) :: mok
260 integer(kind=MPI_OFFSET_KIND) :: offset
! NOTE(review): delx/dely/delz, disp, var_mok, str_mok, t_step_string and i
! are declared but unused in the code visible here.
261 real(wp) :: delx, dely, delz
262 character(LEN=path_len + 2*name_len) :: file_loc
263 logical :: file_exist
264 character(len=10) :: t_step_string
265 integer :: i
266
! Whole global boundary arrays are read on every rank, then sliced locally
267 allocate (x_cb_glb(-1:m_glb))
268 allocate (y_cb_glb(-1:n_glb))
269 allocate (z_cb_glb(-1:p_glb))
270
! Down-sampled files store every third boundary value (vector-type stride 3)
271 if (down_sample) then
272 stride = 3
273 else
274 stride = 1
275 end if
276
! x-direction boundaries
277 file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // 'x_cb.dat'
278 inquire (file=trim(file_loc), exist=file_exist)
279
280 if (file_exist) then
281 data_size = m_glb + 2
282 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
283
284 call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
285 call mpi_type_commit(filetype, ierr)
286
287 offset = 0
288 call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)
289
290 call mpi_file_read(ifile, x_cb_glb, data_size, mpi_p, status, ierr)
291 call mpi_file_close(ifile, ierr)
292 else
293 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
294 end if
295
! Local slice plus derived widths and centers
296 x_cb(-1:m) = x_cb_glb((start_idx(1) - 1):(start_idx(1) + m))
297 dx(0:m) = x_cb(0:m) - x_cb(-1:m - 1)
298 x_cc(0:m) = x_cb(-1:m - 1) + dx(0:m)/2._wp
299
! y-direction (2D/3D only); same pattern as x
300 if (n > 0) then
301 file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // 'y_cb.dat'
302 inquire (file=trim(file_loc), exist=file_exist)
303
304 if (file_exist) then
305 data_size = n_glb + 2
306 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
307
308 call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
309 call mpi_type_commit(filetype, ierr)
310
311 offset = 0
312 call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)
313
314 call mpi_file_read(ifile, y_cb_glb, data_size, mpi_p, status, ierr)
315 call mpi_file_close(ifile, ierr)
316 else
317 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
318 end if
319
320 y_cb(-1:n) = y_cb_glb((start_idx(2) - 1):(start_idx(2) + n))
321 dy(0:n) = y_cb(0:n) - y_cb(-1:n - 1)
322 y_cc(0:n) = y_cb(-1:n - 1) + dy(0:n)/2._wp
323
! z-direction (3D only); same pattern again
324 if (p > 0) then
325 file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // 'z_cb.dat'
326 inquire (file=trim(file_loc), exist=file_exist)
327
328 if (file_exist) then
329 data_size = p_glb + 2
330 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
331
332 call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
333 call mpi_type_commit(filetype, ierr)
334
335 offset = 0
336 call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)
337
338 call mpi_file_read(ifile, z_cb_glb, data_size, mpi_p, status, ierr)
339 call mpi_file_close(ifile, ierr)
340 else
341 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
342 end if
343
344 z_cb(-1:p) = z_cb_glb((start_idx(3) - 1):(start_idx(3) + p))
345 dz(0:p) = z_cb(0:p) - z_cb(-1:p - 1)
346 z_cc(0:p) = z_cb(-1:p - 1) + dz(0:p)/2._wp
347 end if
348 end if
349
350 call s_read_parallel_conservative_data(t_step, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
351
352 deallocate (x_cb_glb, y_cb_glb, z_cb_glb)
353
! NOTE(review): the bodies of this bc_io branch (original lines 355 and 357)
! are absent from this listing — presumably the parallel boundary-condition
! files are read here; confirm against the repository source.
354 if (bc_io) then
356 else
358 end if
359#endif
360
361 end subroutine s_read_parallel_data_files
362
363#ifdef MFC_MPI
364 !> Helper subroutine to read parallel conservative variable data
! Reads the conservative variables with MPI-IO. Two layouts are supported:
! one restart file per process (file_per_process, read with MPI_COMM_SELF)
! or a single shared file read collectively with per-variable file views.
! The *_mok arguments are filled/used for offset arithmetic.
365 impure subroutine s_read_parallel_conservative_data(t_step, m_MOK, n_MOK, p_MOK, WP_MOK, MOK, str_MOK, NVARS_MOK)
366
367 integer, intent(in) :: t_step
368 integer(KIND=MPI_OFFSET_KIND), intent(inout) :: m_mok, n_mok, p_mok
369 integer(KIND=MPI_OFFSET_KIND), intent(inout) :: wp_mok, mok, str_mok, nvars_mok
370 integer :: ifile, ierr, data_size
371 integer, dimension(MPI_STATUS_SIZE) :: status
372 integer(KIND=MPI_OFFSET_KIND) :: disp, var_mok
373 character(LEN=path_len + 2*name_len) :: file_loc
374 logical :: file_exist
375 character(len=10) :: t_step_string
376 integer :: i
377
378 if (file_per_process) then
! Per-rank file: <case_dir>/restart_data/lustre_<t_step>/<t_step>_<rank>.dat
379 call s_int_to_str(t_step, t_step_string)
380 write (file_loc, '(I0,A1,I7.7,A)') t_step, '_', proc_rank, '.dat'
381 file_loc = trim(case_dir) // '/restart_data/lustre_' // trim(t_step_string) // trim(mpiiofs) // trim(file_loc)
382 inquire (file=trim(file_loc), exist=file_exist)
383
384 if (file_exist) then
385 call mpi_file_open(mpi_comm_self, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
386
! NOTE(review): the branch bodies below (original lines 388, 391 and 393)
! are absent from this listing — presumably the MPI data views are set up
! here for the down-sampled / IB / plain cases; confirm against the repo.
387 if (down_sample) then
389 else
390 if (ib) then
392 else
394 end if
395 end if
396
! Down-sampled buffers carry one extra ghost layer on each side
397 if (down_sample) then
398 data_size = (m + 3)*(n + 3)*(p + 3)
399 else
400 data_size = (m + 1)*(n + 1)*(p + 1)
401 end if
402
403 m_mok = int(m_glb + 1, mpi_offset_kind)
404 n_mok = int(n_glb + 1, mpi_offset_kind)
405 p_mok = int(p_glb + 1, mpi_offset_kind)
406 wp_mok = int(storage_size(0._stp)/8, mpi_offset_kind)
407 mok = int(1._wp, mpi_offset_kind)
408 str_mok = int(name_len, mpi_offset_kind)
409 nvars_mok = int(sys_size, mpi_offset_kind)
410
! NOTE(review): both branches of this if/else are identical as written —
! either the distinction is vestigial or a difference was lost; candidates
! for collapsing into a single loop once confirmed against the repository.
411 if (bubbles_euler .or. elasticity .or. mhd) then
412 do i = 1, sys_size
413 var_mok = int(i, mpi_offset_kind)
414 call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, mpi_io_p, status, ierr)
415 end do
416 else
417 do i = 1, sys_size
418 var_mok = int(i, mpi_offset_kind)
419 call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, mpi_io_p, status, ierr)
420 end do
421 end if
422
423 call s_mpi_barrier()
424 call mpi_file_close(ifile, ierr)
425
! Copy the staged down-sampled data into the interior of q_cons_vf
426 if (down_sample) then
427 do i = 1, sys_size
428 q_cons_vf(i)%sf(0:m,0:n,0:p) = q_cons_temp(i)%sf(0:m,0:n,0:p)
429 end do
430 end if
431
432 call s_read_ib_data_files(trim(case_dir) // '/restart_data' // trim(mpiiofs), t_step)
433 else
434 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
435 end if
436 else
! Shared file: <case_dir>/restart_data/<t_step>.dat, collective reads with a
! per-variable displacement into the common file
437 write (file_loc, '(I0,A)') t_step, '.dat'
438 file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // trim(file_loc)
439 inquire (file=trim(file_loc), exist=file_exist)
440
441 if (file_exist) then
442 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
443
444 call s_setup_mpi_io_params(data_size, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
445
446 do i = 1, sys_size
447 var_mok = int(i, mpi_offset_kind)
448
! Byte displacement of variable i within the shared file
449 disp = m_mok*max(mok, n_mok)*max(mok, p_mok)*wp_mok*(var_mok - 1)
450
451 call mpi_file_set_view(ifile, disp, mpi_p, mpi_io_data%view(i), 'native', mpi_info_int, ierr)
452 call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, mpi_io_p, status, ierr)
453 end do
454
455 call s_mpi_barrier()
456 call mpi_file_close(ifile, ierr)
457
458 call s_read_ib_data_files(trim(case_dir) // '/restart_data' // trim(mpiiofs), t_step)
459 else
460 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
461 end if
462 end if
! NOTE(review): the closing "end subroutine" statement (original line 464)
! is absent from this listing.
463
465#endif
466
467 !> Computation of parameters, allocation procedures, and/or any other tasks needed to properly setup the module
! NOTE(review): the subroutine statement itself (original line 468,
! presumably "impure subroutine s_initialize_data_input_module") is absent
! from this listing.
469
470 integer :: i
471
! Field containers are sized to the number of unknowns
472 allocate (q_cons_vf(1:sys_size))
473 allocate (q_prim_vf(1:sys_size))
474 allocate (q_cons_temp(1:sys_size))
475
! NOTE(review): the bodies at original lines 478, 485 and 488 are absent from
! this listing — presumably the per-dimensionality field allocations (calls
! to s_allocate_field_arrays); only the 3D down-sample staging buffer
! allocation is visible here. Confirm against the repository.
476 if (n > 0) then
477 if (p > 0) then
479 if (down_sample) then
480 do i = 1, sys_size
481 allocate (q_cons_temp(i)%sf(-1:m + 1,-1:n + 1,-1:p + 1))
482 end do
483 end if
484 else
486 end if
487 else
489 end if
490
! Boundary-condition identifiers: one pair of faces per spatial dimension
491 allocate (bc_type(1:num_dims,1:2))
492
493 allocate (bc_type(1, 1)%sf(0:0,0:n,0:p))
494 allocate (bc_type(1, 2)%sf(0:0,0:n,0:p))
495 if (n > 0) then
496 allocate (bc_type(2, 1)%sf(-buff_size:m + buff_size,0:0,0:p))
497 allocate (bc_type(2, 2)%sf(-buff_size:m + buff_size,0:0,0:p))
498 if (p > 0) then
499 allocate (bc_type(3, 1)%sf(-buff_size:m + buff_size,-buff_size:n + buff_size,0:0))
500 allocate (bc_type(3, 2)%sf(-buff_size:m + buff_size,-buff_size:n + buff_size,0:0))
501 end if
502 end if
503
! NOTE(review): the branch bodies (original lines 505 and 507) are absent
! from this listing — presumably they point s_read_data_files at
! s_read_serial_data_files / s_read_parallel_data_files respectively.
504 if (parallel_io .neqv. .true.) then
506 else
508 end if
509
510 end subroutine s_initialize_data_input_module
511
512 !> Deallocation procedures for the module
! Releases every array allocated by s_initialize_data_input_module /
! s_allocate_field_arrays and resets the reader dispatch pointer.
! NOTE(review): the subroutine statement itself (original line 513) is
! absent from this listing.
514
515 integer :: i
516
517 do i = 1, sys_size
518 deallocate (q_cons_vf(i)%sf)
519 deallocate (q_prim_vf(i)%sf)
! NOTE(review): q_cons_temp(i)%sf is freed whenever down_sample is set, but
! the only allocation visible in this listing is in the 3D branch — confirm
! the missing 1D/2D branches also allocate it, or this deallocate can fail.
520 if (down_sample) then
521 deallocate (q_cons_temp(i)%sf)
522 end if
523 end do
524
525 deallocate (q_cons_vf)
526 deallocate (q_prim_vf)
527 deallocate (q_cons_temp)
528
529 if (ib) then
530 deallocate (ib_markers%sf)
531 end if
532
533 if (chemistry) then
534 deallocate (q_t_sf%sf)
535 end if
536
! Boundary-condition fields mirror the dimensional allocation pattern
537 deallocate (bc_type(1, 1)%sf, bc_type(1, 2)%sf)
538 if (n > 0) then
539 deallocate (bc_type(2, 1)%sf, bc_type(2, 2)%sf)
540 if (p > 0) then
541 deallocate (bc_type(3, 1)%sf, bc_type(3, 2)%sf)
542 end if
543 end if
544
545 deallocate (bc_type)
546
! Disassociate the dispatch pointer so stale calls fail fast
547 s_read_data_files => null()
548
549 end subroutine s_finalize_data_input_module
550
551end module m_data_input
Subroutine for reading data files.
Noncharacteristic and processor boundary condition application for ghost cells and buffer regions.
subroutine, public s_read_parallel_boundary_condition_files(bc_type)
Read boundary condition type and buffer data from per-rank parallel files using MPI I/O.
subroutine, public s_read_serial_boundary_condition_files(step_dirpath, bc_type)
Read boundary condition type and buffer data from serial (unformatted) restart files.
subroutine, public s_assign_default_bc_type(bc_type)
Initialize the per-cell boundary condition type arrays with the global default BC values.
Platform-specific file and directory operations: create, delete, inquire, getcwd, and basename.
impure subroutine my_inquire(fileloc, dircheck)
Inquires on the existence of a directory.
Reads raw simulation grid and conservative-variable data for a given time-step and fills buffer regions.
impure subroutine s_allocate_field_arrays(local_start_idx, end_x, end_y, end_z)
Helper subroutine to allocate field arrays for given dimensionality.
impure subroutine, public s_read_parallel_data_files(t_step)
Parallel-read the raw data files present in the corresponding time-step directory and to populate the associated grid and conservative variables.
type(scalar_field), public q_t_sf
Temperature field.
type(scalar_field), dimension(:), allocatable, public q_cons_vf
Conservative variables.
impure subroutine s_read_parallel_conservative_data(t_step, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
Helper subroutine to read parallel conservative variable data.
impure subroutine, public s_initialize_data_input_module
Computation of parameters, allocation procedures, and/or any other tasks needed to properly setup the module.
type(scalar_field), dimension(:), allocatable, public q_prim_vf
Primitive variables.
impure subroutine s_setup_mpi_io_params(data_size, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
Helper subroutine to setup MPI data I/O parameters.
impure subroutine, public s_finalize_data_input_module
Deallocation procedures for the module.
impure subroutine s_read_grid_data_direction(t_step_dir, direction, cb_array, d_array, cc_array, size_dim)
Helper subroutine to read grid data files for a given direction.
impure subroutine, public s_read_serial_data_files(t_step)
Read the raw data files present in the corresponding time-step directory and to populate the associated grid and conservative variables.
type(scalar_field), dimension(:), allocatable, public q_cons_temp
procedure(s_read_abstract_data_files), pointer, public s_read_data_files
type(integer_field), dimension(:,:), allocatable, public bc_type
Boundary condition identifiers.
type(integer_field), public ib_markers
impure subroutine s_read_ib_data_files(file_loc_base, t_step)
Helper subroutine to read IB data files.
Shared derived types for field data, patch geometry, bubble dynamics, and MPI I/O structures.
Global parameters for the post-process: domain geometry, equation of state, and output database settings.
logical, parameter chemistry
Chemistry modeling.
integer beta_idx
Index of lagrange bubbles beta.
real(wp), dimension(:), allocatable y_cc
integer proc_rank
Rank of the local processor.
type(mpi_io_ib_var), public mpi_io_ib_data
real(wp), dimension(:), allocatable y_cb
character(len=name_len) mpiiofs
integer, dimension(:), allocatable start_idx
Starting cell-center index of local processor in global grid.
integer sys_size
Number of unknowns in the system of equations.
real(wp), dimension(:), allocatable dz
integer buff_size
Number of ghost cells for boundary condition storage.
real(wp), dimension(:), allocatable z_cb
integer num_dims
Number of spatial dimensions.
real(wp), dimension(:), allocatable x_cc
real(wp), dimension(:), allocatable x_cb
real(wp), dimension(:), allocatable dy
integer t_step_save
Interval between consecutive time-step directory.
real(wp), dimension(:), allocatable z_cc
character(len=path_len) case_dir
Case folder location.
logical mhd
Magnetohydrodynamics.
logical parallel_io
Format of the data files.
logical down_sample
down sampling of the database file(s)
logical file_per_process
output format
logical elasticity
elasticity modeling, true for hyper or hypo
type(mpi_io_var), public mpi_io_data
real(wp), dimension(:), allocatable dx
Cell-width distributions in the x-, y- and z-coordinate directions.
Utility routines for bubble model setup, coordinate transforms, array sampling, and special functions...
elemental subroutine, public s_int_to_str(i, res)
Convert an integer to its trimmed string representation.
MPI communication layer: domain decomposition, halo exchange, reductions, and parallel I/O setup.
impure subroutine s_mpi_abort(prnt, code)
The subroutine terminates the MPI execution environment.
impure subroutine s_mpi_barrier
Halts all processes until all have reached barrier.
impure subroutine s_initialize_mpi_data(q_cons_vf, ib_markers, beta)
Set up MPI I/O data views and variable pointers for parallel file output.
subroutine s_initialize_mpi_data_ds(q_cons_vf)
Set up MPI I/O data views for downsampled (coarsened) parallel file output.
MPI gather and scatter operations for distributing post-process grid and flow-variable data.
Derived type annexing an integer scalar field (SF).
Derived type annexing a scalar field (SF).