MFC
Exascale flow solver
Loading...
Searching...
No Matches
m_data_input.f90
Go to the documentation of this file.
1!>
2!! @file
3!> @brief Contains module m_data_input
4
5!> @brief Reads raw simulation grid and conservative-variable data for a given time-step and fills buffer regions
7
8#ifdef MFC_MPI
9 use mpi
10#endif
11
14 use m_mpi_proxy
15 use m_mpi_common
18 use m_helper
19
20 implicit none
21
24
    abstract interface

        !> Common signature for the serial/parallel data-file readers that the
        !! s_read_data_files procedure pointer is bound to at runtime.
        !! @param t_step Time step whose raw data files are to be read
        impure subroutine s_read_abstract_data_files(t_step)

            implicit none

            integer, intent(in) :: t_step

        end subroutine s_read_abstract_data_files
    end interface

    type(scalar_field), allocatable, dimension(:), public :: q_cons_vf !< Conservative variables
    type(scalar_field), allocatable, dimension(:), public :: q_cons_temp !< Scratch conservative-variable buffer (allocated/used only when down_sample is enabled)
    type(scalar_field), allocatable, dimension(:), public :: q_prim_vf !< Primitive variables
    type(integer_field), allocatable, dimension(:,:), public :: bc_type !< Boundary condition identifiers
    type(scalar_field), public :: q_t_sf !< Temperature field
    ! type(scalar_field), public :: ib_markers !<
    type(integer_field), public :: ib_markers !< Immersed-boundary marker field (read from IB data files when ib is enabled)

    !> Dispatches to the serial or parallel reader; target set during module initialization
    procedure(s_read_abstract_data_files), pointer :: s_read_data_files => null()
46
47contains
48
49 !> Helper subroutine to read grid data files for a given direction
50 impure subroutine s_read_grid_data_direction(t_step_dir, direction, cb_array, d_array, cc_array, size_dim)
51
52 character(len=*), intent(in) :: t_step_dir
53 character(len=1), intent(in) :: direction
54 real(wp), dimension(-1:), intent(out) :: cb_array
55 real(wp), dimension(0:), intent(out) :: d_array
56 real(wp), dimension(0:), intent(out) :: cc_array
57 integer, intent(in) :: size_dim
58 character(LEN=len_trim(t_step_dir) + 10) :: file_loc
59 logical :: file_check
60
61 file_loc = trim(t_step_dir) // '/' // direction // '_cb.dat'
62 inquire (file=trim(file_loc), exist=file_check)
63
64 if (file_check) then
65 open (1, file=trim(file_loc), form='unformatted', status='old', action='read')
66 read (1) cb_array(-1:size_dim)
67 close (1)
68 else
69 call s_mpi_abort('File ' // direction // '_cb.dat is missing in ' // trim(t_step_dir) // '. Exiting.')
70 end if
71
72 d_array(0:size_dim) = cb_array(0:size_dim) - cb_array(-1:size_dim - 1)
73 cc_array(0:size_dim) = cb_array(-1:size_dim - 1) + d_array(0:size_dim)/2._wp
74
75 end subroutine s_read_grid_data_direction
76
77#ifdef MFC_MPI
    !> Helper subroutine to setup MPI data I/O parameters
    !! Computes the local cell count and the MPI_OFFSET_KIND quantities used to
    !! build displacements into the shared restart file.
    impure subroutine s_setup_mpi_io_params(data_size, m_MOK, n_MOK, p_MOK, WP_MOK, MOK, str_MOK, NVARS_MOK)

        integer, intent(out) :: data_size !< Number of local cells, (m+1)*(n+1)*(p+1)
        integer(KIND=MPI_OFFSET_KIND), intent(out) :: m_mok, n_mok, p_mok
        integer(KIND=MPI_OFFSET_KIND), intent(out) :: wp_mok, mok, str_mok, nvars_mok

        if (ib) then
            ! NOTE(review): the bodies of this if/else were lost when this listing was
            ! extracted (they were hyperlinked calls) - presumably the MPI I/O data
            ! views are initialized here with/without IB markers; restore from repository.
        else
        end if

        data_size = (m + 1)*(n + 1)*(p + 1)

        ! Global extents and per-record sizes expressed as MPI offsets
        m_mok = int(m_glb + 1, mpi_offset_kind)
        n_mok = int(n_glb + 1, mpi_offset_kind)
        p_mok = int(p_glb + 1, mpi_offset_kind)
        wp_mok = int(storage_size(0._stp)/8, mpi_offset_kind) ! bytes per stored real
        mok = int(1._wp, mpi_offset_kind)
        str_mok = int(name_len, mpi_offset_kind)
        nvars_mok = int(sys_size, mpi_offset_kind)

    end subroutine s_setup_mpi_io_params
102#endif
103
104 !> Helper subroutine to read IB data files
105 impure subroutine s_read_ib_data_files(file_loc_base, t_step)
106
107 character(len=*), intent(in) :: file_loc_base
108 integer, intent(in), optional :: t_step
109 character(LEN=len_trim(file_loc_base) + 20) :: file_loc
110 logical :: file_exist
111 integer :: ifile, ierr, data_size
112
113#ifdef MFC_MPI
114 integer, dimension(MPI_STATUS_SIZE) :: status
115 integer(KIND=MPI_OFFSET_KIND) :: disp
116 integer(KIND=MPI_OFFSET_KIND) :: m_mok, n_mok, p_mok, mok, wp_mok, var_mok
117 integer :: save_index
118#endif
119
120 if (.not. ib) return
121
122 if (parallel_io) then
123 write (file_loc, '(A)') trim(file_loc_base) // 'ib.dat'
124 else
125 write (file_loc, '(A)') trim(file_loc_base) // '/ib_data.dat'
126 end if
127 inquire (file=trim(file_loc), exist=file_exist)
128
129 if (file_exist) then
130 if (parallel_io) then
131#ifdef MFC_MPI
132 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
133
134 m_mok = int(m_glb + 1, mpi_offset_kind)
135 n_mok = int(n_glb + 1, mpi_offset_kind)
136 p_mok = int(p_glb + 1, mpi_offset_kind)
137 mok = int(1._wp, mpi_offset_kind)
138 wp_mok = int(storage_size(0._stp)/8, mpi_offset_kind)
139 save_index = t_step/t_step_save ! get the number of saves done to this point
140
141 data_size = (m + 1)*(n + 1)*(p + 1)
142 var_mok = int(sys_size + 1, mpi_offset_kind)
143 if (t_step == 0) then
144 disp = 0
145 else
146 disp = m_mok*max(mok, n_mok)*max(mok, p_mok)*wp_mok*(var_mok - 1 + int(save_index, mpi_offset_kind))
147 end if
148
149 call mpi_file_set_view(ifile, disp, mpi_integer, mpi_io_ib_data%view, 'native', mpi_info_int, ierr)
150 call mpi_file_read(ifile, mpi_io_ib_data%var%sf, data_size, mpi_integer, status, ierr)
151
152 call mpi_file_close(ifile, ierr)
153#endif
154 else
155 open (2, file=trim(file_loc), form='unformatted', action='read', status='old')
156 read (2) ib_markers%sf(0:m,0:n,0:p)
157 close (2)
158 end if
159 else
160 call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
161 end if
162
163 end subroutine s_read_ib_data_files
164
165 !> Helper subroutine to allocate field arrays for given dimensionality
166 impure subroutine s_allocate_field_arrays(local_start_idx, end_x, end_y, end_z)
167
168 integer, intent(in) :: local_start_idx, end_x, end_y, end_z
169 integer :: i
170
171 do i = 1, sys_size
172 allocate (q_cons_vf(i)%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
173 allocate (q_prim_vf(i)%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
174 end do
175
176 if (ib) then
177 allocate (ib_markers%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
178 end if
179
180 if (chemistry) then
181 allocate (q_t_sf%sf(local_start_idx:end_x,local_start_idx:end_y,local_start_idx:end_z))
182 end if
183
184 end subroutine s_allocate_field_arrays
185
    !> Read the raw data files present in the corresponding time-step directory and to populate the associated grid and conservative
    !! variables.
    !! @param t_step Time step whose per-rank directory is read
    impure subroutine s_read_serial_data_files(t_step)

        integer, intent(in) :: t_step
        character(LEN=len_trim(case_dir) + 2*name_len) :: t_step_dir
        character(LEN=len_trim(case_dir) + 3*name_len) :: file_loc
        character(len=int(floor(log10(real(sys_size, wp)))) + 1) :: file_num ! wide enough for the largest variable index
        logical :: dir_check
        logical :: file_check
        integer :: i

        ! Per-rank time-step directory: <case_dir>/p_all/p<rank>/<t_step>
        write (t_step_dir, '(A,I0,A,I0)') '/p_all/p', proc_rank, '/', t_step
        t_step_dir = trim(case_dir) // trim(t_step_dir)

        file_loc = trim(t_step_dir) // '/.'
        call my_inquire(file_loc, dir_check)

        if (dir_check .neqv. .true.) then
            call s_mpi_abort('Time-step folder ' // trim(t_step_dir) // ' is missing. Exiting.')
        end if

        if (bc_io) then
            ! NOTE(review): branch bodies lost in extraction (they were hyperlinked
            ! calls) - presumably s_read_serial_boundary_condition_files here and
            ! s_assign_default_bc_type in the else branch; restore from repository.
        else
        end if

        ! Grid files: x always, y when 2D, z when 3D
        call s_read_grid_data_direction(t_step_dir, 'x', x_cb, dx, x_cc, m)

        if (n > 0) then
            call s_read_grid_data_direction(t_step_dir, 'y', y_cb, dy, y_cc, n)

            if (p > 0) then
                call s_read_grid_data_direction(t_step_dir, 'z', z_cb, dz, z_cc, p)
            end if
        end if

        ! One unformatted file per conservative variable: q_cons_vf<i>.dat
        do i = 1, sys_size
            write (file_num, '(I0)') i
            file_loc = trim(t_step_dir) // '/q_cons_vf' // trim(file_num) // '.dat'
            inquire (file=trim(file_loc), exist=file_check)

            if (file_check) then
                open (1, file=trim(file_loc), form='unformatted', status='old', action='read')
                read (1) q_cons_vf(i)%sf(0:m,0:n,0:p)
                close (1)
            else if (bubbles_lagrange .and. i == beta_idx) then
                ! beta (Lagrangian void fraction) is not written by pre_process for t_step_start; initialize to zero.
                q_cons_vf(i)%sf(0:m,0:n,0:p) = 0._wp
            else
                call s_mpi_abort('File q_cons_vf' // trim(file_num) // '.dat is missing in ' // trim(t_step_dir) // '. Exiting.')
            end if
        end do

        ! Immersed-boundary markers (no-op unless ib is enabled)
        call s_read_ib_data_files(t_step_dir)

    end subroutine s_read_serial_data_files
244
    !> Parallel-read the raw data files present in the corresponding time-step directory and to populate the associated grid and
    !! conservative variables.
    !! @param t_step Time step whose shared restart files are read
    impure subroutine s_read_parallel_data_files(t_step)

        integer, intent(in) :: t_step

#ifdef MFC_MPI
        real(wp), allocatable, dimension(:) :: x_cb_glb, y_cb_glb, z_cb_glb
        integer :: ifile, ierr, data_size, filetype, stride
        integer, dimension(MPI_STATUS_SIZE) :: status
        integer(KIND=MPI_OFFSET_KIND) :: disp
        integer(KIND=MPI_OFFSET_KIND) :: m_mok, n_mok, p_mok
        integer(KIND=MPI_OFFSET_KIND) :: wp_mok, var_mok, str_mok
        integer(KIND=MPI_OFFSET_KIND) :: nvars_mok
        integer(KIND=MPI_OFFSET_KIND) :: mok
        integer(kind=MPI_OFFSET_KIND) :: offset
        character(LEN=path_len + 2*name_len) :: file_loc
        logical :: file_exist
        character(len=10) :: t_step_string
        integer :: i

        ! Every rank reads the full global grid, then keeps its own slice
        allocate (x_cb_glb(-1:m_glb))
        allocate (y_cb_glb(-1:n_glb))
        allocate (z_cb_glb(-1:p_glb))

        ! Down-sampled restarts keep every 3rd boundary value in the file
        if (down_sample) then
            stride = 3
        else
            stride = 1
        end if

        ! x-direction cell boundaries (always present)
        file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // 'x_cb.dat'
        inquire (file=trim(file_loc), exist=file_exist)

        if (file_exist) then
            data_size = m_glb + 2
            call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)

            ! Strided vector view to pick the stored boundary values
            call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
            call mpi_type_commit(filetype, ierr)

            offset = 0
            call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)

            call mpi_file_read(ifile, x_cb_glb, data_size, mpi_p, status, ierr)
            call mpi_file_close(ifile, ierr)
        else
            call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
        end if

        ! Local slice plus derived cell widths and centers
        x_cb(-1:m) = x_cb_glb((start_idx(1) - 1):(start_idx(1) + m))
        dx(0:m) = x_cb(0:m) - x_cb(-1:m - 1)
        x_cc(0:m) = x_cb(-1:m - 1) + dx(0:m)/2._wp

        ! y-direction (2D and 3D runs only)
        if (n > 0) then
            file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // 'y_cb.dat'
            inquire (file=trim(file_loc), exist=file_exist)

            if (file_exist) then
                data_size = n_glb + 2
                call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)

                call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
                call mpi_type_commit(filetype, ierr)

                offset = 0
                call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)

                call mpi_file_read(ifile, y_cb_glb, data_size, mpi_p, status, ierr)
                call mpi_file_close(ifile, ierr)
            else
                call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
            end if

            y_cb(-1:n) = y_cb_glb((start_idx(2) - 1):(start_idx(2) + n))
            dy(0:n) = y_cb(0:n) - y_cb(-1:n - 1)
            y_cc(0:n) = y_cb(-1:n - 1) + dy(0:n)/2._wp

            ! z-direction (3D runs only)
            if (p > 0) then
                file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // 'z_cb.dat'
                inquire (file=trim(file_loc), exist=file_exist)

                if (file_exist) then
                    data_size = p_glb + 2
                    call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)

                    call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
                    call mpi_type_commit(filetype, ierr)

                    offset = 0
                    call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)

                    call mpi_file_read(ifile, z_cb_glb, data_size, mpi_p, status, ierr)
                    call mpi_file_close(ifile, ierr)
                else
                    call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
                end if

                z_cb(-1:p) = z_cb_glb((start_idx(3) - 1):(start_idx(3) + p))
                dz(0:p) = z_cb(0:p) - z_cb(-1:p - 1)
                z_cc(0:p) = z_cb(-1:p - 1) + dz(0:p)/2._wp
            end if
        end if

        ! Conservative variables (and IB markers) via MPI I/O
        call s_read_parallel_conservative_data(t_step, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)

        deallocate (x_cb_glb, y_cb_glb, z_cb_glb)

        if (bc_io) then
            ! NOTE(review): branch bodies lost in extraction (hyperlinked calls) -
            ! presumably s_read_parallel_boundary_condition_files here and
            ! s_assign_default_bc_type in the else branch; restore from repository.
        else
        end if
#endif

    end subroutine s_read_parallel_data_files
361
362#ifdef MFC_MPI
    !> Helper subroutine to read parallel conservative variable data
    !! Reads the conservative variables from restart data via MPI I/O, using
    !! either one file per process (Lustre layout) or a single collective file.
    !! NOTE(review): several hyperlinked setup-call lines and the terminating
    !! `end subroutine` statement were lost when this listing was extracted;
    !! the affected spots are marked inline - restore them from the repository.
    impure subroutine s_read_parallel_conservative_data(t_step, m_MOK, n_MOK, p_MOK, WP_MOK, MOK, str_MOK, NVARS_MOK)

        integer, intent(in) :: t_step
        integer(KIND=MPI_OFFSET_KIND), intent(inout) :: m_mok, n_mok, p_mok
        integer(KIND=MPI_OFFSET_KIND), intent(inout) :: wp_mok, mok, str_mok, nvars_mok
        integer :: ifile, ierr, data_size
        integer, dimension(MPI_STATUS_SIZE) :: status
        integer(KIND=MPI_OFFSET_KIND) :: disp, var_mok
        character(LEN=path_len + 2*name_len) :: file_loc
        logical :: file_exist
        character(len=10) :: t_step_string
        integer :: i

        if (file_per_process) then
            ! One restart file per rank: <case>/restart_data/lustre_<t>/<t>_<rank>.dat
            call s_int_to_str(t_step, t_step_string)
            write (file_loc, '(I0,A1,I7.7,A)') t_step, '_', proc_rank, '.dat'
            file_loc = trim(case_dir) // '/restart_data/lustre_' // trim(t_step_string) // trim(mpiiofs) // trim(file_loc)
            inquire (file=trim(file_loc), exist=file_exist)

            if (file_exist) then
                call mpi_file_open(mpi_comm_self, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)

                if (down_sample) then
                    ! NOTE(review): call lost in extraction - presumably
                    ! s_initialize_mpi_data_ds(q_cons_temp).
                else
                    if (ib) then
                        ! NOTE(review): call lost in extraction - presumably
                        ! s_initialize_mpi_data with ib_markers.
                    else
                        ! NOTE(review): call lost in extraction - presumably
                        ! s_initialize_mpi_data without ib_markers.
                    end if
                end if

                ! Down-sampled data carries one extra layer on each side per dimension
                if (down_sample) then
                    data_size = (m + 3)*(n + 3)*(p + 3)
                else
                    data_size = (m + 1)*(n + 1)*(p + 1)
                end if

                ! Global extents and record sizes as MPI offsets
                m_mok = int(m_glb + 1, mpi_offset_kind)
                n_mok = int(n_glb + 1, mpi_offset_kind)
                p_mok = int(p_glb + 1, mpi_offset_kind)
                wp_mok = int(storage_size(0._stp)/8, mpi_offset_kind)
                mok = int(1._wp, mpi_offset_kind)
                str_mok = int(name_len, mpi_offset_kind)
                nvars_mok = int(sys_size, mpi_offset_kind)

                ! NOTE(review): both branches below are identical in this listing;
                ! verify against the repository whether content was lost here.
                if (bubbles_euler .or. elasticity .or. mhd) then
                    do i = 1, sys_size
                        var_mok = int(i, mpi_offset_kind)
                        call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, mpi_io_p, status, ierr)
                    end do
                else
                    do i = 1, sys_size
                        var_mok = int(i, mpi_offset_kind)
                        call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, mpi_io_p, status, ierr)
                    end do
                end if

                call s_mpi_barrier()
                call mpi_file_close(ifile, ierr)

                ! Copy the interior cells out of the down-sampling scratch buffer
                if (down_sample) then
                    do i = 1, sys_size
                        q_cons_vf(i)%sf(0:m,0:n,0:p) = q_cons_temp(i)%sf(0:m,0:n,0:p)
                    end do
                end if

                call s_read_ib_data_files(trim(case_dir) // '/restart_data' // trim(mpiiofs), t_step)
            else
                call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
            end if
        else
            ! Single shared restart file: <case>/restart_data/<t>.dat
            write (file_loc, '(I0,A)') t_step, '.dat'
            file_loc = trim(case_dir) // '/restart_data' // trim(mpiiofs) // trim(file_loc)
            inquire (file=trim(file_loc), exist=file_exist)

            if (file_exist) then
                call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)

                call s_setup_mpi_io_params(data_size, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)

                ! One collective read per variable, each at its record displacement
                do i = 1, sys_size
                    var_mok = int(i, mpi_offset_kind)

                    disp = m_mok*max(mok, n_mok)*max(mok, p_mok)*wp_mok*(var_mok - 1)

                    call mpi_file_set_view(ifile, disp, mpi_p, mpi_io_data%view(i), 'native', mpi_info_int, ierr)
                    call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, mpi_io_p, status, ierr)
                end do

                call s_mpi_barrier()
                call mpi_file_close(ifile, ierr)

                call s_read_ib_data_files(trim(case_dir) // '/restart_data' // trim(mpiiofs), t_step)
            else
                call s_mpi_abort('File ' // trim(file_loc) // ' is missing. Exiting.')
            end if
        end if

        ! NOTE(review): `end subroutine s_read_parallel_conservative_data` was lost
        ! in extraction and must be restored from the repository.
464#endif
465
    !> Computation of parameters, allocation procedures, and/or any other tasks needed to properly setup the module
    !! NOTE(review): the `impure subroutine s_initialize_data_input_module` header
    !! statement and several hyperlinked call/assignment lines were lost when this
    !! listing was extracted; the affected spots are marked inline.

        integer :: i

        ! One scalar field per equation for each variable set
        allocate (q_cons_vf(1:sys_size))
        allocate (q_prim_vf(1:sys_size))
        allocate (q_cons_temp(1:sys_size))

        if (n > 0) then
            if (p > 0) then
                ! NOTE(review): 3D field-array allocation call lost in extraction -
                ! presumably s_allocate_field_arrays with 3D bounds; confirm in repository.
                if (down_sample) then
                    ! Scratch buffer with one extra layer on each side for down-sampling
                    do i = 1, sys_size
                        allocate (q_cons_temp(i)%sf(-1:m + 1,-1:n + 1,-1:p + 1))
                    end do
                end if
            else
                ! NOTE(review): 2D field-array allocation call lost in extraction.
            end if
        else
            ! NOTE(review): 1D field-array allocation call lost in extraction.
        end if

        ! Boundary-condition identifier fields: one face slab per direction and side
        allocate (bc_type(1:num_dims,1:2))

        allocate (bc_type(1, 1)%sf(0:0,0:n,0:p))
        allocate (bc_type(1, 2)%sf(0:0,0:n,0:p))
        if (n > 0) then
            allocate (bc_type(2, 1)%sf(-buff_size:m + buff_size,0:0,0:p))
            allocate (bc_type(2, 2)%sf(-buff_size:m + buff_size,0:0,0:p))
            if (p > 0) then
                allocate (bc_type(3, 1)%sf(-buff_size:m + buff_size,-buff_size:n + buff_size,0:0))
                allocate (bc_type(3, 2)%sf(-buff_size:m + buff_size,-buff_size:n + buff_size,0:0))
            end if
        end if

        if (parallel_io .neqv. .true.) then
            ! NOTE(review): assignment lost in extraction - presumably
            ! s_read_data_files => s_read_serial_data_files.
        else
            ! NOTE(review): assignment lost in extraction - presumably
            ! s_read_data_files => s_read_parallel_data_files.
        end if

    end subroutine s_initialize_data_input_module
510
    !> Deallocation procedures for the module
    !! Releases all field storage allocated by the initialization routine and
    !! detaches the reader dispatch pointer.
    !! NOTE(review): the `impure subroutine s_finalize_data_input_module` header
    !! statement was lost when this listing was extracted; restore it from the repository.

        integer :: i

        ! Per-equation field storage (the scratch buffer only exists with down_sample)
        do i = 1, sys_size
            deallocate (q_cons_vf(i)%sf)
            deallocate (q_prim_vf(i)%sf)
            if (down_sample) then
                deallocate (q_cons_temp(i)%sf)
            end if
        end do

        deallocate (q_cons_vf)
        deallocate (q_prim_vf)
        deallocate (q_cons_temp)

        if (ib) then
            deallocate (ib_markers%sf)
        end if

        if (chemistry) then
            deallocate (q_t_sf%sf)
        end if

        ! Boundary-condition fields mirror the allocation pattern of initialization
        deallocate (bc_type(1, 1)%sf, bc_type(1, 2)%sf)
        if (n > 0) then
            deallocate (bc_type(2, 1)%sf, bc_type(2, 2)%sf)
            if (p > 0) then
                deallocate (bc_type(3, 1)%sf, bc_type(3, 2)%sf)
            end if
        end if

        deallocate (bc_type)

        ! Detach the reader dispatch pointer
        s_read_data_files => null()

    end subroutine s_finalize_data_input_module
549
550end module m_data_input
Subroutine for reading data files.
Noncharacteristic and processor boundary condition application for ghost cells and buffer regions.
subroutine, public s_read_parallel_boundary_condition_files(bc_type)
Read boundary condition type and buffer data from per-rank parallel files using MPI I/O.
subroutine, public s_read_serial_boundary_condition_files(step_dirpath, bc_type)
Read boundary condition type and buffer data from serial (unformatted) restart files.
subroutine, public s_assign_default_bc_type(bc_type)
Initialize the per-cell boundary condition type arrays with the global default BC values.
Platform-specific file and directory operations: create, delete, inquire, getcwd, and basename.
impure subroutine my_inquire(fileloc, dircheck)
Inquires on the existence of a directory.
Reads raw simulation grid and conservative-variable data for a given time-step and fills buffer regions.
impure subroutine s_allocate_field_arrays(local_start_idx, end_x, end_y, end_z)
Helper subroutine to allocate field arrays for given dimensionality.
impure subroutine, public s_read_parallel_data_files(t_step)
Parallel-read the raw data files present in the corresponding time-step directory and populate the associated grid and conservative variables.
type(scalar_field), public q_t_sf
Temperature field.
type(scalar_field), dimension(:), allocatable, public q_cons_vf
Conservative variables.
impure subroutine s_read_parallel_conservative_data(t_step, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
Helper subroutine to read parallel conservative variable data.
impure subroutine, public s_initialize_data_input_module
Computation of parameters, allocation procedures, and/or any other tasks needed to properly set up the module.
type(scalar_field), dimension(:), allocatable, public q_prim_vf
Primitive variables.
impure subroutine s_setup_mpi_io_params(data_size, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
Helper subroutine to setup MPI data I/O parameters.
impure subroutine, public s_finalize_data_input_module
Deallocation procedures for the module.
impure subroutine s_read_grid_data_direction(t_step_dir, direction, cb_array, d_array, cc_array, size_dim)
Helper subroutine to read grid data files for a given direction.
impure subroutine, public s_read_serial_data_files(t_step)
Read the raw data files present in the corresponding time-step directory and populate the associated grid and conservative variables.
type(scalar_field), dimension(:), allocatable, public q_cons_temp
procedure(s_read_abstract_data_files), pointer, public s_read_data_files
type(integer_field), dimension(:,:), allocatable, public bc_type
Boundary condition identifiers.
type(integer_field), public ib_markers
impure subroutine s_read_ib_data_files(file_loc_base, t_step)
Helper subroutine to read IB data files.
Shared derived types for field data, patch geometry, bubble dynamics, and MPI I/O structures.
Global parameters for the post-process: domain geometry, equation of state, and output database settings.
logical, parameter chemistry
Chemistry modeling.
integer beta_idx
Index of lagrange bubbles beta.
real(wp), dimension(:), allocatable y_cc
integer proc_rank
Rank of the local processor.
type(mpi_io_ib_var), public mpi_io_ib_data
real(wp), dimension(:), allocatable y_cb
character(len=name_len) mpiiofs
integer, dimension(:), allocatable start_idx
Starting cell-center index of local processor in global grid.
integer sys_size
Number of unknowns in the system of equations.
real(wp), dimension(:), allocatable dz
integer buff_size
Number of ghost cells for boundary condition storage.
real(wp), dimension(:), allocatable z_cb
integer num_dims
Number of spatial dimensions.
real(wp), dimension(:), allocatable x_cc
real(wp), dimension(:), allocatable x_cb
real(wp), dimension(:), allocatable dy
integer t_step_save
Interval between consecutive time-step directory.
real(wp), dimension(:), allocatable z_cc
character(len=path_len) case_dir
Case folder location.
logical mhd
Magnetohydrodynamics.
logical parallel_io
Format of the data files.
logical down_sample
down sampling of the database file(s)
logical file_per_process
output format
logical elasticity
elasticity modeling, true for hyper or hypo
type(mpi_io_var), public mpi_io_data
real(wp), dimension(:), allocatable dx
Cell-width distributions in the x-, y- and z-coordinate directions.
Utility routines for bubble model setup, coordinate transforms, array sampling, and special functions.
elemental subroutine, public s_int_to_str(i, res)
Convert an integer to its trimmed string representation.
MPI communication layer: domain decomposition, halo exchange, reductions, and parallel I/O setup.
impure subroutine s_mpi_abort(prnt, code)
The subroutine terminates the MPI execution environment.
impure subroutine s_mpi_barrier
Halts all processes until all have reached barrier.
impure subroutine s_initialize_mpi_data(q_cons_vf, ib_markers, beta)
Set up MPI I/O data views and variable pointers for parallel file output.
subroutine s_initialize_mpi_data_ds(q_cons_vf)
Set up MPI I/O data views for downsampled (coarsened) parallel file output.
MPI gather and scatter operations for distributing post-process grid and flow-variable data.
Derived type annexing an integer scalar field (SF).
Derived type annexing a scalar field (SF).