MFC
Exascale flow solver
m_data_input.f90
1!>
2!! @file
3!> @brief Contains module m_data_input
4
5!> @brief Reads raw simulation grid and conservative-variable data for a given time-step and fills buffer regions
6module m_data_input
7
8#ifdef MFC_MPI
9 use mpi !< message passing interface (mpi) module
10#endif
11
12 use m_derived_types !< definitions of the derived types
13
14 use m_global_parameters !< global parameters for the code
15
16 use m_mpi_proxy !< message passing interface (mpi) module proxy
17
18 use m_mpi_common
19
20 use m_compile_specific !< platform-specific file and directory operations
21
22 use m_boundary_common !< boundary-condition types and buffer I/O
23
24 use m_helper
25
26 implicit none
27
28 private; public :: s_initialize_data_input_module, &
29 s_read_data_files, &
30 s_read_serial_data_files, &
31 s_read_parallel_data_files, &
32 s_finalize_data_input_module
33
34 abstract interface
35
36 !> Subroutine for reading data files
37 !! @param t_step Current time-step to input
38 impure subroutine s_read_abstract_data_files(t_step)
39
40 implicit none
41
42 integer, intent(in) :: t_step
43
44 end subroutine s_read_abstract_data_files
45
46 end interface
47
48 type(scalar_field), allocatable, dimension(:), public :: q_cons_vf !<
49 !! Conservative variables
50
51 type(scalar_field), allocatable, dimension(:), public :: q_cons_temp
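    ! Staging buffer for down-sampled parallel reads; copied into q_cons_vf in
    ! s_read_parallel_conservative_data when down_sample is enabled.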
52
53 type(scalar_field), allocatable, dimension(:), public :: q_prim_vf !<
54 !! Primitive variables
55
56 type(integer_field), allocatable, dimension(:, :), public :: bc_type !<
57 !! Boundary condition identifiers
58
59 type(scalar_field), public :: q_t_sf !<
60 !! Temperature field
61
63 type(integer_field), public :: ib_markers !< Immersed-boundary (IB) marker identifiers
64
65 procedure(s_read_abstract_data_files), pointer :: s_read_data_files => null()
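    ! Dispatch pointer: s_initialize_data_input_module associates it with either
    ! s_read_serial_data_files or s_read_parallel_data_files, depending on parallel_io.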
66
67contains
68
69 !> Helper subroutine to read grid data files for a given direction
70 !! @param t_step_dir Directory containing the time-step data
71 !! @param direction Direction name ('x', 'y', 'z')
72 !! @param cb_array Cell boundary array to populate
73 !! @param d_array Cell width array to populate
74 !! @param cc_array Cell center array to populate
75 !! @param size_dim Size of the dimension
76 impure subroutine s_read_grid_data_direction(t_step_dir, direction, cb_array, d_array, cc_array, size_dim)
77
78 character(len=*), intent(in) :: t_step_dir
79 character(len=1), intent(in) :: direction
80 real(wp), dimension(-1:), intent(out) :: cb_array
81 real(wp), dimension(0:), intent(out) :: d_array
82 real(wp), dimension(0:), intent(out) :: cc_array
83 integer, intent(in) :: size_dim
84
85 character(LEN=len_trim(t_step_dir) + 10) :: file_loc
86 logical :: file_check
87
88 ! Checking whether direction_cb.dat exists
89 file_loc = trim(t_step_dir)//'/'//direction//'_cb.dat'
90 inquire (file=trim(file_loc), exist=file_check)
91
92 ! Reading direction_cb.dat if it exists, exiting otherwise
93 if (file_check) then
94 open (1, file=trim(file_loc), form='unformatted', &
95 status='old', action='read')
96 read (1) cb_array(-1:size_dim)
97 close (1)
98 else
99 call s_mpi_abort('File '//direction//'_cb.dat is missing in '// &
100 trim(t_step_dir)//'. Exiting.')
101 end if
102
103 ! Computing the cell-width distribution
104 d_array(0:size_dim) = cb_array(0:size_dim) - cb_array(-1:size_dim - 1)
105
106 ! Computing the cell-center locations
107 cc_array(0:size_dim) = cb_array(-1:size_dim - 1) + d_array(0:size_dim)/2._wp
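        ! Equivalently, cc(i) = (cb(i-1) + cb(i))/2: each cell center is the midpoint
        ! of the two cell boundaries that enclose it.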
108
109 end subroutine s_read_grid_data_direction
110
111#ifdef MFC_MPI
112 !> Helper subroutine to setup MPI data I/O parameters
113 !! @param data_size Local array size (output)
114 !! @param m_MOK, n_MOK, p_MOK MPI offset kinds for dimensions (output)
115 !! @param WP_MOK, MOK, str_MOK, NVARS_MOK Other MPI offset kinds (output)
116 impure subroutine s_setup_mpi_io_params(data_size, m_MOK, n_MOK, p_MOK, WP_MOK, MOK, str_MOK, NVARS_MOK)
117
118 integer, intent(out) :: data_size
119 integer(KIND=MPI_OFFSET_KIND), intent(out) :: m_mok, n_mok, p_mok
120 integer(KIND=MPI_OFFSET_KIND), intent(out) :: wp_mok, mok, str_mok, nvars_mok
121
122 ! Initialize MPI data I/O
123 if (ib) then
124 call s_initialize_mpi_data(q_cons_vf, ib_markers)
125 else
126 call s_initialize_mpi_data(q_cons_vf)
127 end if
128
129 ! Size of local arrays
130 data_size = (m + 1)*(n + 1)*(p + 1)
131
132 ! Resize some integers so MPI can read even the biggest file
133 m_mok = int(m_glb + 1, mpi_offset_kind)
134 n_mok = int(n_glb + 1, mpi_offset_kind)
135 p_mok = int(p_glb + 1, mpi_offset_kind)
136 wp_mok = int(8._wp, mpi_offset_kind)
137 mok = int(1._wp, mpi_offset_kind)
138 str_mok = int(name_len, mpi_offset_kind)
139 nvars_mok = int(sys_size, mpi_offset_kind)
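        ! These MPI_OFFSET_KIND integers are later combined into byte displacements of
        ! the form m_MOK*max(MOK, n_MOK)*max(MOK, p_MOK)*WP_MOK*(var_MOK - 1), i.e.
        ! (global cell count) x (bytes per value) x (number of records to skip).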
140
141 end subroutine s_setup_mpi_io_params
142#endif
143
144 !> Helper subroutine to read IB data files
145 !! @param file_loc_base Base file location for IB data
146 !! @param t_step Time step index
147 impure subroutine s_read_ib_data_files(file_loc_base, t_step)
148
149 character(len=*), intent(in) :: file_loc_base
150 integer, intent(in), optional :: t_step
151
152 character(LEN=len_trim(file_loc_base) + 20) :: file_loc
153 logical :: file_exist
154 integer :: ifile, ierr, data_size, var_mok
155
156#ifdef MFC_MPI
157 integer, dimension(MPI_STATUS_SIZE) :: status
158 integer(KIND=MPI_OFFSET_KIND) :: disp
159 integer :: m_mok, n_mok, p_mok, mok, wp_mok, save_index
160
161#endif
162 if (.not. ib) return
163
164 if (parallel_io) then
165 write (file_loc, '(A)') trim(file_loc_base)//'ib.dat'
166 else
167 write (file_loc, '(A)') trim(file_loc_base)//'/ib.dat'
168 end if
169 inquire (file=trim(file_loc), exist=file_exist)
170
171 if (file_exist) then
172 if (parallel_io) then
173#ifdef MFC_MPI
174 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
175
176 m_mok = int(m_glb + 1, mpi_offset_kind)
177 n_mok = int(n_glb + 1, mpi_offset_kind)
178 p_mok = int(p_glb + 1, mpi_offset_kind)
179 mok = int(1._wp, mpi_offset_kind)
180 wp_mok = int(8._wp, mpi_offset_kind)
181 save_index = t_step/t_step_save ! get the number of saves done to this point
182
183 data_size = (m + 1)*(n + 1)*(p + 1)
184 var_mok = int(sys_size + 1, mpi_offset_kind)
185 if (t_step == 0) then
186 disp = 0
187 else
188 disp = m_mok*max(mok, n_mok)*max(mok, p_mok)*wp_mok*(var_mok - 1 + int(save_index, mpi_offset_kind))
189 end if
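            ! The displacement above skips (sys_size + save_index) full global-field
            ! records, each m_MOK*max(MOK, n_MOK)*max(MOK, p_MOK)*WP_MOK bytes long,
            ! before the immersed-boundary marker data.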
190
191 call mpi_file_set_view(ifile, disp, mpi_integer, mpi_io_ib_data%view, &
192 'native', mpi_info_int, ierr)
193 call mpi_file_read(ifile, mpi_io_ib_data%var%sf, data_size, &
194 mpi_integer, status, ierr)
195
196 call mpi_file_close(ifile, ierr)
197#endif
198 else
199 open (2, file=trim(file_loc), &
200 form='unformatted', &
201 action='read', &
202 status='old')
203 read (2) ib_markers%sf(0:m, 0:n, 0:p)
204 close (2)
205 end if
206 else
207 call s_mpi_abort('File '//trim(file_loc)//' is missing. Exiting.')
208 end if
209
210 end subroutine s_read_ib_data_files
211
212 !> Helper subroutine to allocate field arrays for given dimensionality
213 !! @param local_start_idx Starting index for allocation
214 !! @param end_x End index for x dimension
215 !! @param end_y End index for y dimension
216 !! @param end_z End index for z dimension
217 impure subroutine s_allocate_field_arrays(local_start_idx, end_x, end_y, end_z)
218
219 integer, intent(in) :: local_start_idx, end_x, end_y, end_z
220 integer :: i
221
222 do i = 1, sys_size
223 allocate (q_cons_vf(i)%sf(local_start_idx:end_x, local_start_idx:end_y, local_start_idx:end_z))
224 allocate (q_prim_vf(i)%sf(local_start_idx:end_x, local_start_idx:end_y, local_start_idx:end_z))
225 end do
226
227 if (ib) then
228 allocate (ib_markers%sf(local_start_idx:end_x, local_start_idx:end_y, local_start_idx:end_z))
229 end if
230
231 if (chemistry) then
232 allocate (q_t_sf%sf(local_start_idx:end_x, local_start_idx:end_y, local_start_idx:end_z))
233 end if
234
235 end subroutine s_allocate_field_arrays
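    ! As an illustration only (these bounds are an assumption, not taken from this
    ! listing): for a 3D run with buff_size ghost layers on every side, the helper
    ! would be invoked along the lines of
    !     call s_allocate_field_arrays(-buff_size, m + buff_size, n + buff_size, p + buff_size)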
236
237 !> This subroutine is called at each time-step that has to
238 !! be post-processed in order to read the raw data files
239 !! present in the corresponding time-step directory and to
240 !! populate the associated grid and conservative variables.
241 !! @param t_step Current time-step
242 impure subroutine s_read_serial_data_files(t_step)
243
244 integer, intent(in) :: t_step
245
246 character(LEN=len_trim(case_dir) + 2*name_len) :: t_step_dir !<
247 !! Location of the time-step directory associated with t_step
248
249 character(LEN=len_trim(case_dir) + 3*name_len) :: file_loc !<
250 !! Generic string used to store the location of a particular file
251
252 character(len= &
253 int(floor(log10(real(sys_size, wp)))) + 1) :: file_num !<
254 !! Used to store the variable position, in character form, of the
255 !! currently manipulated conservative variable file
256
257 character(LEN=len_trim(case_dir) + 2*name_len) :: t_step_ib_dir !<
258 !! Location of the immersed-boundary data directory associated with t_step
259
260 logical :: dir_check !<
261 !! Generic logical used to test the existence of a particular folder
262
263 logical :: file_check !<
264 !! Generic logical used to test the existence of a particular file
265
266 integer :: i !< Generic loop iterator
267
268 ! Setting location of time-step folder based on current time-step
269 write (t_step_dir, '(A,I0,A,I0)') '/p_all/p', proc_rank, '/', t_step
270 t_step_dir = trim(case_dir)//trim(t_step_dir)
271
272 ! Inquiring as to the existence of the time-step directory
273 file_loc = trim(t_step_dir)//'/.'
274 call my_inquire(file_loc, dir_check)
275
276 ! If the time-step directory is missing, the post-process exits.
277 if (dir_check .neqv. .true.) then
278 call s_mpi_abort('Time-step folder '//trim(t_step_dir)// &
279 ' is missing. Exiting.')
280 end if
281
282 if (bc_io) then
283 call s_read_serial_boundary_condition_files(t_step_dir, bc_type)
284 else
285 call s_assign_default_bc_type(bc_type)
286 end if
287
288 ! Reading the Grid Data Files using helper subroutine
289 call s_read_grid_data_direction(t_step_dir, 'x', x_cb, dx, x_cc, m)
290
291 if (n > 0) then
292 call s_read_grid_data_direction(t_step_dir, 'y', y_cb, dy, y_cc, n)
293
294 if (p > 0) then
295 call s_read_grid_data_direction(t_step_dir, 'z', z_cb, dz, z_cc, p)
296 end if
297 end if
298
299 ! Reading the Conservative Variables Data Files
300 do i = 1, sys_size
301
302 ! Checking whether the data file associated with the variable
303 ! position of currently manipulated conservative variable exists
304 write (file_num, '(I0)') i
305 file_loc = trim(t_step_dir)//'/q_cons_vf'// &
306 trim(file_num)//'.dat'
307 inquire (file=trim(file_loc), exist=file_check)
308
309 ! Reading the data file if it exists, exiting otherwise
310 if (file_check) then
311 open (1, file=trim(file_loc), form='unformatted', &
312 status='old', action='read')
313 read (1) q_cons_vf(i)%sf(0:m, 0:n, 0:p)
314 close (1)
316 else
317 call s_mpi_abort('File q_cons_vf'//trim(file_num)// &
318 '.dat is missing in '//trim(t_step_dir)// &
319 '. Exiting.')
320 end if
321
322 end do
323
324 ! Reading IB data using helper subroutine
325 call s_read_ib_data_files(t_step_dir)
326
327 end subroutine s_read_serial_data_files
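    ! For reference, the serial reader above expects the following layout on disk:
    !   <case_dir>/p_all/p<proc_rank>/<t_step>/x_cb.dat          (y_cb.dat, z_cb.dat in 2D/3D)
    !   <case_dir>/p_all/p<proc_rank>/<t_step>/q_cons_vf<i>.dat  for i = 1, ..., sys_size
    !   <case_dir>/p_all/p<proc_rank>/<t_step>/ib.dat            (only when ib is enabled)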
328
329 !> This subroutine is called at each time-step that has to
330 !! be post-processed in order to parallel-read the raw data files
331 !! present in the corresponding time-step directory and to
332 !! populate the associated grid and conservative variables.
333 !! @param t_step Current time-step
334 impure subroutine s_read_parallel_data_files(t_step)
335
336 integer, intent(in) :: t_step
337
338#ifdef MFC_MPI
339
340 real(wp), allocatable, dimension(:) :: x_cb_glb, y_cb_glb, z_cb_glb
341
342 integer :: ifile, ierr, data_size, filetype, stride
343 integer, dimension(MPI_STATUS_SIZE) :: status
344
345 integer(KIND=MPI_OFFSET_KIND) :: disp
346 integer(KIND=MPI_OFFSET_KIND) :: m_mok, n_mok, p_mok
347 integer(KIND=MPI_OFFSET_KIND) :: wp_mok, var_mok, str_mok
348 integer(KIND=MPI_OFFSET_KIND) :: nvars_mok
349 integer(KIND=MPI_OFFSET_KIND) :: mok
350 integer(kind=MPI_OFFSET_KIND) :: offset
351 real(wp) :: delx, dely, delz
352
353 character(LEN=path_len + 2*name_len) :: file_loc
354 logical :: file_exist
355
356 character(len=10) :: t_step_string
357
358 integer :: i
359
360 allocate (x_cb_glb(-1:m_glb))
361 allocate (y_cb_glb(-1:n_glb))
362 allocate (z_cb_glb(-1:p_glb))
363
364 if (down_sample) then
365 stride = 3
366 else
367 stride = 1
368 end if
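        ! When down_sample is on, the vector filetype built below keeps one value out of
        ! every `stride` entries of each global cell-boundary record; with stride = 1 the
        ! records are read contiguously.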
369
370 ! Read in cell boundary locations in x-direction
371 file_loc = trim(case_dir)//'/restart_data'//trim(mpiiofs)//'x_cb.dat'
372 inquire (file=trim(file_loc), exist=file_exist)
373
374 if (file_exist) then
375 data_size = m_glb + 2
376 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
377
378 call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
379 call mpi_type_commit(filetype, ierr)
380
381 offset = 0
382 call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)
383
384 call mpi_file_read(ifile, x_cb_glb, data_size, mpi_p, status, ierr)
385 call mpi_file_close(ifile, ierr)
386 else
387 call s_mpi_abort('File '//trim(file_loc)//' is missing. Exiting.')
388 end if
389
390 ! Assigning local cell boundary locations
391 x_cb(-1:m) = x_cb_glb((start_idx(1) - 1):(start_idx(1) + m))
392 ! Computing the cell width distribution
393 dx(0:m) = x_cb(0:m) - x_cb(-1:m - 1)
394 ! Computing the cell center location
395 x_cc(0:m) = x_cb(-1:m - 1) + dx(0:m)/2._wp
396
397 if (n > 0) then
398 ! Read in cell boundary locations in y-direction
399 file_loc = trim(case_dir)//'/restart_data'//trim(mpiiofs)//'y_cb.dat'
400 inquire (file=trim(file_loc), exist=file_exist)
401
402 if (file_exist) then
403 data_size = n_glb + 2
404 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
405
406 call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
407 call mpi_type_commit(filetype, ierr)
408
409 offset = 0
410 call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)
411
412 call mpi_file_read(ifile, y_cb_glb, data_size, mpi_p, status, ierr)
413 call mpi_file_close(ifile, ierr)
414 else
415 call s_mpi_abort('File '//trim(file_loc)//' is missing. Exiting.')
416 end if
417
418 ! Assigning local cell boundary locations
419 y_cb(-1:n) = y_cb_glb((start_idx(2) - 1):(start_idx(2) + n))
420 ! Computing the cell width distribution
421 dy(0:n) = y_cb(0:n) - y_cb(-1:n - 1)
422 ! Computing the cell center location
423 y_cc(0:n) = y_cb(-1:n - 1) + dy(0:n)/2._wp
424
425 if (p > 0) then
426 ! Read in cell boundary locations in z-direction
427 file_loc = trim(case_dir)//'/restart_data'//trim(mpiiofs)//'z_cb.dat'
428 inquire (file=trim(file_loc), exist=file_exist)
429
430 if (file_exist) then
431 data_size = p_glb + 2
432 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
433
434 call mpi_type_vector(data_size, 1, stride, mpi_p, filetype, ierr)
435 call mpi_type_commit(filetype, ierr)
436
437 offset = 0
438 call mpi_file_set_view(ifile, offset, mpi_p, filetype, 'native', mpi_info_int, ierr)
439
440 call mpi_file_read(ifile, z_cb_glb, data_size, mpi_p, status, ierr)
441 call mpi_file_close(ifile, ierr)
442 else
443 call s_mpi_abort('File '//trim(file_loc)//' is missing. Exiting.')
444 end if
445
446 ! Assigning local cell boundary locations
447 z_cb(-1:p) = z_cb_glb((start_idx(3) - 1):(start_idx(3) + p))
448 ! Computing the cell width distribution
449 dz(0:p) = z_cb(0:p) - z_cb(-1:p - 1)
450 ! Computing the cell center location
451 z_cc(0:p) = z_cb(-1:p - 1) + dz(0:p)/2._wp
452 end if
453 end if
454
455 call s_read_parallel_conservative_data(t_step, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
456
457 deallocate (x_cb_glb, y_cb_glb, z_cb_glb)
458
459 if (bc_io) then
460 call s_read_parallel_boundary_condition_files(bc_type)
461 else
462 call s_assign_default_bc_type(bc_type)
463 end if
464
465#endif
466
467 end subroutine s_read_parallel_data_files
468
469#ifdef MFC_MPI
470 !> Helper subroutine to read parallel conservative variable data
471 !! @param t_step Current time-step
472 !! @param m_MOK, n_MOK, p_MOK MPI offset kinds for dimensions
473 !! @param WP_MOK, MOK, str_MOK, NVARS_MOK Other MPI offset kinds
474 impure subroutine s_read_parallel_conservative_data(t_step, m_MOK, n_MOK, p_MOK, WP_MOK, MOK, str_MOK, NVARS_MOK)
475
476 integer, intent(in) :: t_step
477 integer(KIND=MPI_OFFSET_KIND), intent(inout) :: m_mok, n_mok, p_mok
478 integer(KIND=MPI_OFFSET_KIND), intent(inout) :: wp_mok, mok, str_mok, nvars_mok
479
480 integer :: ifile, ierr, data_size
481 integer, dimension(MPI_STATUS_SIZE) :: status
482 integer(KIND=MPI_OFFSET_KIND) :: disp, var_mok
483 character(LEN=path_len + 2*name_len) :: file_loc
484 logical :: file_exist
485 character(len=10) :: t_step_string
486 integer :: i
487
488 if (file_per_process) then
489 call s_int_to_str(t_step, t_step_string)
490 ! Open the file to read conservative variables
491 write (file_loc, '(I0,A1,I7.7,A)') t_step, '_', proc_rank, '.dat'
492 file_loc = trim(case_dir)//'/restart_data/lustre_'//trim(t_step_string)//trim(mpiiofs)//trim(file_loc)
493 inquire (file=trim(file_loc), exist=file_exist)
494
495 if (file_exist) then
496 call mpi_file_open(mpi_comm_self, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
497
498 if (down_sample) then
499 call s_initialize_mpi_data_ds(q_cons_temp)
500 else
501 ! Initialize MPI data I/O
502 if (ib) then
503 call s_initialize_mpi_data(q_cons_vf, ib_markers)
504 else
505 call s_initialize_mpi_data(q_cons_vf)
506 end if
507 end if
508
509 if (down_sample) then
510 ! Size of local arrays
511 data_size = (m + 3)*(n + 3)*(p + 3)
512 else
513 ! Size of local arrays
514 data_size = (m + 1)*(n + 1)*(p + 1)
515 end if
516
517 ! Resize some integers so MPI can read even the biggest file
518 m_mok = int(m_glb + 1, mpi_offset_kind)
519 n_mok = int(n_glb + 1, mpi_offset_kind)
520 p_mok = int(p_glb + 1, mpi_offset_kind)
521 wp_mok = int(8._wp, mpi_offset_kind)
522 mok = int(1._wp, mpi_offset_kind)
523 str_mok = int(name_len, mpi_offset_kind)
524 nvars_mok = int(sys_size, mpi_offset_kind)
525
526 ! Read the data for each variable
527 if (bubbles_euler .or. elasticity .or. mhd) then
528 do i = 1, sys_size
529 var_mok = int(i, mpi_offset_kind)
530 call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, &
531 mpi_io_p, status, ierr)
532 end do
533 else
534 do i = 1, sys_size
535 var_mok = int(i, mpi_offset_kind)
536 call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, &
537 mpi_io_p, status, ierr)
538 end do
539 end if
540
541 call s_mpi_barrier()
542 call mpi_file_close(ifile, ierr)
543
544 if (down_sample) then
545 do i = 1, sys_size
546 q_cons_vf(i)%sf(0:m, 0:n, 0:p) = q_cons_temp(i)%sf(0:m, 0:n, 0:p)
547 end do
548 end if
549
550 call s_read_ib_data_files(trim(case_dir)//'/restart_data'//trim(mpiiofs), t_step)
551 else
552 call s_mpi_abort('File '//trim(file_loc)//' is missing. Exiting.')
553 end if
554 else
555 ! Open the file to read conservative variables
556 write (file_loc, '(I0,A)') t_step, '.dat'
557 file_loc = trim(case_dir)//'/restart_data'//trim(mpiiofs)//trim(file_loc)
558 inquire (file=trim(file_loc), exist=file_exist)
559
560 if (file_exist) then
561 call mpi_file_open(mpi_comm_world, file_loc, mpi_mode_rdonly, mpi_info_int, ifile, ierr)
562
563 call s_setup_mpi_io_params(data_size, m_mok, n_mok, p_mok, wp_mok, mok, str_mok, nvars_mok)
564
565 ! Read the data for each variable
566 do i = 1, sys_size
567 var_mok = int(i, mpi_offset_kind)
568
569 ! Initial displacement to skip at beginning of file
570 disp = m_mok*max(mok, n_mok)*max(mok, p_mok)*wp_mok*(var_mok - 1)
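                    ! Variable i begins (i - 1) global fields past the start of the
                    ! shared restart file; each field spans the full global cell count
                    ! times WP_MOK bytes.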
571
572 call mpi_file_set_view(ifile, disp, mpi_p, mpi_io_data%view(i), &
573 'native', mpi_info_int, ierr)
574 call mpi_file_read_all(ifile, mpi_io_data%var(i)%sf, data_size*mpi_io_type, &
575 mpi_io_p, status, ierr)
576 end do
577
578 call s_mpi_barrier()
579 call mpi_file_close(ifile, ierr)
580
581 call s_read_ib_data_files(trim(case_dir)//'/restart_data'//trim(mpiiofs), t_step)
582 else
583 call s_mpi_abort('File '//trim(file_loc)//' is missing. Exiting.')
584 end if
585 end if
586 end subroutine s_read_parallel_conservative_data
587#endif
588
589 !> Computation of parameters, allocation procedures, and/or
590 !! any other tasks needed to properly setup the module
591 impure subroutine s_initialize_data_input_module
592
593 integer :: i !< Generic loop iterator
594
595 ! Allocating the parts of the conservative and primitive variables
596 ! that do not require the direct knowledge of the dimensionality of
597 ! the simulation
598 allocate (q_cons_vf(1:sys_size))
599 allocate (q_prim_vf(1:sys_size))
600 allocate (q_cons_temp(1:sys_size))
601
602 ! Allocating the parts of the conservative and primitive variables
603 ! that do require the direct knowledge of the dimensionality of
604 ! the simulation using helper subroutine
605
606 ! Simulation is at least 2D
607 if (n > 0) then
608 ! Simulation is 3D
609 if (p > 0) then
611 if (down_sample) then
612 do i = 1, sys_size
613 allocate (q_cons_temp(i)%sf(-1:m + 1, -1:n + 1, -1:p + 1))
614 end do
615 end if
616 else
617 ! Simulation is 2D
619 end if
620 else
621 ! Simulation is 1D
623 end if
624
625 ! Allocating arrays to store the bc types
626 allocate (bc_type(1:num_dims, 1:2))
627
628 allocate (bc_type(1, 1)%sf(0:0, 0:n, 0:p))
629 allocate (bc_type(1, 2)%sf(0:0, 0:n, 0:p))
630 if (n > 0) then
631 allocate (bc_type(2, 1)%sf(-buff_size:m + buff_size, 0:0, 0:p))
632 allocate (bc_type(2, 2)%sf(-buff_size:m + buff_size, 0:0, 0:p))
633 if (p > 0) then
634 allocate (bc_type(3, 1)%sf(-buff_size:m + buff_size, -buff_size:n + buff_size, 0:0))
635 allocate (bc_type(3, 2)%sf(-buff_size:m + buff_size, -buff_size:n + buff_size, 0:0))
636 end if
637 end if
638
639 if (parallel_io .neqv. .true.) then
640 s_read_data_files => s_read_serial_data_files
641 else
642 s_read_data_files => s_read_parallel_data_files
643 end if
644
645 end subroutine s_initialize_data_input_module
646
647 !> Deallocation procedures for the module
648 impure subroutine s_finalize_data_input_module
649
650 integer :: i !< Generic loop iterator
651
652 ! Deallocating the conservative and primitive variables
653 do i = 1, sys_size
654 deallocate (q_cons_vf(i)%sf)
655 deallocate (q_prim_vf(i)%sf)
656 if (down_sample) then
657 deallocate (q_cons_temp(i)%sf)
658 end if
659 end do
660
661 deallocate (q_cons_vf)
662 deallocate (q_prim_vf)
663 deallocate (q_cons_temp)
664
665 if (ib) then
666 deallocate (ib_markers%sf)
667 end if
668
669 if (chemistry) then
670 deallocate (q_t_sf%sf)
671 end if
672
673 deallocate (bc_type(1, 1)%sf, bc_type(1, 2)%sf)
674 if (n > 0) then
675 deallocate (bc_type(2, 1)%sf, bc_type(2, 2)%sf)
676 if (p > 0) then
677 deallocate (bc_type(3, 1)%sf, bc_type(3, 2)%sf)
678 end if
679 end if
680
681 deallocate (bc_type)
682
683 s_read_data_files => null()
684
685 end subroutine s_finalize_data_input_module
686
687end module m_data_input
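As a usage illustration only (a minimal sketch, not part of m_data_input.f90, assuming the MFC post-process build where m_global_parameters has already populated parallel_io, the grid extents, and t_step), the public entry points are exercised in the order below, with the s_read_data_files pointer dispatching to the serial or parallel reader:

    ! Minimal driver sketch (hypothetical)
    use m_data_input

    call s_initialize_data_input_module()   ! allocate q_cons_vf, q_prim_vf, bc_type, ...,
                                            ! and bind the s_read_data_files pointer
    call s_read_data_files(t_step)          ! serial or parallel read, per parallel_io
    ! ... convert q_cons_vf to q_prim_vf and write the output databases ...
    call s_finalize_data_input_module()     ! release every module-level allocation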