MFC
Exascale flow solver
m_mpi_proxy.fpp.f90
!>
!! @file
!! @brief Contains module m_mpi_proxy

!> @brief MPI gather and scatter operations for distributing post-process grid and flow-variable data
module m_mpi_proxy

#ifdef MFC_MPI
    use mpi                  !< Message Passing Interface (MPI) module
#endif

    use m_derived_types      !< Shared derived types
    use m_global_parameters  !< Global parameters for the post-process
    use m_mpi_common
    use ieee_arithmetic

    implicit none

    !> @name Receive counts and displacement vector variables, respectively, used in enabling
    !! MPI to gather varying amounts of data from all processes to the root process
    !> @{
    integer, allocatable, dimension(:) :: recvcounts
    integer, allocatable, dimension(:) :: displs
    !> @}

contains

    !> Computation of parameters, allocation procedures, and/or any other tasks needed to properly set up the module
    impure subroutine s_initialize_mpi_proxy_module

#ifdef MFC_MPI
        integer :: i    !< Generic loop iterator
        integer :: ierr !< Generic flag used to identify and report MPI errors

        ! Allocating and configuring the receive counts and the displacement vector variables
        ! used in variable-gather communication procedures. Note that these are only needed
        ! either for multidimensional runs that utilize the Silo database file format or for
        ! 1D simulations.

        if ((format == 1 .and. n > 0) .or. n == 0) then
            allocate (recvcounts(0:num_procs - 1))
            allocate (displs(0:num_procs - 1))

            if (n == 0) then
                call mpi_gather(m + 1, 1, mpi_integer, recvcounts(0), 1, mpi_integer, 0, mpi_comm_world, ierr)
            else if (proc_rank == 0) then
                recvcounts = 1
            end if

            if (proc_rank == 0) then
                displs(0) = 0

                do i = 1, num_procs - 1
                    displs(i) = displs(i - 1) + recvcounts(i - 1)
                end do
            end if
        end if
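
        ! For example, with 3 ranks and 1D local sizes m + 1 = {4, 4, 3}, the gather above
        ! yields recvcounts = (/4, 4, 3/) and the loop gives displs = (/0, 4, 8/), so a later
        ! MPI_GATHERV assembles an 11-cell array contiguously on rank 0.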
#endif

    end subroutine s_initialize_mpi_proxy_module

    !> Since only the processor with rank 0 reads and checks the consistency of the
    !! user-provided inputs, these are not available to the remaining processors. This
    !! subroutine is therefore in charge of broadcasting the required information.
    impure subroutine s_mpi_bcast_user_inputs

#ifdef MFC_MPI
        integer :: i    !< Generic loop iterator
        integer :: ierr !< Generic flag used to identify and report MPI errors

        ! Logistics
        call mpi_bcast(case_dir, len(case_dir), mpi_character, 0, mpi_comm_world, ierr)

        call mpi_bcast(m, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(n, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(p, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(m_glb, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(n_glb, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(p_glb, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(t_step_start, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(t_step_stop, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(t_step_save, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(weno_order, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(model_eqns, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(num_fluids, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_x%beg, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_x%end, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_y%beg, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_y%end, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_z%beg, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_z%end, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(flux_lim, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(format, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(precision, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(fd_order, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(thermal, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(nb, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(relax_model, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(n_start, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(num_ibs, 1, mpi_integer, 0, mpi_comm_world, ierr)
        call mpi_bcast(muscl_order, 1, mpi_integer, 0, mpi_comm_world, ierr)

        call mpi_bcast(cyl_coord, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(mpp_lim, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(mixture_err, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(alt_soundspeed, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(hypoelasticity, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(mhd, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(parallel_io, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(rho_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(e_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(pres_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(gamma_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(sim_data, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(heat_ratio_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(pi_inf_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(pres_inf_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(cons_vars_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(prim_vars_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(c_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(qm_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(schlieren_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(chem_wrt_t, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bubbles_euler, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(qbmm, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(polytropic, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(polydisperse, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(file_per_process, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(relax, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(cf_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(igr, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(liutex_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_x%isothermal_in, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_y%isothermal_in, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_z%isothermal_in, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_x%isothermal_out, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_y%isothermal_out, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_z%isothermal_out, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(adv_n, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(ib, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(cfl_adap_dt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(cfl_const_dt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(cfl_dt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(surface_tension, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(hyperelasticity, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bubbles_lagrange, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(output_partial_domain, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(relativity, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(cont_damage, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_io, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(down_sample, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(fft_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(hyper_cleaning, 1, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(ib_state_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)

        if (bubbles_lagrange) then
            call mpi_bcast(lag_header, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_txt_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_db_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_id_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_pos_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_pos_prev_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_vel_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_rad_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_rvel_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_r0_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_rmax_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_rmin_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_dphidt_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_pres_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_mv_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_mg_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_betat_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(lag_betac_wrt, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(bc_io, 1, mpi_logical, 0, mpi_comm_world, ierr)
            call mpi_bcast(down_sample, 1, mpi_logical, 0, mpi_comm_world, ierr)
        end if

        call mpi_bcast(flux_wrt(1), 3, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(omega_wrt(1), 3, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(mom_wrt(1), 3, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(vel_wrt(1), 3, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(alpha_rho_wrt(1), num_fluids_max, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(alpha_rho_e_wrt(1), num_fluids_max, mpi_logical, 0, mpi_comm_world, ierr)
        call mpi_bcast(alpha_wrt(1), num_fluids_max, mpi_logical, 0, mpi_comm_world, ierr)

        do i = 1, num_fluids_max
            call mpi_bcast(fluid_pp(i)%gamma, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(fluid_pp(i)%pi_inf, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(fluid_pp(i)%cv, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(fluid_pp(i)%qv, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(fluid_pp(i)%qvp, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(fluid_pp(i)%G, 1, mpi_p, 0, mpi_comm_world, ierr)
        end do

        ! Subgrid bubble parameters
        if (bubbles_euler .or. bubbles_lagrange) then
            call mpi_bcast(bub_pp%R0ref, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%p0ref, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%rho0ref, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%T0ref, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%ss, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%pv, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%vd, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%mu_l, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%mu_v, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%mu_g, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%gam_v, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%gam_g, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%M_v, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%M_g, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%k_v, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%k_g, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%cp_v, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%cp_g, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%R_v, 1, mpi_p, 0, mpi_comm_world, ierr)
            call mpi_bcast(bub_pp%R_g, 1, mpi_p, 0, mpi_comm_world, ierr)
        end if

        call mpi_bcast(pref, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(rhoref, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(r0ref, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(poly_sigma, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(web, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(ca, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(re_inv, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bx0, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(sigma, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(t_save, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(t_stop, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(x_output%beg, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(x_output%end, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(y_output%beg, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(y_output%end, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(z_output%beg, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(z_output%end, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_x%Twall_in, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_x%Twall_out, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_y%Twall_in, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_y%Twall_out, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_z%Twall_in, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(bc_z%Twall_out, 1, mpi_p, 0, mpi_comm_world, ierr)
        call mpi_bcast(schlieren_alpha(1), num_fluids_max, mpi_p, 0, mpi_comm_world, ierr)
#endif

    end subroutine s_mpi_bcast_user_inputs

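    !> Illustrative sketch (editor's addition, not part of the original source): the pattern
    !! for wiring a new user input into the broadcast list above. Every rank must make the
    !! matching collective call, with rank 0 supplying the authoritative value. The subroutine
    !! name and the new_input variable are hypothetical.
    impure subroutine s_example_bcast_new_input(new_input)

        integer, intent(inout) :: new_input !< Hypothetical integer input read on rank 0

#ifdef MFC_MPI
        integer :: ierr

        ! Collective: all ranks call with identical count, type, and root arguments
        call mpi_bcast(new_input, 1, mpi_integer, 0, mpi_comm_world, ierr)
#endif

    end subroutine s_example_bcast_new_input
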
    !> Gather spatial extents from all ranks for Silo database metadata
    impure subroutine s_mpi_gather_spatial_extents(spatial_extents)

        real(wp), dimension(1:, 0:), intent(inout) :: spatial_extents

#ifdef MFC_MPI
        integer :: ierr !< Generic flag used to identify and report MPI errors
        real(wp) :: ext_temp(0:num_procs - 1)

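        ! Layout note: spatial_extents is column-major with one column per rank. Each rank
        ! contributes a single value per call (recvcounts = 1), so scaling displs by the
        ! number of extents per rank (6 in 3D, 4 in 2D) strides rank r's value into
        ! spatial_extents(d, r), where the row d is selected by the receive buffer's
        ! starting element.
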
        ! Simulation is 3D
        if (p > 0) then
            if (grid_geometry == 3) then
                ! Minimum spatial extent in the r-direction
                call mpi_gatherv(minval(y_cb), 1, mpi_p, spatial_extents(1, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Minimum spatial extent in the theta-direction
                call mpi_gatherv(minval(z_cb), 1, mpi_p, spatial_extents(2, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Minimum spatial extent in the z-direction
                call mpi_gatherv(minval(x_cb), 1, mpi_p, spatial_extents(3, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Maximum spatial extent in the r-direction
                call mpi_gatherv(maxval(y_cb), 1, mpi_p, spatial_extents(4, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Maximum spatial extent in the theta-direction
                call mpi_gatherv(maxval(z_cb), 1, mpi_p, spatial_extents(5, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Maximum spatial extent in the z-direction
                call mpi_gatherv(maxval(x_cb), 1, mpi_p, spatial_extents(6, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)
            else
                ! Minimum spatial extent in the x-direction
                call mpi_gatherv(minval(x_cb), 1, mpi_p, spatial_extents(1, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Minimum spatial extent in the y-direction
                call mpi_gatherv(minval(y_cb), 1, mpi_p, spatial_extents(2, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Minimum spatial extent in the z-direction
                call mpi_gatherv(minval(z_cb), 1, mpi_p, spatial_extents(3, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Maximum spatial extent in the x-direction
                call mpi_gatherv(maxval(x_cb), 1, mpi_p, spatial_extents(4, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Maximum spatial extent in the y-direction
                call mpi_gatherv(maxval(y_cb), 1, mpi_p, spatial_extents(5, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)

                ! Maximum spatial extent in the z-direction
                call mpi_gatherv(maxval(z_cb), 1, mpi_p, spatial_extents(6, 0), recvcounts, 6*displs, mpi_p, 0, mpi_comm_world, ierr)
            end if
        ! Simulation is 2D
        else if (n > 0) then
            ! Minimum spatial extent in the x-direction
            call mpi_gatherv(minval(x_cb), 1, mpi_p, spatial_extents(1, 0), recvcounts, 4*displs, mpi_p, 0, mpi_comm_world, ierr)

            ! Minimum spatial extent in the y-direction
            call mpi_gatherv(minval(y_cb), 1, mpi_p, spatial_extents(2, 0), recvcounts, 4*displs, mpi_p, 0, mpi_comm_world, ierr)

            ! Maximum spatial extent in the x-direction
            call mpi_gatherv(maxval(x_cb), 1, mpi_p, spatial_extents(3, 0), recvcounts, 4*displs, mpi_p, 0, mpi_comm_world, ierr)

            ! Maximum spatial extent in the y-direction
            call mpi_gatherv(maxval(y_cb), 1, mpi_p, spatial_extents(4, 0), recvcounts, 4*displs, mpi_p, 0, mpi_comm_world, ierr)
        ! Simulation is 1D
        else
            ! For 1D, recvcounts/displs are sized for grid defragmentation (m + 1 cells per
            ! rank), not for scalar gathers, so MPI_GATHER is used instead.

            ! Minimum spatial extent in the x-direction
            call mpi_gather(minval(x_cb), 1, mpi_p, ext_temp, 1, mpi_p, 0, mpi_comm_world, ierr)
            if (proc_rank == 0) spatial_extents(1, :) = ext_temp

            ! Maximum spatial extent in the x-direction
            call mpi_gather(maxval(x_cb), 1, mpi_p, ext_temp, 1, mpi_p, 0, mpi_comm_world, ierr)
            if (proc_rank == 0) spatial_extents(2, :) = ext_temp
        end if
#endif

    end subroutine s_mpi_gather_spatial_extents

    !> Collect the sub-domain cell-boundary or cell-center location data from all processors
    !! and put back together the grid of the entire computational domain on the rank 0
    !! processor. This is only done for 1D simulations.
    impure subroutine s_mpi_defragment_1d_grid_variable

#ifdef MFC_MPI
        integer :: ierr !< Generic flag used to identify and report MPI errors

        ! Silo-HDF5 database format
        if (format == 1) then
            call mpi_gatherv(x_cc(0), m + 1, mpi_p, x_root_cc(0), recvcounts, displs, mpi_p, 0, mpi_comm_world, ierr)

        ! Binary database format
        else
            call mpi_gatherv(x_cb(0), m + 1, mpi_p, x_root_cb(0), recvcounts, displs, mpi_p, 0, mpi_comm_world, ierr)

            if (proc_rank == 0) x_root_cb(-1) = x_cb(-1)
        end if
#endif

    end subroutine s_mpi_defragment_1d_grid_variable

    !> Gather the Silo database metadata for the flow variable's extents to boost performance
    !! of the multidimensional visualization.
    !! @param q_sf Flow variable on a single computational sub-domain
    !! @param data_extents Minimum and maximum flow variable extents, per rank
    impure subroutine s_mpi_gather_data_extents(q_sf, data_extents)

        real(wp), dimension(:, :, :), intent(in) :: q_sf
        real(wp), dimension(1:2, 0:num_procs - 1), intent(inout) :: data_extents

#ifdef MFC_MPI
        integer :: ierr !< Generic flag used to identify and report MPI errors
        real(wp) :: ext_temp(0:num_procs - 1)

        if (n > 0) then
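            ! data_extents holds two values per rank (row 1 the minimum, row 2 the maximum),
            ! hence the 2*displs stride below.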
            ! Multi-D: recvcounts = 1, so the strided MPI_GATHERV works correctly

            ! Minimum flow variable extent
            call mpi_gatherv(minval(q_sf), 1, mpi_p, data_extents(1, 0), recvcounts, 2*displs, mpi_p, 0, mpi_comm_world, ierr)

            ! Maximum flow variable extent
            call mpi_gatherv(maxval(q_sf), 1, mpi_p, data_extents(2, 0), recvcounts, 2*displs, mpi_p, 0, mpi_comm_world, ierr)
        else
            ! 1D: recvcounts/displs are sized for grid defragmentation (m + 1 cells per rank),
            ! not for scalar gathers, so MPI_GATHER is used instead.

            ! Minimum flow variable extent
            call mpi_gather(minval(q_sf), 1, mpi_p, ext_temp, 1, mpi_p, 0, mpi_comm_world, ierr)
            if (proc_rank == 0) data_extents(1, :) = ext_temp

            ! Maximum flow variable extent
            call mpi_gather(maxval(q_sf), 1, mpi_p, ext_temp, 1, mpi_p, 0, mpi_comm_world, ierr)
            if (proc_rank == 0) data_extents(2, :) = ext_temp
        end if
#endif

    end subroutine s_mpi_gather_data_extents

    !> Gather the sub-domain flow variable data from all processors and reassemble it for the
    !! entire computational domain on the rank 0 processor. This is only done for 1D
    !! simulations.
    !! @param q_sf Flow variable on a single computational sub-domain
    !! @param q_root_sf Flow variable on the entire computational domain
    impure subroutine s_mpi_defragment_1d_flow_variable(q_sf, q_root_sf)

        real(wp), dimension(0:m), intent(in) :: q_sf
        real(wp), dimension(0:m), intent(inout) :: q_root_sf

#ifdef MFC_MPI
        integer :: ierr !< Generic flag used to identify and report MPI errors

        ! Gathering the sub-domain flow variable data from all the processes and putting it
        ! back together for the entire computational domain on the process with rank 0
        call mpi_gatherv(q_sf(0), m + 1, mpi_p, q_root_sf(0), recvcounts, displs, mpi_p, 0, mpi_comm_world, ierr)
#endif

    end subroutine s_mpi_defragment_1d_flow_variable

    !> Deallocation procedures for the module
    impure subroutine s_finalize_mpi_proxy_module

#ifdef MFC_MPI
        ! Deallocating the receive counts and the displacement vector variables used in
        ! variable-gather communication procedures
        if ((format == 1 .and. n > 0) .or. n == 0) then
            deallocate (recvcounts)
            deallocate (displs)
        end if
#endif

    end subroutine s_finalize_mpi_proxy_module

end module m_mpi_proxy
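
The variable-count gather used throughout this module follows a standard MPI recipe: the root first learns each rank's contribution size, builds the displacement vector as a running sum, and a single MPI_GATHERV then assembles all fragments contiguously. The self-contained sketch below demonstrates that recipe outside of MFC; the program name, the uneven "3 + rank" decomposition, and all variable names are illustrative assumptions rather than MFC code.

program gatherv_demo
    use mpi
    implicit none

    integer :: ierr, rank, nprocs, i, nloc
    integer, allocatable :: recvcounts(:), displs(:)
    double precision, allocatable :: xloc(:), xroot(:)

    call mpi_init(ierr)
    call mpi_comm_rank(mpi_comm_world, rank, ierr)
    call mpi_comm_size(mpi_comm_world, nprocs, ierr)

    ! Mimic an uneven 1D domain decomposition: rank r owns 3 + r cells
    nloc = 3 + rank
    allocate (xloc(nloc))
    xloc = dble(rank)

    allocate (recvcounts(0:nprocs - 1), displs(0:nprocs - 1))

    ! Root learns every rank's local size, as in s_initialize_mpi_proxy_module
    call mpi_gather(nloc, 1, mpi_integer, recvcounts(0), 1, mpi_integer, 0, mpi_comm_world, ierr)

    if (rank == 0) then
        displs(0) = 0
        do i = 1, nprocs - 1
            displs(i) = displs(i - 1) + recvcounts(i - 1)
        end do
        allocate (xroot(sum(recvcounts)))
    else
        allocate (xroot(1)) ! receive buffer is only significant on the root
    end if

    ! Defragment: rank r's cells land at offset displs(r) on rank 0
    call mpi_gatherv(xloc, nloc, mpi_double_precision, xroot, recvcounts, displs, &
                     mpi_double_precision, 0, mpi_comm_world, ierr)

    if (rank == 0) print *, 'assembled', size(xroot), 'cells:', xroot

    call mpi_finalize(ierr)
end program gatherv_demo

Run on four ranks, the sketch assembles an 18-cell array on rank 0 in blocks of 3, 4, 5, and 6 cells, mirroring how s_mpi_defragment_1d_flow_variable rebuilds a 1D field from unevenly sized sub-domains.
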
Referenced modules:

m_derived_types
    Shared derived types for field data, patch geometry, bubble dynamics, and MPI I/O structures.
m_global_parameters
    Global parameters for the post-process: domain geometry, equation of state, and output database settings.
m_mpi_common
    MPI communication layer: domain decomposition, halo exchange, reductions, and parallel I/O setup.
m_mpi_proxy
    MPI gather and scatter operations for distributing post-process grid and flow-variable data.

Referenced variables:

logical cont_damage
    Continuum damage modeling.
logical hypoelasticity
    Turn hypoelasticity on.
integer thermal
    1 = adiabatic, 2 = isothermal, 3 = transfer.
real(wp), dimension(num_fluids_max) schlieren_alpha
    Per-fluid Schlieren intensity amplitude coefficients.
integer num_fluids
    Number of different fluids present in the flow.
logical, dimension(3) flux_wrt
integer proc_rank
    Rank of the local processor.
logical mixture_err
    Mixture error limiter.
logical output_partial_domain
    Specify portion of domain to output for post-processing.
real(wp), dimension(:), allocatable x_root_cc
real(wp), dimension(:), allocatable y_cb
integer muscl_order
    Order of accuracy for the MUSCL reconstruction.
logical alt_soundspeed
    Alternate sound speed.
integer relax_model
    Phase change relaxation model.
logical, dimension(3) mom_wrt
real(wp), dimension(:), allocatable x_root_cb
logical, dimension(num_fluids_max) alpha_wrt
logical, dimension(num_fluids_max) alpha_rho_wrt
logical, dimension(num_fluids_max) alpha_rho_e_wrt
integer model_eqns
    Multicomponent flow model.
integer precision
    Floating-point precision of the database file(s).
logical hyperelasticity
    Turn hyperelasticity on.
real(wp), dimension(:), allocatable z_cb
type(physical_parameters), dimension(num_fluids_max) fluid_pp
    Stiffened gas EOS parameters and Reynolds numbers per fluid.
type(bounds_info) z_output
    Portion of domain to output for post-processing.
real(wp), dimension(:), allocatable x_cc
integer fd_order
    Finite-difference order for vorticity and Schlieren derivatives.
real(wp), dimension(:), allocatable x_cb
integer t_step_save
    Interval between consecutive time-step directories.
logical hyper_cleaning
    Hyperbolic cleaning for MHD.
real(wp) bx0
    Constant magnetic field in the x-direction (1D).
logical, dimension(3) omega_wrt
integer num_procs
    Number of processors.
character(len=path_len) case_dir
    Case folder location.
integer weno_order
    Order of accuracy for the WENO reconstruction.
logical mhd
    Magnetohydrodynamics.
logical parallel_io
    Format of the data files.
logical down_sample
    Down-sampling of the database file(s).
logical file_per_process
    Output format.
integer t_step_start
    First time-step directory.
logical mpp_lim
    Maximum volume fraction limiter.
logical, dimension(3) vel_wrt
type(subgrid_bubble_physical_parameters) bub_pp
logical relativity
    Relativity for RMHD.
integer num_ibs
    Number of immersed boundaries.
integer t_step_stop
    Last time-step directory.
integer, dimension(:), allocatable recvcounts
integer, dimension(:), allocatable displs

Module procedures:

impure subroutine s_initialize_mpi_proxy_module
    Computation of parameters, allocation procedures, and/or any other tasks needed to properly set up the module.
impure subroutine s_mpi_bcast_user_inputs
    Since only the processor with rank 0 reads and checks the consistency of the user-provided inputs, these are not available to the remaining processors; this subroutine broadcasts the required information.
impure subroutine s_mpi_gather_spatial_extents(spatial_extents)
    Gather spatial extents from all ranks for Silo database metadata.
impure subroutine s_mpi_gather_data_extents(q_sf, data_extents)
    Gather the Silo database metadata for the flow variable's extents to boost performance of the multidimensional visualization.
impure subroutine s_mpi_defragment_1d_grid_variable
    Collect the sub-domain cell-boundary or cell-center location data from all processors and put back together the grid of the entire computational domain on the rank 0 processor. This is only done for 1D simulations.
impure subroutine s_mpi_defragment_1d_flow_variable(q_sf, q_root_sf)
    Gather the sub-domain flow variable data from all processors and reassemble it for the entire computational domain on the rank 0 processor. This is only done for 1D simulations.
impure subroutine s_finalize_mpi_proxy_module
    Deallocation procedures for the module.