MPI-AMRVAC  3.1
The MPI - Adaptive Mesh Refinement - Versatile Advection Code (development version)
amrvac.t
!> AMRVAC solves a set of hyperbolic equations
!> \f$\vec{u}_t + \nabla_x \cdot \vec{f}(\vec{u}) = \vec{s}\f$
!> using adaptive mesh refinement.
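!> (As a concrete instance: with the HD physics module, \f$\vec{u}=(\rho,\rho\vec{v},e)\f$
!> and \f$\vec{f}(\vec{u})\f$ collects the corresponding Euler fluxes.)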
program amrvac

  use mod_global_parameters
  use mod_input_output, only: read_arguments, read_snapshot
  use mod_usr_methods
  use mod_usr
  use mod_initialize
  use mod_selectgrids, only: selectgrids
  use mod_particles
  use mod_fix_conserve, only: allocatebflux
  use mod_advance, only: process
  use mod_multigrid_coupling
  use mod_convert, only: init_convert
  use mod_physics
  use mod_trac, only: initialize_trac_after_settree
  use mod_amr_grid, only: settree, resettree, resettree_convert
  use mod_ghostcells_update, only: getbc
  use mod_comm_lib, only: comm_start, comm_finalize, mpistop

  double precision :: time0, time_in
  logical, save :: part_file_exists=.false.

  call comm_start()

  time0 = mpi_wtime()
  time_advance = .false.
  time_bc = zero

  ! read command line arguments first
  call read_arguments()

  ! init_convert is called before usr_init as the user might associate a convert method
  call init_convert()
  ! the usr_init routine should load a physics module
  call usr_init()

  call initialize_amrvac()

  if (restart_from_file /= undefined) then
     ! restart from a previous run or convert a dat file:
     ! get input data from the previous AMRVAC run

     ! read in dat file
     call read_snapshot()

     ! rewrite the it=0 snapshot when restarting from the it=0 state
     if(it==0.and.itsave(1,2)==0) snapshotnext=snapshotnext-1

     if (reset_time) then
        ! reset it and global time to their original values
        it = it_init
        global_time = time_init
        ! reset snapshot number
        snapshotnext=0
     end if

     if (reset_it) then
        ! reset it to its original value
        it = it_init
     end if

     ! allow the user to read extra data before filling boundary conditions
     if (associated(usr_process_grid) .or. &
          associated(usr_process_global)) then
        call process(it,global_time)
     end if

     ! modify initial condition
     if (firstprocess) then
        ! update ghost cells for all need-boundary variables before modification
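        ! (getbc(time,qdt,psb,nwstart,nwbc) fills the ghost cells, including
        !  physical boundaries, of the nwbc variables starting at index nwstart)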
        call getbc(global_time,0.d0,ps,1,nwflux+nwaux)
        call modify_ic
     end if

     ! select active grids
     call selectgrids

     ! update ghost cells for all need-boundary variables
     call getbc(global_time,0.d0,ps,1,nwflux+nwaux)

     ! reset AMR grid
     if (reset_grid) then
        call settree
     else
        ! set up boundary flux conservation arrays
        if (levmax>levmin) call allocatebflux
     end if

     ! all blocks refined to the same level for output
     if(convert .and. level_io>0 .or. level_io_min.ne.1 .or. level_io_max.ne.nlevelshi) &
          call resettree_convert

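     ! (VACPP preprocessor syntax: the {^nooned ... } block below is only
     !  compiled when the code is configured with more than one dimension)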
     {^nooned
     ! improve initial condition after restart and modification
     call improve_initial_condition()
     }

     if(use_multigrid) call mg_setup_multigrid()

     if(use_particles) then
        call read_particles_snapshot(part_file_exists)
        call init_gridvars()
        if (.not. part_file_exists) call particles_create()
        if(convert) then
           call handle_particles()
           call finish_gridvars()
           call time_spent_on_particles()
           call comm_finalize
           stop
        end if
     end if

     if(convert) then
        if (npe/=1.and.(.not.(index(convert_type,'mpi')>=1)) &
             .and. convert_type .ne. 'user') &
             call mpistop("non-mpi conversion only uses 1 cpu")
        if(mype==0.and.level_io>0) write(unitterm,*)'reset tree to fixed level=',level_io

        ! here requires the -1 snapshot offset
        if (autoconvert) snapshotnext=snapshotnext-1

        if(associated(phys_special_advance)) then
           ! e.g. calculate MF velocity from magnetic field
           call phys_special_advance(global_time,ps)
        end if

        call generate_plotfile
        call comm_finalize
        stop
     end if

  else

     ! form and initialize all grids at level one
     call initlevelone

     ! set up and initialize finer level grids, if needed
     call settree

     if(use_multigrid) call mg_setup_multigrid()

     ! improve initial condition
     call improve_initial_condition()

     ! select active grids
     call selectgrids

     if (use_particles) then
        call init_gridvars()
        call particles_create()
     end if

  end if

  ! initialize something based on tree information
  call initialize_trac_after_settree

  if (mype==0) then
     print*,'-------------------------------------------------------------------------------'
     write(*,'(a,f17.3,a)')' Startup phase took : ',mpi_wtime()-time0,' sec'
     print*,'-------------------------------------------------------------------------------'
  end if

  ! an interface to allow the user to do special things before the main loop
  if (associated(usr_before_main_loop)) &
       call usr_before_main_loop()

  ! do time integration of all grids on all levels
  call timeintegration()

  if (mype==0) then
     print*,'-------------------------------------------------------------------------------'
     write(*,'(a,f17.3,a)')' Finished AMRVAC in : ',mpi_wtime()-time0,' sec'
     print*,'-------------------------------------------------------------------------------'
  end if

  call comm_finalize

contains

  subroutine timeintegration()
    use mod_timing
    use mod_forest, only: nleafs_active
    use mod_input_output, only: saveamrfile
    use mod_advance, only: advance, process_advanced
    use mod_dt, only: setdt

    integer :: level, ifile, fixcount, ncells_block, igrid, iigrid
    integer(kind=8) :: ncells_update
    logical :: crashall
    double precision :: time_last_print, time_write0, time_write, time_before_advance, dt_loop

    time_in=mpi_wtime()
    time_last_print = -bigdouble
    fixcount=1
    n_saves(filelog_:fileout_) = snapshotnext

    do ifile=nfile,1,-1
       if(resume_previous_run) then
          tsavelast(ifile)=aint((global_time+smalldouble)/dtsave(ifile))*dtsave(ifile)
          itsavelast(ifile)=it/ditsave(ifile)*ditsave(ifile)
       else
          tsavelast(ifile)=global_time
          itsavelast(ifile)=it
       end if
    end do

    ! the next two are used to keep track of the performance during runtime:
    ittimelast=it
    timelast=mpi_wtime()

    ! ------ start of integration loop ------
    if (mype==0) then
       write(*, '(A,ES9.2,A)') ' Start integrating, print status every ', &
            time_between_print, ' seconds'
       write(*, '(A4,A10,A12,A12,A12)') ' #', 'it', 'time', 'dt', 'wc-time(s)'
    end if

    timeloop0=mpi_wtime()
    time_bc=0.d0
    time_write=0.d0
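    ! ({...*} is VACPP shorthand for a product over the dimensions; the next
    !  line expands to the number of interior cells per block, e.g.
    !  (ixghi1-2*nghostcells)*(ixghi2-2*nghostcells) in 2D)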
    ncells_block={(ixghi^d-2*nghostcells)*}
    ncells_update=0
    dt_loop=0.d0

    time_advance=.true.

    time_evol : do

       time_before_advance=mpi_wtime()
       ! set time step
       call setdt()

       ! Optionally call a user method that can modify the grid variables at the
       ! beginning of a time step
       if (associated(usr_process_grid) .or. &
            associated(usr_process_global)) then
          call process(it,global_time)
       end if

       ! check if output needs to be written
       do ifile=nfile,1,-1
          save_file(ifile) = timetosave(ifile)
       end do

       timeio0=mpi_wtime()

       if (timeio0 - time_last_print > time_between_print) then
          time_last_print = timeio0
          if (mype == 0) then
             write(*, '(A4,I10,ES12.4,ES12.4,ES12.4)') " #", &
                  it, global_time, dt, timeio0 - time_in
          end if
       end if

       ! output data
       if (any(save_file)) then
          if(associated(usr_modify_output)) then
             ! Users can modify or set variables before output is written
             do iigrid=1,igridstail; igrid=igrids(iigrid);
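                ! (^d&...; is VACPP shorthand: the statement is repeated once per
                !  dimension, filling dxlevel(1:ndim) with this grid's cell sizes)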
                ^d&dxlevel(^d)=rnode(rpdx^d_,igrid);
                block=>ps(igrid)
                call usr_modify_output(ixg^ll,ixm^ll,global_time,ps(igrid)%w,ps(igrid)%x)
             end do
          end if
          time_write=0.d0
          do ifile=nfile,1,-1
             if (save_file(ifile)) then
                time_write0=mpi_wtime()
                call saveamrfile(ifile)
                time_write=time_write+mpi_wtime()-time_write0
             end if
          end do
       end if

       ! output a snapshot when the user creates a file named 'savenow' in the
       ! same folder as the amrvac executable
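       ! (e.g. "touch savenow" in the run directory; the file is removed again
       !  below once the snapshot has been written)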
       if (mype==0) inquire(file='savenow',exist=save_now)
       if (npe>1) call mpi_bcast(save_now,1,mpi_logical,0,icomm,ierrmpi)

       if (save_now) then
          if(mype==0) write(*,'(a,i7,a,i7,a,es12.4)') ' save a snapshot No.',&
               snapshotnext,' at it=',it,' global_time=',global_time
          call saveamrfile(1)
          call saveamrfile(2)
          call mpi_file_delete('savenow',mpi_info_null,ierrmpi)
       end if
       timeio_tot=timeio_tot+mpi_wtime()-timeio0

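       ! estimate whether one more step fits within the wall-time budget: the
       ! last loop's wall-clock cost plus a safety margin of four data writes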
       pass_wall_time=mpi_wtime()-time0+dt_loop+4.d0*time_write >= wall_time_max

       ! exit the time loop if the maximum iteration count or end time is reached,
       ! the wall-time limit is hit, or the final-dt criterion has triggered
       if (it>=it_max .or. global_time>=time_max .or. pass_wall_time .or. final_dt_exit) exit time_evol

       ! solve the equations by advancing one time step
       call advance(it)

       ! if unphysical values were encountered, save the last good state and stop the run
       call mpi_allreduce(crash,crashall,1,mpi_logical,mpi_lor,icomm,ierrmpi)
       if (crashall) then
          call saveamrfile(1)
          call saveamrfile(2)
          if(mype==0) write(*,*) "Error: small value encountered, run crash."
          call mpi_abort(icomm, iigrid, ierrmpi)
       end if

       ! Optionally call a user method that can modify the grid variables at the
       ! end of a time step: e.g. for two-way coupling to a PIC code
       if (associated(usr_process_adv_grid) .or. &
            associated(usr_process_adv_global)) then
          call process_advanced(it,global_time)
       end if

       ! update AMR mesh and tree
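       ! (regrid only once every ditregrid iterations; fixcount counts the
       !  iterations since the last regrid)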
       timegr0=mpi_wtime()
       if(ditregrid>1) then
          if(fixcount<ditregrid) then
             fixcount=fixcount+1
          else
             if (refine_max_level>1 .and. .not.(fixgrid())) call resettree
             fixcount=1
          end if
       else
          if (refine_max_level>1 .and. .not.(fixgrid())) call resettree
       end if
       timegr_tot=timegr_tot+(mpi_wtime()-timegr0)

       ! update time variables
       it = it + 1
       global_time = global_time + dt

       if(it>9000000)then
          ! for extremely long runs: wrap the step counter (kept past the
          ! slowsteps startup ramp) and reset the iteration-save bookkeeping
          it = slowsteps+10
          itsavelast(:)=0
       end if

       ! count updated cells
       ncells_update=ncells_update+ncells_block*nleafs_active

       ! wall-clock time spent on one loop iteration
       dt_loop=mpi_wtime()-time_before_advance
    end do time_evol

    if(use_particles) then
       call finish_gridvars()
    end if

    time_advance=.false.

    timeloop=mpi_wtime()-timeloop0

    if (mype==0) then
       write(*,'(a,f12.3,a)')' Total timeloop took        : ',timeloop,' sec'
       write(*,'(a,f12.3,a)')' Time spent on AMR          : ',timegr_tot,' sec'
       write(*,'(a,f12.2,a)')'                 Percentage : ',100.0*timegr_tot/timeloop,' %'
       write(*,'(a,f12.3,a)')' Time spent on IO in loop   : ',timeio_tot,' sec'
       write(*,'(a,f12.2,a)')'                 Percentage : ',100.0*timeio_tot/timeloop,' %'
       write(*,'(a,f12.3,a)')' Time spent on ghost cells  : ',time_bc,' sec'
       write(*,'(a,f12.2,a)')'                 Percentage : ',100.0*time_bc/timeloop,' %'
       write(*,'(a,f12.3,a)')' Time spent on computing    : ',timeloop-timeio_tot-timegr_tot-time_bc,' sec'
       write(*,'(a,f12.2,a)')'                 Percentage : ',100.0*(timeloop-timeio_tot-timegr_tot-time_bc)/timeloop,' %'
       write(*,'(a,es12.3 )')' Cells updated / proc / sec : ',dble(ncells_update)*dble(nstep)/dble(npe)/timeloop
    end if

    ! output end state
    timeio0=mpi_wtime()
    do ifile=nfile,1,-1
       if(itsavelast(ifile)<it) call saveamrfile(ifile)
    end do
    if (mype==0) call mpi_file_close(log_fh,ierrmpi)
    timeio_tot=timeio_tot+(mpi_wtime()-timeio0)

    if (mype==0) then
       write(*,'(a,f12.3,a)')' Total time spent on IO     : ',timeio_tot,' sec'
       write(*,'(a,f12.3,a)')' Total timeintegration took : ',mpi_wtime()-time_in,' sec'
       write(*, '(A4,I10,ES12.3,ES12.3,ES12.3)') " #", &
            it, global_time, dt, timeloop
    end if

    {#IFDEF RAY
    call time_spent_on_rays
    }

    if(use_particles) call time_spent_on_particles

    if (use_multigrid) call mg_timers_show(mg)
  end subroutine timeintegration

  !> Save times are defined by either tsave(isavet(ifile),ifile),
  !> itsave(isaveit(ifile),ifile), dtsave(ifile), or ditsave(ifile).
  !> tsavestart(ifile) determines the first output time; it only affects
  !> the output times governed by dtsave(ifile).
  !> Other conditions may be included.
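  !> (Example: with dtsave(2)=1.d-1 and tsavestart(2)=1.d0, snapshots are
  !> written at t=1.0, 1.1, 1.2, ..., and none before t=1.0.)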
  logical function timetosave(ifile)
    use mod_global_parameters

    integer:: ifile
    logical:: oksave

    oksave=.false.
    if (it==itsave(isaveit(ifile),ifile)) then
       oksave=.true.
       isaveit(ifile)=isaveit(ifile)+1
    end if
    if (it==itsavelast(ifile)+ditsave(ifile)) oksave=.true.

    if (global_time>=tsave(isavet(ifile),ifile).and.global_time-dt<tsave(isavet(ifile),ifile)) then
       oksave=.true.
       isavet(ifile)=isavet(ifile)+1
    end if

    if(global_time>=tsavestart(ifile)-smalldouble)then
       if (global_time>=tsavelast(ifile)+dtsave(ifile)-smalldouble)then
          oksave=.true.
          n_saves(ifile) = n_saves(ifile) + 1
       endif
    endif

    if (oksave) then
       tsavelast(ifile) =global_time
       itsavelast(ifile)=it
    end if
    timetosave=oksave

    return
  end function timetosave

  !> Return true if the AMR grid should not be adapted any more. This is
  !> controlled by tfixgrid or itfixgrid. Other conditions may be included.
  logical function fixgrid()
    use mod_global_parameters

    fixgrid = (global_time>=tfixgrid .or. it>=itfixgrid)
  end function fixgrid

end program amrvac