27 double precision :: time0, time_in
28 logical, save :: part_file_exists=.false.
109 call read_particles_snapshot(part_file_exists)
113 call handle_particles()
114 call finish_gridvars()
115 call time_spent_on_particles()
124 call mpistop("non-mpi conversion only uses 1 cpu")
167 print*,'-------------------------------------------------------------------------------'
168 write(*,'(a,f17.3,a)') ' Startup phase took : ',mpi_wtime()-time0,' sec'
169 print*,'-------------------------------------------------------------------------------'
180 print*,'-------------------------------------------------------------------------------'
181 write(*,'(a,f17.3,a)') ' Finished AMRVAC in : ',mpi_wtime()-time0,' sec'
182 print*,'-------------------------------------------------------------------------------'
200 integer :: level, ifile, fixcount, ncells_block, igrid, iigrid
201 integer(kind=8) ncells_update
203 double precision :: time_last_print, time_write0, time_write, time_before_advance, dt_loop
206 time_last_print = -bigdouble
227 write(*,'(A,ES9.2,A)') ' Start integrating, print status every ', &
229 write(*,'(A4,A10,A12,A12,A12)') ' #','it','time','dt','wc-time(s)'
243 time_before_advance=mpi_wtime()
264 write(*,'(A4,I10,ES12.4,ES12.4,ES12.4)') " #", &
273 do iigrid=1,igridstail; igrid=igrids(iigrid);
282 time_write0=mpi_wtime()
284 time_write=time_write+mpi_wtime()-time_write0
295 if(mype==0) write(*,'(a,i7,a,i7,a,es12.4)') ' save a snapshot No.',&
299 call mpi_file_delete('savenow',mpi_info_null,ierrmpi)
316 if(mype==0) write(*,*) "Error: small value encountered, run crash."
354 dt_loop=mpi_wtime()-time_before_advance
358 call finish_gridvars()
366 write(*,'(a,f12.3,a)') ' Total timeloop took : ',timeloop,' sec'
367 write(*,'(a,f12.3,a)') ' Time spent on AMR : ',timegr_tot,' sec'
369 write(*,'(a,f12.3,a)') ' Time spent on IO in loop : ',timeio_tot,' sec'
371 write(*,'(a,f12.3,a)') ' Time spent on ghost cells : ',time_bc,' sec'
375 write(*,'(a,es12.3 )') ' Cells updated / proc / sec : ',dble(ncells_update)*dble(nstep)/dble(npe)/timeloop
387 write(*,'(a,f12.3,a)') ' Total time spent on IO : ',timeio_tot,' sec'
388 write(*,'(a,f12.3,a)') ' Total timeintegration took : ',mpi_wtime()-time_in,' sec'
389 write(*,'(A4,I10,ES12.3,ES12.3,ES12.3)') " #", &
394 call time_spent_on_rays
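Read together, the fragments above trace the control flow of the driver: MPI startup, initialization and grid setup, the timed call to timeintegration, and the final report. The condensed sketch below is a paraphrase of that flow, not the verbatim source; the use statement and the exact placement of the timing prints are assumptions.

  program amrvac
    ! Hedged sketch of the driver; routines are those listed further down
    use mod_global_parameters
    double precision :: time0

    call comm_start                  ! initialize the MPI environment
    time0 = mpi_wtime()
    call initialize_amrvac()         ! read par files and initialize variables
    call settree                     ! build up the AMR grid
    if (mype==0) write(*,'(a,f17.3,a)') ' Startup phase took : ', mpi_wtime()-time0, ' sec'

    call timeintegration()           ! main loop, with its own timing report
    if (mype==0) write(*,'(a,f17.3,a)') ' Finished AMRVAC in : ', mpi_wtime()-time0, ' sec'

    call comm_finalize               ! shut down the MPI environment
  end program amrvac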
logical function fixgrid()
Return true if the AMR grid should not be adapted any more. This is controlled by tfixgrid or itfixgrid.
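A minimal sketch consistent with this description, using the tfixgrid, itfixgrid, global_time and it variables listed further down:

  logical function fixgrid()
    ! Freeze the AMR grid once either the fixed-grid time or iteration count is reached
    fixgrid = (global_time >= tfixgrid .or. it >= itfixgrid)
  end function fixgrid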
logical function timetosave(ifile)
Save times are defined by either tsave(isavet(ifile),ifile) or itsave(isaveit(ifile),ifile).
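A hedged sketch of the scheduling this implies, checking only the tsave/itsave entries (the dtsave, ditsave and tsavestart variables listed further down add conditions that are omitted here):

  logical function timetosave(ifile)
    integer, intent(in) :: ifile
    logical :: oksave

    oksave = .false.
    ! Next scheduled physical time for this output type reached?
    if (global_time >= tsave(isavet(ifile), ifile)) then
      oksave = .true.
      isavet(ifile) = isavet(ifile) + 1
    end if
    ! Next scheduled iteration for this output type reached?
    if (it == itsave(isaveit(ifile), ifile)) then
      oksave = .true.
      isaveit(ifile) = isaveit(ifile) + 1
    end if
    timetosave = oksave
  end function timetosave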
program amrvac
AMRVAC solves a set of hyperbolic equations using adaptive mesh refinement.
subroutine timeintegration()
Module containing all the time stepping schemes.
subroutine, public process_advanced(iit, qt)
process_advanced is a user entry point in the time loop, just after advance; it allows modifying the solution,...
subroutine, public advance(iit)
Advance all the grids over one time step, including all sources.
subroutine, public process(iit, qt)
process is a user entry point in the time loop, before output and advance; it allows modifying the solution,...
subroutine, public resettree
reset AMR and (de)allocate boundary flux storage at level changes
subroutine, public resettree_convert
Force the tree to desired level(s) from level_io(_min/_max) used for conversion to vtk output.
subroutine, public settree
Build up AMR.
subroutine, public comm_start
Initialize the MPI environment.
subroutine, public comm_finalize
Finalize (or shutdown) the MPI environment.
subroutine, public mpistop(message)
Exit MPI-AMRVAC with an error message.
subroutine generate_plotfile
subroutine init_convert()
subroutine, public setdt()
setdt - set dt for all levels between levmin and levmax. dtpar>0 --> use fixed dtpar for all levels; dt...
Module for flux conservation near refinement boundaries.
subroutine, public allocatebflux
Module with basic grid data structures.
update ghost cells of all blocks including physical boundaries
subroutine getbc(time, qdt, psb, nwstart, nwbc, req_diag)
Update ghost cells of all blocks, including physical boundaries.
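A typical call following the signature above; psb (the block state array to fill) and nwflux (the number of variables to update) are assumed names, and req_diag is taken to be a flag requesting corner/diagonal ghost cells:

  ! Fill ghost cells of variables 1..nwflux of state array psb at the current time
  call getbc(global_time, 0.d0, psb, 1, nwflux, .true.)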
This module contains definitions of global parameters and variables and some generic functions/subroutines...
double precision, dimension(nfile) tsavelast
type(state), pointer block
Block pointer for using one block and its previous state.
integer nstep
How many sub-steps the time integrator takes.
integer it_max
Stop the simulation after this many time steps have been taken.
logical reset_it
If true, reset iteration count to 0.
integer ixghi
Upper index of grid block arrays.
logical, dimension(nfile) save_file
whether or not to save an output file
logical resume_previous_run
If true, restart a previous run from the latest snapshot.
double precision global_time
The global simulation time.
integer, dimension(nsavehi, nfile) itsave
Save output of type N on iterations itsave(:, N)
double precision time_max
End time for the simulation.
double precision time_init
Start time for the simulation.
logical firstprocess
If true, call initonegrid_usr upon restarting.
integer snapshotini
Resume from the snapshot with this index.
integer it
Number of time steps taken.
integer it_init
initial iteration count
integer ditregrid
Reconstruct the AMR grid once every ditregrid iteration(s)
character(len=std_len) convert_type
Which format to use when converting.
integer itfixgrid
Fix the AMR grid after this many time steps.
integer, parameter nlevelshi
The maximum number of levels in the grid refinement.
logical use_particles
Use particles module or not.
integer icomm
The MPI communicator.
logical reset_time
If true, reset iteration count and global_time to original values, and start writing snapshots at index zero.
integer mype
The rank of the current MPI task.
integer, dimension(1:nfile) n_saves
Number of saved files of each type.
double precision, dimension(nfile) tsavestart
Start of read out (not counting specified read outs)
integer, dimension(nfile) ditsave
Repeatedly save output of type N when ditsave(N) time steps have passed.
integer, dimension(:), allocatable, parameter d
double precision wall_time_max
Ending wall time (in hours) for the simulation.
integer ierrmpi
A global MPI error return code.
logical autoconvert
If true, already convert to output format during the run.
integer slowsteps
If > 1, then dt is reduced by a damping factor during the first slowsteps-1 time steps.
integer snapshotnext
IO: snapshot and collapsed views output numbers/labels.
integer npe
The number of MPI tasks.
integer, dimension(nfile) itsavelast
double precision time_between_print
Wall-clock time interval at which to print monitoring output during the time-integration loop.
integer, parameter unitterm
Unit for standard output.
double precision, dimension(nfile) dtsave
Repeatedly save output of type N when dtsave(N) simulation time has passed.
character(len=std_len) restart_from_file
If not 'unavailable', resume from snapshot with this base file name.
double precision, dimension(:,:), allocatable rnode
Corner coordinates.
integer, parameter filelog_
Constant indicating log output.
integer, parameter fileout_
Constant indicating regular output.
double precision time_bc
accumulated wall-clock time spent on boundary conditions
double precision tfixgrid
Fix the AMR grid after this time.
integer nghostcells
Number of ghost cells surrounding a grid.
character(len=*), parameter undefined
double precision, dimension(nsavehi, nfile) tsave
Save output of type N on times tsave(:, N)
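The save-control arrays above (itsave, tsave, ditsave, dtsave, tsavestart) are normally filled from the par file rather than set in code. A hedged &savelist fragment, assuming the usual convention that file index 1 is the log and index 2 the snapshot (.dat) output:

  &savelist
    itsave(1,1) = 0        ! write a log entry at iteration 0
    itsave(1,2) = 0        ! write a snapshot at iteration 0
    ditsave(1)  = 10       ! log every 10 iterations
    dtsave(2)   = 1.0d-1   ! snapshot every 0.1 in simulation time
  /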
logical convert
If true and restart_from_file is given, convert snapshots to other file formats.
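Similarly, restart_from_file and the convert flags above belong to the &filelist namelist; a hedged example that resumes from a snapshot and converts it (the file name and convert_type value are illustrative only):

  &filelist
    restart_from_file = 'output/kh0010.dat'
    convert           = .true.          ! convert the snapshot instead of running
    convert_type      = 'vtuBCCmpi'     ! target format for the converter
    autoconvert       = .false.
  /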
logical reset_grid
If true, rebuild the AMR grid upon restarting.
logical crash
If true, save a snapshot before crashing when the run encounters unphysical values.
logical use_multigrid
Use multigrid (only available in 2D and 3D)
integer refine_max_level
Maximal number of AMR levels.
integer, parameter nfile
Number of output methods.
logical pass_wall_time
If true, the wall-time limit has been reached; snapshotnext is modified so the final snapshot can be overwritten later.
logical final_dt_exit
Force timeloop exit when final dt < dtmin.
integer, dimension(nfile) isaveit
double precision, dimension(ndim) dxlevel
integer, dimension(nfile) isavet
integer log_fh
MPI file handle for logfile.
subroutine, public improve_initial_condition()
improve initial condition after initialization
subroutine, public initlevelone
Generate and initialize all grids at the coarsest level (level one)
subroutine, public modify_ic
modify initial condition
This module handles the initialization of various components of amrvac.
subroutine, public initialize_amrvac()
Initialize amrvac: read par files and initialize variables.
Module to couple the octree-mg library to AMRVAC. This file uses the VACPP preprocessor,...
type(mg_t) mg
Data structure containing the multigrid tree.
subroutine mg_setup_multigrid()
Setup multigrid for usage.
Module containing all the particle routines.
subroutine particles_create()
Create initial particles.
This module defines the procedures of a physics module. It contains function pointers for the various...
procedure(sub_special_advance), pointer phys_special_advance
subroutine, public selectgrids
double precision timegr_tot
double precision timeio_tot
double precision timelast
double precision timeloop
double precision timeloop0
subroutine, public initialize_trac_after_settree
Module with all the methods that users can customize in AMRVAC.
procedure(process_grid), pointer usr_process_grid
procedure(process_adv_grid), pointer usr_process_adv_grid
procedure(sub_modify_io), pointer usr_modify_output
procedure(p_no_args), pointer usr_before_main_loop
procedure(process_global), pointer usr_process_global
procedure(process_adv_global), pointer usr_process_adv_global
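These user hooks are typically associated in the user module's initialization; below is a hedged sketch in which usr_init and my_process_global are assumed names and the (iit, qt) interface mirrors the process/process_advanced entries above:

  subroutine usr_init()
    ! Hook a custom global processing routine into the time loop
    usr_process_global => my_process_global
  end subroutine usr_init

  subroutine my_process_global(iit, qt)
    integer, intent(in)          :: iit   ! current iteration number
    double precision, intent(in) :: qt    ! current simulation time
    ! e.g. accumulate a volume-averaged diagnostic once per step
  end subroutine my_process_global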