author | 2021-07-09 20:13:56 -0700
---|---
committer | 2021-07-09 20:13:56 -0700
commit | f462adc324cf466b60d62bc8c06c035221b75861 (patch)
tree | 3b100b97e40559ee9d809698e7a348b1c033220c /Examples/Tests/pass_mpi_communicator/PICMI_inputs_2d.py
parent | b28004a648e5b729f8ef52c89d8df7a9423e368c (diff)
Feature pass MPI comm from Python script to AMReX during initialization (#2034)
* Added functionality to pass mpi comm from python script to amrex during initialization
* Fixed missing _ in MPI._sizeof()
* Added functions to get the current processor's rank and total number of processors
* Renamed MPI_Comm to _MPI_Comm_type and defined _MPI_Comm_type in except statement
* Updated comment to explain why mpi4py needs to be imported before loading libwarpx
* Removed ifdef flags that prevent amrex_init_with_inited_mpi from being declared when MPI is off
* Changed amrex_init_with_inited_mpi to be declared even when not using MPI; in that case it is defined to be functionally the same as amrex_init
* Defined MPI = None to signify whether MPI is used, to add another check when initializing amrex
* Changed ifdef blocks in WarpXWrappers.cpp/h to fix compile errors.
Added an ifdef block to conditionally declare amrex_init_with_inited_mpi in WarpXWrappers.h to prevent a compile error when not using MPI. Removed the ifdef block around the same function in WarpXWrappers.cpp, since the function needs to be declared even when MPI is not used; it will simply never be called in that case.
* Changed BL_USE_MPI to AMREX_USE_MPI and removed incorrect MPI=None statement
* Changed BL_USE_MPI to AMREX_USE_MPI
* Added test to verify correct passing of MPI communicator to amrex
* Added ability to pass mpi_comm to sim.step
* Changed test to check for differing outputs when passed different inputs
* Removed obsolete comments; refactored program to use more shared code
* Refactored comments
* Updated description to match test
* Removed unnecessary imports and updated comments
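
Taken together, the feature works as follows: the Python driver splits MPI.COMM_WORLD with mpi4py, and the resulting sub-communicator is handed to WarpX through sim.step(..., mpi_comm=...), which forwards it to AMReX at initialization. The snippet below is a minimal sketch of that usage, assuming a pywarpx build with MPI enabled; the tiny grid and the omission of species and diagnostics are simplifications for illustration, not part of the test added by this commit.

```python
# Minimal sketch (not the test itself): split the world communicator and let
# each group initialize its own WarpX/AMReX instance on its sub-communicator.
from mpi4py import MPI      # import before pywarpx so MPI is initialized first
from pywarpx import picmi

comm_world = MPI.COMM_WORLD
color = 0 if comm_world.Get_rank() == 0 else 1   # rank 0 vs. all other ranks
sub_comm = comm_world.Split(color)

# A deliberately small 2D setup, just enough to run a few steps.
grid = picmi.Cartesian2DGrid(number_of_cells=[32, 32],
                             lower_bound=[-20.e-6, -20.e-6],
                             upper_bound=[20.e-6, 20.e-6],
                             lower_boundary_conditions=['periodic', 'periodic'],
                             upper_boundary_conditions=['periodic', 'periodic'])
solver = picmi.ElectromagneticSolver(grid=grid, cfl=1.)
sim = picmi.Simulation(solver=solver, max_steps=5)

# The new keyword added by this commit: AMReX is initialized on sub_comm
# rather than on MPI_COMM_WORLD.
sim.step(5, mpi_comm=sub_comm)

# The new accessors report rank/size relative to the passed communicator.
from pywarpx import wx
assert wx.libwarpx.warpx_getNProcs() == sub_comm.size
assert wx.libwarpx.warpx_getMyProc() == sub_comm.rank
```

When launched on several MPI ranks, each group then drives an independent simulation on its own communicator, which is exactly what the test in this diff exercises by giving the two groups different plasma densities.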
Diffstat (limited to 'Examples/Tests/pass_mpi_communicator/PICMI_inputs_2d.py')
-rw-r--r-- | Examples/Tests/pass_mpi_communicator/PICMI_inputs_2d.py | 139 |
1 file changed, 139 insertions, 0 deletions
diff --git a/Examples/Tests/pass_mpi_communicator/PICMI_inputs_2d.py b/Examples/Tests/pass_mpi_communicator/PICMI_inputs_2d.py
new file mode 100644
index 000000000..6eb23d272
--- /dev/null
+++ b/Examples/Tests/pass_mpi_communicator/PICMI_inputs_2d.py
@@ -0,0 +1,139 @@
+# --- Test if MPI_communicator is correctly passed from Python to CPP
+# --- This test should be run with MPI enabled
+# --- Inputs taken from langmuir_2d. Runs 10 steps, and will check
+# --- if the correct amount of processors are initialized in AMReX.
+
+from mpi4py import MPI
+from pywarpx import picmi
+constants = picmi.constants
+
+##########################
+# MPI communicator setup
+##########################
+
+# split processor 0 into separate communicator from others
+comm_world = MPI.COMM_WORLD
+rank = comm_world.Get_rank()
+if rank == 0:
+    color = 0
+else:
+    color = 1
+new_comm = comm_world.Split(color)
+
+##########################
+# physics parameters
+##########################
+
+# different communicators will be passed different plasma density
+plasma_density = [1.e18, 1.e19]
+plasma_xmin = 0.
+plasma_x_velocity = 0.1*constants.c
+
+##########################
+# numerics parameters
+##########################
+
+# --- Number of time steps
+max_steps = 10
+diagnostic_intervals = "::10"
+
+# --- Grid
+nx = 64
+ny = 64
+
+xmin = -20.e-6
+ymin = -20.e-6
+xmax = +20.e-6
+ymax = +20.e-6
+
+number_per_cell_each_dim = [2,2]
+
+##########################
+# physics components
+##########################
+
+uniform_plasma = picmi.UniformDistribution(density = plasma_density[color],
+                                           upper_bound = [0., None, None],
+                                           directed_velocity = [0.1*constants.c, 0., 0.])
+
+electrons = picmi.Species(particle_type='electron', name='electrons', initial_distribution=uniform_plasma)
+
+##########################
+# numerics components
+##########################
+
+grid = picmi.Cartesian2DGrid(number_of_cells = [nx, ny],
+                             lower_bound = [xmin, ymin],
+                             upper_bound = [xmax, ymax],
+                             lower_boundary_conditions = ['periodic', 'periodic'],
+                             upper_boundary_conditions = ['periodic', 'periodic'],
+                             moving_window_velocity = [0., 0., 0.],
+                             warpx_max_grid_size = 32)
+
+solver = picmi.ElectromagneticSolver(grid=grid, cfl=1.)
+
+##########################
+# diagnostics
+##########################
+
+field_diag = picmi.FieldDiagnostic(name = f'diag{color + 1}',
+                                   grid = grid,
+                                   period = diagnostic_intervals,
+                                   data_list = ['Ex', 'Jx'],
+                                   write_dir = '.',
+                                   warpx_file_prefix = f'Python_pass_mpi_comm_plt{color + 1}_')
+
+part_diag = picmi.ParticleDiagnostic(name = f'diag{color + 1}',
+                                     period = diagnostic_intervals,
+                                     species = [electrons],
+                                     data_list = ['weighting', 'ux'])
+
+##########################
+# simulation setup
+##########################
+
+sim = picmi.Simulation(solver = solver,
+                       max_steps = max_steps,
+                       verbose = 1,
+                       warpx_current_deposition_algo = 'direct')
+
+sim.add_species(electrons,
+                layout = picmi.GriddedLayout(n_macroparticle_per_cell=number_per_cell_each_dim, grid=grid))
+
+sim.add_diagnostic(field_diag)
+sim.add_diagnostic(part_diag)
+
+##########################
+# simulation run
+##########################
+
+sim.step(max_steps, mpi_comm=new_comm)
+
+##########################
+# test
+##########################
+
+# NOTE: Some tests are done in this input PICMI file. These tests
+# check that amrex initialized the correct amount of procs and
+# that the procs ranks in amrex are correct.
+# If any of these tests fail, the terminal will print that the
+# program crashed.
+
+comm_world_size = comm_world.size
+new_comm_size = new_comm.size
+
+from pywarpx import wx
+
+if color == 0:
+    # verify that communicator contains correct number of procs (1)
+    assert wx.libwarpx.warpx_getNProcs() == comm_world_size - 2
+    assert wx.libwarpx.warpx_getNProcs() == new_comm_size
+
+else:
+    # verify that amrex initialized with 1 fewer proc than comm world
+    assert wx.libwarpx.warpx_getNProcs() == comm_world_size - 1
+    assert wx.libwarpx.warpx_getNProcs() == new_comm_size
+
+    # verify that amrex proc ranks are offset by -1 from
+    # world comm proc ranks
+    assert wx.libwarpx.warpx_getMyProc() == rank - 1
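
A note on how the assertions constrain the run configuration (an inference, not something stated explicitly in the commit): the color-0 group contains only world rank 0, so new_comm_size is 1 there, and requiring warpx_getNProcs() == comm_world_size - 2 == new_comm_size only holds when comm_world_size is 3. The test therefore appears to be written for exactly three MPI ranks, one in the first communicator and two in the second, e.g. launched along the lines of mpirun -np 3 python PICMI_inputs_2d.py; the exact launch command used by the CI is not shown in this diff.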