From 0d91a3d9f2cb70ad0ffffca2c614ecd63005fa46 Mon Sep 17 00:00:00 2001
From: Roelof Groenewald <40245517+roelof-groenewald@users.noreply.github.com>
Date: Thu, 18 Nov 2021 16:31:10 -0800
Subject: Bug fixes and cleanup in load balancing (#2563)

* added helper function to rebuild MultiFabs and iMultiFabs during load
  balancing and included rebuilding of EB multifabs

* added redistribute call for the particle boundary buffer during load
  balancing

* consistently use DistributionMap rather than dmap in ElectrostaticSolver.cpp

* applied suggested changes from code review by Phil Miller

* removed default argument for redistribute in RemakeMultiFab

* removed RemakeMultiFab() as a member of WarpX

* Only remake EB multifabs if they are used

Co-authored-by: Lorenzo Giacomel <47607756+lgiacome@users.noreply.github.com>

* adapted existing particle scraping test (PICMI version) to also cover the
  redistribution of particle buffers from load balancing

* added redeclaring of m_borrowing

* Move redeclaring of m_borrowing inside if statement for ECT solver algorithm

Co-authored-by: Lorenzo Giacomel <47607756+lgiacome@users.noreply.github.com>

* added calls to MarkCells and ComputeFaceExtensions

* fixed issue causing CI test to fail and copied conditionals from
  WarpXInitData.cpp to recompute EB quantities

* Guard cells communication for EB data when re-gridding (#105)

* Add 2D circle EB test (#2538)

* Added embedded_circle test

* Add embedded_circle test files

* Removed diag files

* removed PICMI input file

* Update to use default regression analysis

* Added line breaks for spacing

* Added description

* Fixed benchmark file

* Added load balancing to test

* Commented out load_balancing portion of test. This will be added back in
  once load balancing is fixed.

Co-authored-by: Axel Huebl

* Added guard cells communication for EB data in regridding

Co-authored-by: Kevin Z. Zhu <86268612+KZhu-ME@users.noreply.github.com>
Co-authored-by: Axel Huebl

* moved all EB grid data calculations to a new function InitializeEBGridData()
  which is now called by both WarpX::InitLevelData and WarpX::RemakeLevel

* Fix typo in doc string.

Co-authored-by: Phil Miller

Co-authored-by: Lorenzo Giacomel <47607756+lgiacome@users.noreply.github.com>
Co-authored-by: Kevin Z. Zhu <86268612+KZhu-ME@users.noreply.github.com>
Co-authored-by: Axel Huebl
Co-authored-by: Phil Miller
---
 .../ParticleBoundaryScrape/PICMI_inputs_scrape.py | 18 +++++++++++++-----
 1 file changed, 13 insertions(+), 5 deletions(-)

(limited to 'Examples/Modules/ParticleBoundaryScrape/PICMI_inputs_scrape.py')

diff --git a/Examples/Modules/ParticleBoundaryScrape/PICMI_inputs_scrape.py b/Examples/Modules/ParticleBoundaryScrape/PICMI_inputs_scrape.py
index 79cf01a57..99dd2ffab 100644
--- a/Examples/Modules/ParticleBoundaryScrape/PICMI_inputs_scrape.py
+++ b/Examples/Modules/ParticleBoundaryScrape/PICMI_inputs_scrape.py
@@ -54,7 +54,7 @@ grid = picmi.Cartesian3DGrid(
     upper_boundary_conditions=['none', 'none', 'none'],
     lower_boundary_conditions_particles=['open', 'open', 'open'],
     upper_boundary_conditions_particles=['open', 'open', 'open'],
-    warpx_max_grid_size = 128
+    warpx_max_grid_size = 32
 )
 
 solver = picmi.ElectromagneticSolver(
@@ -86,7 +86,9 @@ sim = picmi.Simulation(
     solver = solver,
     max_steps = max_steps,
     warpx_embedded_boundary=embedded_boundary,
-    verbose=True
+    verbose=True,
+    warpx_load_balance_intervals=40,
+    warpx_load_balance_efficiency_ratio_threshold=0.9
 )
 
 sim.add_species(
@@ -111,9 +113,12 @@ sim.step(max_steps)
 ################################################
 
 from pywarpx import _libwarpx
+from mpi4py import MPI as mpi
+
+my_id = _libwarpx.libwarpx.warpx_getMyProc()
 
 n = _libwarpx.get_particle_boundary_buffer_size("electrons", 'eb')
-print("Number of electrons in buffer:", n)
+print(f"Number of electrons in buffer (proc #{my_id}): {n}")
 assert n == 612
 
 scraped_steps = _libwarpx.get_particle_boundary_buffer("electrons", 'eb', 'step_scraped', 0)
@@ -121,11 +126,14 @@ for arr in scraped_steps:
     assert all(arr > 40)
 
 weights = _libwarpx.get_particle_boundary_buffer("electrons", 'eb', 'w', 0)
-assert sum(len(arr) for arr in weights) == 612
+n = sum(len(arr) for arr in weights)
+print(f"Number of electrons in this proc's buffer (proc #{my_id}): {n}")
+n_sum = mpi.COMM_WORLD.allreduce(n, op=mpi.SUM)
+assert n_sum == 612
 
 # clear the particle buffer
 _libwarpx.libwarpx.warpx_clearParticleBoundaryBuffer()
 # confirm that the buffer was cleared
 n = _libwarpx.get_particle_boundary_buffer_size("electrons", 'eb')
-print("Number of electrons in buffer:", n)
+print(f"Number of electrons in buffer (proc #{my_id}): {n}")
 assert n == 0
--
cgit v1.2.3
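
Background on the mpi4py changes above: once load balancing is enabled, WarpX may move
grids (and with them the scraped-particle buffers) between MPI ranks, so only the sum
over all ranks is deterministic, which is why the test now reduces the per-rank count
before asserting. Below is a minimal standalone sketch of that reduction pattern,
assuming only that mpi4py is installed; local_count is a hypothetical stand-in for a
per-rank quantity such as the buffer size returned by
_libwarpx.get_particle_boundary_buffer_size("electrons", 'eb') in the test.

    # Sketch: combine per-rank counts into a global total before asserting.
    from mpi4py import MPI

    comm = MPI.COMM_WORLD

    # Hypothetical per-rank value; in the test above this comes from the
    # pywarpx._libwarpx particle boundary buffer wrappers.
    local_count = 0

    # Sum the per-rank values; every rank receives the same global total.
    global_count = comm.allreduce(local_count, op=MPI.SUM)

    # Assert only on the global total; per-rank counts change whenever the
    # load balancer redistributes grids (and their particles) between ranks.
    if comm.Get_rank() == 0:
        print(f"Total scraped particles across {comm.Get_size()} ranks: {global_count}")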