| author | 2021-05-11 20:23:15 -0700 |
| --- | --- |
| committer | 2021-05-11 20:23:15 -0700 |
| commit | c26fadfcfa9c39ec024ecc0053f6fddb3ffdd163 (patch) |
| tree | 9b7e35a65611e8ba375683cd4e248d49a22eb88a /Python/pywarpx/picmi.py |
| parent | 2cdb5453ddd1a1065fcf4b2daf772fece94e7f99 (diff) |
Feature: Time-dependent Dirichlet boundary conditions for electrostatic simulations (#1761)
* Update copyright notices
* allow specification of boundary potentials at runtime when using Dirichlet boundary conditions in the electrostatic solver (labframe)
* added parsing of the boundary potentials specified at runtime, allowing time dependence through a mathematical expression in t (time); see the sketch after this list
* updated to picmistandard 0.0.14 in order to set the electrostatic solver convergence threshold
* update docs
* various changes requested during PR review
* fixed an issue that caused old tests to break and added a new test for time-varying boundary potentials
* possible fix for the failing time-varying boundary condition test
* changed permissions on the analysis file for the time-varying BCs test
* switched to using yt for data analysis since h5py is not available
* made changes compatible with PR#1730; changed the potential-boundary-setting routine to use the ParallelFor construct and to set all boundaries in a single call
* fixed typo in computePhiRZ
* updated docs and fixed other minor typos
* fixed a bug where the loop over dimensions returned rather than continued when setting boundary potentials
* changed to setting potentials on domain boundaries rather than tilebox boundaries and changed picmi.py to accept boundary potentials
* now using domain.surroundingNodes() to get the proper boundary cells for the physical domain
* fixed typo in variable name specifying z-boundary potential
* changed boundary value parameter for Dirichlet BC to boundary.field_lo/hi and changed setPhiBC() to only loop over the grid points when a boundary value has changed
* switched to specifying potential boundary values through individual inputs of the form boundary.potential_lo/hi_x/y/z, and incorporated the new BC formalism through FieldBoundaryType::Periodic and FieldBoundaryType::PEC rather than Geom(0).isPeriodic(idim)
* removed an incorrect check of whether the potential boundary values are already correct; also changed the input of the space_charge_initialization_2d test to comply with the new boundary condition input parameters, and changed permissions on the analysis_fields.py file for the embedded boundary test since it was failing
* remove line from WarpX-tests.ini that was incorrectly added during upstream merge
* changed input file for relativistic space charge initialization to new boundary condition specification
* fixed outdated comment and updated documentation to reflect that the Dirichlet BCs can also be specified when using the relativistic electrostatic field solver
* moved call to get domain boundaries inside the loop over levels
* cleaned up the code some by using domain.smallEnd and domain.bigEnd rather than lbound and ubound
* added a check that a box contains boundary cells before launching a loop over that box's cells to set the boundary conditions
Co-authored-by: Peter Scherpelz <peter.scherpelz@modernelectron.com>
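For illustration, a minimal pywarpx sketch of the new inputs. The `boundary.potential_lo_z` / `boundary.potential_hi_z` parameters and the `boundary.field_lo/hi` formalism come from the bullets above; the boundary types, numeric values, and the expression in `t` are placeholder assumptions, not taken from the commit:

```python
# Minimal sketch, not from the commit: shows the shape of the new inputs.
# The attribute names mirror the boundary.* parameters introduced here; the
# boundary types, numbers, and the expression are placeholder assumptions.
import pywarpx

# Field boundary types per the new FieldBoundaryType formalism (assumed values)
pywarpx.boundary.field_lo = ['periodic', 'pec']
pywarpx.boundary.field_hi = ['periodic', 'pec']

# Constant potential on the lower z boundary
pywarpx.boundary.potential_lo_z = 0.

# Time-dependent potential on the upper z boundary, parsed at runtime as a
# mathematical expression in t (time)
pywarpx.boundary.potential_hi_z = '150.*sin(2*pi*1.e6*t)'
```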
Diffstat (limited to 'Python/pywarpx/picmi.py')
-rw-r--r-- | Python/pywarpx/picmi.py | 27
1 file changed, 27 insertions(+), 0 deletions(-)
```diff
diff --git a/Python/pywarpx/picmi.py b/Python/pywarpx/picmi.py
index d93396a82..6d1ace18e 100644
--- a/Python/pywarpx/picmi.py
+++ b/Python/pywarpx/picmi.py
@@ -355,6 +355,13 @@ class CylindricalGrid(picmistandard.PICMI_CylindricalGrid):
         self.max_grid_size = kw.pop('warpx_max_grid_size', 32)
         self.blocking_factor = kw.pop('warpx_blocking_factor', None)
 
+        self.potential_xmin = None
+        self.potential_xmax = None
+        self.potential_ymin = None
+        self.potential_ymax = None
+        self.potential_zmin = kw.pop('warpx_potential_lo_z', None)
+        self.potential_zmax = kw.pop('warpx_potential_hi_z', None)
+
     def initialize_inputs(self):
 
         pywarpx.amr.n_cell = self.number_of_cells
@@ -398,6 +405,13 @@ class Cartesian2DGrid(picmistandard.PICMI_Cartesian2DGrid):
         self.max_grid_size = kw.pop('warpx_max_grid_size', 32)
         self.blocking_factor = kw.pop('warpx_blocking_factor', None)
 
+        self.potential_xmin = kw.pop('warpx_potential_lo_x', None)
+        self.potential_xmax = kw.pop('warpx_potential_hi_x', None)
+        self.potential_ymin = None
+        self.potential_ymax = None
+        self.potential_zmin = kw.pop('warpx_potential_lo_z', None)
+        self.potential_zmax = kw.pop('warpx_potential_hi_z', None)
+
     def initialize_inputs(self):
 
         pywarpx.amr.n_cell = self.number_of_cells
@@ -437,6 +451,13 @@ class Cartesian3DGrid(picmistandard.PICMI_Cartesian3DGrid):
         self.max_grid_size = kw.pop('warpx_max_grid_size', 32)
         self.blocking_factor = kw.pop('warpx_blocking_factor', None)
 
+        self.potential_xmin = kw.pop('warpx_potential_lo_x', None)
+        self.potential_xmax = kw.pop('warpx_potential_hi_x', None)
+        self.potential_ymin = kw.pop('warpx_potential_lo_y', None)
+        self.potential_ymax = kw.pop('warpx_potential_hi_y', None)
+        self.potential_zmin = kw.pop('warpx_potential_lo_z', None)
+        self.potential_zmax = kw.pop('warpx_potential_hi_z', None)
+
     def initialize_inputs(self):
 
         pywarpx.amr.n_cell = self.number_of_cells
@@ -545,6 +566,12 @@ class ElectrostaticSolver(picmistandard.PICMI_ElectrostaticSolver):
             pywarpx.warpx.do_electrostatic = 'labframe'
             pywarpx.warpx.self_fields_required_precision = self.required_precision
             pywarpx.warpx.self_fields_max_iters = self.maximum_iterations
+            pywarpx.boundary.potential_lo_x = self.grid.potential_xmin
+            pywarpx.boundary.potential_lo_y = self.grid.potential_ymin
+            pywarpx.boundary.potential_lo_z = self.grid.potential_zmin
+            pywarpx.boundary.potential_hi_x = self.grid.potential_xmax
+            pywarpx.boundary.potential_hi_y = self.grid.potential_ymax
+            pywarpx.boundary.potential_hi_z = self.grid.potential_zmax
 
 
 class GaussianLaser(picmistandard.PICMI_GaussianLaser):
```
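At the PICMI level, the additions above can be exercised roughly as below. This is a sketch, not code from the commit: the cell counts, bounds, boundary-condition strings, and the expression in `t` are assumptions; only the `warpx_potential_*` keywords and the `required_precision` threshold (from picmistandard 0.0.14) are the additions shown in the diff:

```python
# Sketch of the user-facing API added by this commit; all numeric values,
# bounds, and the time-dependent expression are placeholders.
from pywarpx import picmi

grid = picmi.Cartesian2DGrid(
    number_of_cells=[64, 64],
    lower_bound=[0., 0.],
    upper_bound=[0.03, 0.03],
    # 'dirichlet' boundary conditions assumed for this illustration
    lower_boundary_conditions=['dirichlet', 'dirichlet'],
    upper_boundary_conditions=['dirichlet', 'dirichlet'],
    # new keywords from this commit: constants or expressions in t (time)
    warpx_potential_lo_z=0.,
    warpx_potential_hi_z='150.*sin(2*pi*6.78e6*t)',
)

solver = picmi.ElectrostaticSolver(
    grid=grid,
    method='Multigrid',
    # convergence threshold, settable as of picmistandard 0.0.14
    required_precision=1e-6,
)
```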