diff --git a/.github/workflows/pytest-petsc.yml b/.github/workflows/pytest-petsc.yml index 2bf55b5049..502e9b2139 100644 --- a/.github/workflows/pytest-petsc.yml +++ b/.github/workflows/pytest-petsc.yml @@ -11,10 +11,12 @@ on: branches: - main - petsc + - biharmonic pull_request: branches: - main - petsc + - biharmonic jobs: pytest: diff --git a/devito/logger.py b/devito/logger.py index 9c1dfa56d2..e5df4bb565 100644 --- a/devito/logger.py +++ b/devito/logger.py @@ -76,9 +76,10 @@ def set_log_level(level, comm=None): used, for example, if one wants to log to one file per rank. """ from devito import configuration + from devito.mpi.distributed import MPI if comm is not None and configuration['mpi']: - if comm.rank != 0: + if comm != MPI.COMM_NULL and comm.rank != 0: logger.removeHandler(stream_handler) logger.addHandler(logging.NullHandler()) else: diff --git a/devito/petsc/iet/passes.py b/devito/petsc/iet/passes.py index 3c73bfd391..b4859dda16 100644 --- a/devito/petsc/iet/passes.py +++ b/devito/petsc/iet/passes.py @@ -52,9 +52,12 @@ def lower_petsc(iet, **kwargs): # Assumption is that all solves are on the same grid if len(unique_grids) > 1: raise ValueError("All PETScSolves must use the same Grid, but multiple found.") + grid = unique_grids.pop() + devito_mpi = kwargs['options'].get('mpi', False) + comm = grid.distributor._obj_comm if devito_mpi else 'PETSC_COMM_WORLD' # Create core PETSc calls (not specific to each PETScSolve) - core = make_core_petsc_calls(objs, **kwargs) + core = make_core_petsc_calls(objs, comm) setup = [] subs = {} @@ -62,7 +65,7 @@ def lower_petsc(iet, **kwargs): for iters, (injectsolve,) in injectsolve_mapper.items(): - builder = Builder(injectsolve, objs, iters, **kwargs) + builder = Builder(injectsolve, objs, iters, comm, **kwargs) setup.extend(builder.solversetup.calls) @@ -108,9 +111,8 @@ def finalize(iet): return iet._rebuild(body=finalize_body) -def make_core_petsc_calls(objs, **kwargs): - call_mpi = petsc_call_mpi('MPI_Comm_size', [objs['comm'], Byref(objs['size'])]) - +def make_core_petsc_calls(objs, comm): + call_mpi = petsc_call_mpi('MPI_Comm_size', [comm, Byref(objs['size'])]) return call_mpi, BlankLine @@ -123,16 +125,18 @@ class Builder: returning subclasses of the objects initialised in __init__, depending on the properties of `injectsolve`. """ - def __init__(self, injectsolve, objs, iters, **kwargs): + def __init__(self, injectsolve, objs, iters, comm, **kwargs): self.injectsolve = injectsolve self.objs = objs self.iters = iters + self.comm = comm self.kwargs = kwargs self.coupled = isinstance(injectsolve.expr.rhs.fielddata, MultipleFieldData) self.args = { 'injectsolve': self.injectsolve, 'objs': self.objs, 'iters': self.iters, + 'comm': self.comm, **self.kwargs } self.args['solver_objs'] = self.objbuilder.solver_objs @@ -190,9 +194,6 @@ def populate_matrix_context(efuncs, objs): ) -# TODO: Devito MPI + PETSc testing -# if kwargs['options']['mpi'] -> communicator = grid.distributor._obj_comm -communicator = 'PETSC_COMM_WORLD' subdms = PointerDM(name='subdms') fields = PointerIS(name='fields') submats = PointerMat(name='submats') @@ -208,7 +209,6 @@ def populate_matrix_context(efuncs, objs): # they are semantically identical. 
objs = frozendict({ 'size': PetscMPIInt(name='size'), - 'comm': communicator, 'err': PetscErrorCode(name='err'), 'block': CallbackMat('block'), 'submat_arr': PointerMat(name='submat_arr'), diff --git a/devito/petsc/iet/routines.py b/devito/petsc/iet/routines.py index ec78054094..9249c7a557 100644 --- a/devito/petsc/iet/routines.py +++ b/devito/petsc/iet/routines.py @@ -934,7 +934,7 @@ def _submat_callback_body(self): ptr = DummyExpr(objs['submat_arr']._C_symbol, Deref(objs['Submats']), init=True) - mat_create = petsc_call('MatCreate', [self.objs['comm'], Byref(objs['block'])]) + mat_create = petsc_call('MatCreate', [sobjs['comm'], Byref(objs['block'])]) mat_set_sizes = petsc_call( 'MatSetSizes', [ @@ -1041,6 +1041,7 @@ def __init__(self, **kwargs): self.injectsolve = kwargs.get('injectsolve') self.objs = kwargs.get('objs') self.sregistry = kwargs.get('sregistry') + self.comm = kwargs.get('comm') self.fielddata = self.injectsolve.expr.rhs.fielddata self.solver_objs = self._build() @@ -1080,6 +1081,7 @@ def _build(self): 'dmda': DM(sreg.make_name(prefix='da'), dofs=len(targets)), 'callbackdm': CallbackDM(sreg.make_name(prefix='dm')), } + base_dict['comm'] = self.comm self._target_dependent(base_dict) return self._extend_build(base_dict) @@ -1235,7 +1237,7 @@ def _setup(self): solver_params = self.injectsolve.expr.rhs.solver_parameters - snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(sobjs['snes'])]) + snes_create = petsc_call('SNESCreate', [sobjs['comm'], Byref(sobjs['snes'])]) snes_set_dm = petsc_call('SNESSetDM', [sobjs['snes'], dmda]) @@ -1261,7 +1263,7 @@ def _setup(self): v for v, dim in zip(target.shape_allocated, target.dimensions) if dim.is_Space ) local_x = petsc_call('VecCreateMPIWithArray', - ['PETSC_COMM_WORLD', 1, local_size, 'PETSC_DECIDE', + [sobjs['comm'], 1, local_size, 'PETSC_DECIDE', field_from_ptr, Byref(sobjs['xlocal'])]) # TODO: potentially also need to set the DM and local/global map to xlocal @@ -1364,11 +1366,12 @@ def _create_dmda_calls(self, dmda): def _create_dmda(self, dmda): objs = self.objs + sobjs = self.solver_objs grid = self.fielddata.grid nspace_dims = len(grid.dimensions) # MPI communicator - args = [objs['comm']] + args = [sobjs['comm']] # Type of ghost nodes args.extend(['DM_BOUNDARY_GHOSTED' for _ in range(nspace_dims)]) @@ -1386,7 +1389,10 @@ def _create_dmda(self, dmda): # Number of degrees of freedom per node args.append(dmda.dofs) # "Stencil width" -> size of overlap + # TODO: Instead, this probably should be + # extracted from fielddata.target._size_outhalo? 
         stencil_width = self.fielddata.space_order
+        args.append(stencil_width)
 
         args.extend([objs['Null']]*nspace_dims)
 
@@ -1409,7 +1415,7 @@ def _setup(self):
 
         solver_params = self.injectsolve.expr.rhs.solver_parameters
 
-        snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(sobjs['snes'])])
+        snes_create = petsc_call('SNESCreate', [sobjs['comm'], Byref(sobjs['snes'])])
 
         snes_set_dm = petsc_call('SNESSetDM', [sobjs['snes'], dmda])
diff --git a/tests/test_petsc.py b/tests/test_petsc.py
index 678b1071c8..3f64a79c82 100644
--- a/tests/test_petsc.py
+++ b/tests/test_petsc.py
@@ -20,6 +20,7 @@
 
 
 @skipif('petsc')
+@pytest.fixture(scope='session', autouse=True)
 def test_petsc_initialization():
     # TODO: Temporary workaround until PETSc is automatically
     # initialized
@@ -28,6 +29,14 @@ def test_petsc_initialization():
     PetscInitialize()
 
 
+@skipif('petsc')
+@pytest.mark.parallel(mode=[1, 2, 4, 6])
+def test_petsc_initialization_parallel(mode):
+    configuration['compiler'] = 'custom'
+    os.environ['CC'] = 'mpicc'
+    PetscInitialize()
+
+
 @skipif('petsc')
 def test_petsc_local_object():
     """
@@ -1311,3 +1320,60 @@ def define(self, dimensions):
        + 'MATOP_MULT,(void (*)(void))J00_MatMult0)' in str(create)
 
     # TODO: Test mixed, time dependent solvers
+
+
+class TestMPI:
+    # TODO: Add test for DMDACreate() in parallel
+
+    @pytest.mark.parametrize('nx, unorm', [
+        (17, 7.441506654790017),
+        (33, 10.317652759863675),
+        (65, 14.445123374862874),
+        (129, 20.32492895656658),
+        (257, 28.67050632840985)
+    ])
+    @skipif('petsc')
+    @pytest.mark.parallel(mode=[2, 4, 8])
+    def test_laplacian_1d(self, nx, unorm, mode):
+        """Solve -u.laplace = exp(x) on [0, 1] with Dirichlet BCs, run
+        under MPI, and check norm(u) against a sequential reference."""
+        configuration['compiler'] = 'custom'
+        os.environ['CC'] = 'mpicc'
+        PetscInitialize()
+
+        class SubSide(SubDomain):
+            def __init__(self, side='left', grid=None):
+                self.side = side
+                self.name = f'sub{side}'
+                super().__init__(grid=grid)
+
+            def define(self, dimensions):
+                x, = dimensions
+                return {x: (self.side, 1)}
+
+        grid = Grid(shape=(nx,), dtype=np.float64)
+        sub1, sub2 = [SubSide(side=s, grid=grid) for s in ('left', 'right')]
+
+        u = Function(name='u', grid=grid, space_order=2)
+        f = Function(name='f', grid=grid, space_order=2)
+
+        u0 = Constant(name='u0', value=-1.0, dtype=np.float64)
+        u1 = Constant(name='u1', value=-np.exp(1.0), dtype=np.float64)
+
+        eqn = Eq(-u.laplace, f, subdomain=grid.interior)
+
+        X = np.linspace(0, 1.0, nx).astype(np.float64)
+        f.data[:] = np.float64(np.exp(X))
+
+        # Create boundary condition expressions using subdomains
+        bcs = [EssentialBC(u, u0, subdomain=sub1)]
+        bcs += [EssentialBC(u, u1, subdomain=sub2)]
+
+        petsc = PETScSolve([eqn] + bcs, target=u, solver_parameters={'ksp_rtol': 1e-10})
+
+        op = Operator(petsc, language='petsc')
+        op.apply()
+
+        # Expected norm computed "manually" from sequential run
+        # TODO: decide what rtol and atol should be used here
+        assert np.isclose(norm(u), unorm, rtol=1e-13, atol=1e-13)
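
For reviewers who want to exercise the Devito MPI + PETSc path outside pytest, below is a minimal standalone sketch of the problem covered by TestMPI.test_laplacian_1d. It is a sketch, not part of the patch: the import locations of PETScSolve, EssentialBC and PetscInitialize are assumptions (the import block of tests/test_petsc.py is not visible in this diff), and it assumes the script is launched with DEVITO_MPI=1 under mpirun so that lower_petsc passes grid.distributor._obj_comm to the PETSc calls instead of PETSC_COMM_WORLD.

# Standalone sketch of the 1D Poisson solve used in TestMPI.test_laplacian_1d.
# Run with e.g.:  DEVITO_MPI=1 mpirun -n 4 python poisson1d.py
import os
import numpy as np

from devito import (Grid, Function, Constant, Eq, Operator, SubDomain,
                    configuration, norm)
# NOTE: assumed import path; the test file's imports are not shown in this diff
from devito.petsc import PETScSolve, EssentialBC, PetscInitialize

configuration['compiler'] = 'custom'   # as in the tests: compile with mpicc
os.environ['CC'] = 'mpicc'
PetscInitialize()


class SubSide(SubDomain):
    """One-point subdomain on the left or right edge of a 1D grid."""
    def __init__(self, side='left', grid=None):
        self.side = side
        self.name = f'sub{side}'
        super().__init__(grid=grid)

    def define(self, dimensions):
        x, = dimensions
        return {x: (self.side, 1)}


nx = 65
grid = Grid(shape=(nx,), dtype=np.float64)
sub1, sub2 = [SubSide(side=s, grid=grid) for s in ('left', 'right')]

u = Function(name='u', grid=grid, space_order=2)
f = Function(name='f', grid=grid, space_order=2)
f.data[:] = np.exp(np.linspace(0, 1.0, nx))

# -u'' = exp(x) on the interior; Dirichlet data pinned on the two edge points
u0 = Constant(name='u0', value=-1.0, dtype=np.float64)
u1 = Constant(name='u1', value=-np.exp(1.0), dtype=np.float64)
eqn = Eq(-u.laplace, f, subdomain=grid.interior)
bcs = [EssentialBC(u, u0, subdomain=sub1), EssentialBC(u, u1, subdomain=sub2)]

petsc = PETScSolve([eqn] + bcs, target=u, solver_parameters={'ksp_rtol': 1e-10})
op = Operator(petsc, language='petsc')
op.apply()

print(norm(u))  # 14.445123374862874 in the sequential reference run for nx=65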