Merged
2 changes: 2 additions & 0 deletions .github/workflows/pytest-petsc.yml
@@ -11,10 +11,12 @@ on:
branches:
- main
- petsc
- biharmonic
pull_request:
branches:
- main
- petsc
- biharmonic

jobs:
pytest:
3 changes: 2 additions & 1 deletion devito/logger.py
@@ -76,9 +76,10 @@ def set_log_level(level, comm=None):
used, for example, if one wants to log to one file per rank.
"""
from devito import configuration
from devito.mpi.distributed import MPI

if comm is not None and configuration['mpi']:
if comm.rank != 0:
if comm != MPI.COMM_NULL and comm.rank != 0:
logger.removeHandler(stream_handler)
logger.addHandler(logging.NullHandler())
else:
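Note: a minimal standalone sketch (using mpi4py directly; illustrative names, not Devito's actual logger module) of why the `MPI.COMM_NULL` guard above is needed — ranks excluded from a sub-communicator receive `MPI.COMM_NULL`, and querying `.rank` on it raises, so the rank check must be skipped for those ranks.

```python
# Standalone illustration (assumed names; not Devito code) of the guarded
# rank check introduced in set_log_level above.
import logging
from mpi4py import MPI

logger = logging.getLogger("sketch")
stream_handler = logging.StreamHandler()
logger.addHandler(stream_handler)


def set_log_level_sketch(comm=None):
    # Mirrors the patched logic: only rank 0 of `comm` keeps the stream
    # handler, but ranks holding MPI.COMM_NULL (i.e. ranks not in the
    # sub-communicator) must not have `.rank` queried at all.
    if comm is not None:
        if comm != MPI.COMM_NULL and comm.rank != 0:
            logger.removeHandler(stream_handler)
            logger.addHandler(logging.NullHandler())


# Example: build a sub-communicator containing only the even ranks;
# Create_group returns MPI.COMM_NULL on the excluded (odd) ranks.
world = MPI.COMM_WORLD
even = world.Get_group().Incl([r for r in range(world.size) if r % 2 == 0])
subcomm = world.Create_group(even)
set_log_level_sketch(subcomm)
```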
20 changes: 10 additions & 10 deletions devito/petsc/iet/passes.py
@@ -52,17 +52,20 @@ def lower_petsc(iet, **kwargs):
# Assumption is that all solves are on the same grid
if len(unique_grids) > 1:
raise ValueError("All PETScSolves must use the same Grid, but multiple found.")
grid = unique_grids.pop()
devito_mpi = kwargs['options'].get('mpi', False)
comm = grid.distributor._obj_comm if devito_mpi else 'PETSC_COMM_WORLD'

# Create core PETSc calls (not specific to each PETScSolve)
core = make_core_petsc_calls(objs, **kwargs)
core = make_core_petsc_calls(objs, comm)

setup = []
subs = {}
efuncs = {}

for iters, (injectsolve,) in injectsolve_mapper.items():

builder = Builder(injectsolve, objs, iters, **kwargs)
builder = Builder(injectsolve, objs, iters, comm, **kwargs)

setup.extend(builder.solversetup.calls)

@@ -108,9 +111,8 @@ def finalize(iet):
return iet._rebuild(body=finalize_body)


def make_core_petsc_calls(objs, **kwargs):
call_mpi = petsc_call_mpi('MPI_Comm_size', [objs['comm'], Byref(objs['size'])])

def make_core_petsc_calls(objs, comm):
call_mpi = petsc_call_mpi('MPI_Comm_size', [comm, Byref(objs['size'])])
return call_mpi, BlankLine


@@ -123,16 +125,18 @@ class Builder:
returning subclasses of the objects initialised in __init__,
depending on the properties of `injectsolve`.
"""
def __init__(self, injectsolve, objs, iters, **kwargs):
def __init__(self, injectsolve, objs, iters, comm, **kwargs):
self.injectsolve = injectsolve
self.objs = objs
self.iters = iters
self.comm = comm
self.kwargs = kwargs
self.coupled = isinstance(injectsolve.expr.rhs.fielddata, MultipleFieldData)
self.args = {
'injectsolve': self.injectsolve,
'objs': self.objs,
'iters': self.iters,
'comm': self.comm,
**self.kwargs
}
self.args['solver_objs'] = self.objbuilder.solver_objs
@@ -190,9 +194,6 @@ def populate_matrix_context(efuncs, objs):
)


# TODO: Devito MPI + PETSc testing
# if kwargs['options']['mpi'] -> communicator = grid.distributor._obj_comm
communicator = 'PETSC_COMM_WORLD'
subdms = PointerDM(name='subdms')
fields = PointerIS(name='fields')
submats = PointerMat(name='submats')
@@ -208,7 +209,6 @@
# they are semantically identical.
objs = frozendict({
'size': PetscMPIInt(name='size'),
'comm': communicator,
'err': PetscErrorCode(name='err'),
'block': CallbackMat('block'),
'submat_arr': PointerMat(name='submat_arr'),
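For reference, a condensed sketch (illustrative helper, not a function that exists in the codebase; the attribute names follow the diff) of the communicator selection that `lower_petsc` now performs before building the solver calls:

```python
# Illustrative helper summarising the comm selection in lower_petsc above.
def select_petsc_comm(grid, options):
    """Pick the communicator handle embedded in the generated PETSc calls."""
    devito_mpi = options.get('mpi', False)
    # With Devito MPI enabled, reuse the grid distributor's communicator so
    # PETSc and Devito agree on the process layout; otherwise emit the global
    # PETSc communicator symbol verbatim into the generated C code.
    return grid.distributor._obj_comm if devito_mpi else 'PETSC_COMM_WORLD'
```

The selected `comm` is then threaded through `make_core_petsc_calls` and every `Builder`, replacing the previous module-level `'PETSC_COMM_WORLD'` entry in `objs`.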
16 changes: 11 additions & 5 deletions devito/petsc/iet/routines.py
@@ -934,7 +934,7 @@ def _submat_callback_body(self):

ptr = DummyExpr(objs['submat_arr']._C_symbol, Deref(objs['Submats']), init=True)

mat_create = petsc_call('MatCreate', [self.objs['comm'], Byref(objs['block'])])
mat_create = petsc_call('MatCreate', [sobjs['comm'], Byref(objs['block'])])

mat_set_sizes = petsc_call(
'MatSetSizes', [
@@ -1041,6 +1041,7 @@ def __init__(self, **kwargs):
self.injectsolve = kwargs.get('injectsolve')
self.objs = kwargs.get('objs')
self.sregistry = kwargs.get('sregistry')
self.comm = kwargs.get('comm')
self.fielddata = self.injectsolve.expr.rhs.fielddata
self.solver_objs = self._build()

@@ -1080,6 +1081,7 @@ def _build(self):
'dmda': DM(sreg.make_name(prefix='da'), dofs=len(targets)),
'callbackdm': CallbackDM(sreg.make_name(prefix='dm')),
}
base_dict['comm'] = self.comm
self._target_dependent(base_dict)
return self._extend_build(base_dict)

@@ -1235,7 +1237,7 @@ def _setup(self):

solver_params = self.injectsolve.expr.rhs.solver_parameters

snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(sobjs['snes'])])
snes_create = petsc_call('SNESCreate', [sobjs['comm'], Byref(sobjs['snes'])])

snes_set_dm = petsc_call('SNESSetDM', [sobjs['snes'], dmda])

@@ -1261,7 +1263,7 @@ def _setup(self):
v for v, dim in zip(target.shape_allocated, target.dimensions) if dim.is_Space
)
local_x = petsc_call('VecCreateMPIWithArray',
['PETSC_COMM_WORLD', 1, local_size, 'PETSC_DECIDE',
[sobjs['comm'], 1, local_size, 'PETSC_DECIDE',
field_from_ptr, Byref(sobjs['xlocal'])])

# TODO: potentially also need to set the DM and local/global map to xlocal
@@ -1364,11 +1366,12 @@ def _create_dmda_calls(self, dmda):

def _create_dmda(self, dmda):
objs = self.objs
sobjs = self.solver_objs
grid = self.fielddata.grid
nspace_dims = len(grid.dimensions)

# MPI communicator
args = [objs['comm']]
args = [sobjs['comm']]

# Type of ghost nodes
args.extend(['DM_BOUNDARY_GHOSTED' for _ in range(nspace_dims)])
@@ -1386,7 +1389,10 @@ def _create_dmda(self, dmda):
# Number of degrees of freedom per node
args.append(dmda.dofs)
# "Stencil width" -> size of overlap
# TODO: Instead, this probably should be
# extracted from fielddata.target._size_outhalo?
stencil_width = self.fielddata.space_order

args.append(stencil_width)
args.extend([objs['Null']]*nspace_dims)

@@ -1409,7 +1415,7 @@ def _setup(self):

solver_params = self.injectsolve.expr.rhs.solver_parameters

snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(sobjs['snes'])])
snes_create = petsc_call('SNESCreate', [sobjs['comm'], Byref(sobjs['snes'])])

snes_set_dm = petsc_call('SNESSetDM', [sobjs['snes'], dmda])

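As a rough guide, the argument list assembled in `_create_dmda` follows the ordering of PETSc's `DMDACreate2d`-style constructors. Below is a sketch for the 2D case (illustrative helper only; the stencil type and global sizes are assumptions, since they sit in the elided part of the hunk). The stencil width currently comes from `space_order`, with the TODO suggesting it be derived from `fielddata.target._size_outhalo` instead.

```python
# Sketch of the DMDACreate2d-style argument ordering: comm, boundary types,
# stencil type, global sizes, procs per dim, dofs, stencil width, lx/ly.
def dmda_create_args_2d(comm, shape, dofs, space_order):
    args = [comm]
    args.extend(['DM_BOUNDARY_GHOSTED', 'DM_BOUNDARY_GHOSTED'])  # ghost nodes
    args.append('DMDA_STENCIL_BOX')                  # stencil type (assumed)
    args.extend(list(shape))                         # global grid sizes M, N
    args.extend(['PETSC_DECIDE', 'PETSC_DECIDE'])    # processes per dimension
    args.append(dofs)                                # degrees of freedom per node
    args.append(space_order)                         # stencil width, i.e. halo size
    args.extend(['NULL', 'NULL'])                    # lx/ly ownership arrays
    return args
```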
66 changes: 66 additions & 0 deletions tests/test_petsc.py
@@ -20,6 +20,7 @@


@skipif('petsc')
@pytest.fixture(scope='session', autouse=True)
def test_petsc_initialization():
Contributor: Is this a fixture or a test?

Contributor Author: A fixture, see comment above

Contributor: Did you merge this without updating the name? Tests start with test_ and are run by pytest, fixtures are not

Contributor Author: I didn't realise you were requesting a change, I will edit it on the biharmonic branch
# TODO: Temporary workaround until PETSc is automatically
# initialized
@@ -28,6 +29,14 @@ def test_petsc_initialization():
PetscInitialize()
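Regarding the naming discussion above: pytest collects any function prefixed with `test_` as a test, so a session-scoped autouse fixture that keeps the prefix is both executed as a test and registered as a fixture. A minimal sketch of the conventional naming (illustrative name, not the final code on the biharmonic branch):

```python
import pytest


@pytest.fixture(scope='session', autouse=True)
def initialize_petsc():
    # No `test_` prefix, so pytest registers this only as a fixture and runs
    # it once per session before any test that requests (or is covered by) it.
    # PetscInitialize() and the compiler setup would go here.
    yield  # any teardown would follow the yield
```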


@skipif('petsc')
@pytest.mark.parallel(mode=[1, 2, 4, 6])
def test_petsc_initialization_parallel(mode):
configuration['compiler'] = 'custom'
os.environ['CC'] = 'mpicc'
PetscInitialize()


@skipif('petsc')
def test_petsc_local_object():
"""
@@ -1311,3 +1320,60 @@ def define(self, dimensions):
+ 'MATOP_MULT,(void (*)(void))J00_MatMult0)' in str(create)

# TODO: Test mixed, time dependent solvers


class TestMPI:
# TODO: Add test for DMDACreate() in parallel

@pytest.mark.parametrize('nx, unorm', [
(17, 7.441506654790017),
(33, 10.317652759863675),
(65, 14.445123374862874),
(129, 20.32492895656658),
(257, 28.67050632840985)
])
@skipif('petsc')
@pytest.mark.parallel(mode=[2, 4, 8])
def test_laplacian_1d(self, nx, unorm, mode):
"""
"""
configuration['compiler'] = 'custom'
os.environ['CC'] = 'mpicc'
PetscInitialize()

class SubSide(SubDomain):
def __init__(self, side='left', grid=None):
self.side = side
self.name = f'sub{side}'
super().__init__(grid=grid)

def define(self, dimensions):
x, = dimensions
return {x: (self.side, 1)}

grid = Grid(shape=(nx,), dtype=np.float64)
sub1, sub2 = [SubSide(side=s, grid=grid) for s in ('left', 'right')]

u = Function(name='u', grid=grid, space_order=2)
f = Function(name='f', grid=grid, space_order=2)

u0 = Constant(name='u0', value=-1.0, dtype=np.float64)
u1 = Constant(name='u1', value=-np.exp(1.0), dtype=np.float64)

eqn = Eq(-u.laplace, f, subdomain=grid.interior)

X = np.linspace(0, 1.0, nx).astype(np.float64)
f.data[:] = np.float64(np.exp(X))

# Create boundary condition expressions using subdomains
bcs = [EssentialBC(u, u0, subdomain=sub1)]
bcs += [EssentialBC(u, u1, subdomain=sub2)]

petsc = PETScSolve([eqn] + bcs, target=u, solver_parameters={'ksp_rtol': 1e-10})

op = Operator(petsc, language='petsc')
op.apply()

# Expected norm computed "manually" from sequential run
# What rtol and atol should be used?
assert np.isclose(norm(u), unorm, rtol=1e-13, atol=1e-13)
Contributor: Rtol and atol can probably be increased to ~1e-8 for CI reliability
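On where the reference norms come from: the exact solution of -u'' = exp(x) with u(0) = -1, u(1) = -exp(1) is u(x) = -exp(x), so the hard-coded `unorm` values can be sanity-checked against the analytical solution evaluated on the grid (assuming `norm(u)` is the unweighted discrete l2 norm of the data); the small remaining difference is the scheme's discretisation error plus the 1e-10 KSP tolerance.

```python
# Sanity check of the reference norms (assumes norm(u) is the unweighted l2
# norm of the grid data; values differ from the recorded unorm only by the
# discretisation and solver error).
import numpy as np

def analytical_norm(nx):
    x = np.linspace(0.0, 1.0, nx)
    return np.linalg.norm(-np.exp(x))

print(analytical_norm(17))   # ~7.441 vs recorded 7.441506654790017
print(analytical_norm(257))  # ~28.67 vs recorded 28.67050632840985
```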