Merged

36 commits
f0483f4
misc: Split Dockerfile.cpu into .cpu and .intel
FabioLuporini Dec 19, 2023
57b99e6
misc: Update file due to incorrect rebase
ZoeLeibowitz Dec 22, 2023
ca20534
misc: Split Dockerfile.cpu into .cpu and .intel
FabioLuporini Dec 19, 2023
15ed63e
misc: Update file due to incorrect rebase
ZoeLeibowitz Dec 22, 2023
bd4fc4b
dsl: Dispatch to sympy.Add not both Add and EvalDerivative
ZoeLeibowitz Jul 10, 2024
09f6fdf
compiler: Create mixin for ThreadCallable and PETScCallable
ZoeLeibowitz Jul 23, 2024
09d9180
types: Edit freesymbols in FieldFromPointer
ZoeLeibowitz Jan 3, 2025
8472b77
clean
ZoeLeibowitz Jan 8, 2025
579045f
trigger wfs for branches off this branch
ZoeLeibowitz Jan 8, 2025
79cf51a
rebase leftover
ZoeLeibowitz Feb 18, 2025
4272da2
rebase fix and remove concretize mapper
ZoeLeibowitz Feb 18, 2025
526449e
rebase leftover
ZoeLeibowitz Feb 18, 2025
4689508
flake8
ZoeLeibowitz Feb 18, 2025
434e424
flake8
ZoeLeibowitz Feb 18, 2025
cfa7218
misc: Split Dockerfile.cpu into .cpu and .intel
FabioLuporini Dec 19, 2023
fd8c53f
misc: Update file due to incorrect rebase
ZoeLeibowitz Dec 22, 2023
d09e654
misc: Split Dockerfile.cpu into .cpu and .intel
FabioLuporini Dec 19, 2023
62a4e1c
misc: Update file due to incorrect rebase
ZoeLeibowitz Dec 22, 2023
0f862ba
dsl: Dispatch to sympy.Add not both Add and EvalDerivative
ZoeLeibowitz Jul 10, 2024
b8de614
compiler: Create mixin for ThreadCallable and PETScCallable
ZoeLeibowitz Jul 23, 2024
82b434b
types: Edit freesymbols in FieldFromPointer
ZoeLeibowitz Jan 3, 2025
8cb18d3
compiler: Use classes
ZoeLeibowitz Jan 9, 2025
da2bbf3
compiler: Switch to using a separate struct and dm for each SNES solve
ZoeLeibowitz Jan 9, 2025
c445140
compiler: Rework petscarrays
ZoeLeibowitz Jan 9, 2025
670e1d2
misc: Clean up
ZoeLeibowitz Jan 9, 2025
ad5fb49
compiler: Timedep abstraction
ZoeLeibowitz Jan 10, 2025
89cc06e
More cleanup, add callbackdm etc
ZoeLeibowitz Jan 12, 2025
2c4a568
Address some comments
ZoeLeibowitz Jan 12, 2025
b83ec92
compiler: Move uxreplace_efuncs into Builder
ZoeLeibowitz Jan 23, 2025
4d75af8
misc: Clean up
ZoeLeibowitz Jan 23, 2025
5f25b0f
misc: Add timeloop abstraction docs
ZoeLeibowitz Jan 25, 2025
37b62b3
misc: Clean up and docs
ZoeLeibowitz Jan 26, 2025
10af16f
misc: Spellings
ZoeLeibowitz Jan 28, 2025
ef7ee34
flake8
ZoeLeibowitz Feb 18, 2025
3b80b8b
Merge pull request #38 from ZoeLeibowitz/cleanup2
ZoeLeibowitz Feb 18, 2025
b989d06
Clean up
ZoeLeibowitz Feb 18, 2025
272 changes: 49 additions & 223 deletions devito/petsc/iet/passes.py
@@ -1,16 +1,13 @@
import cgen as c

from devito.passes.iet.engine import iet_pass
from devito.ir.iet import (Transformer, MapNodes, Iteration, List, BlankLine,
DummyExpr, FindNodes, retrieve_iteration_tree,
filter_iterations)
from devito.symbolics import Byref, Macro, FieldFromComposite
from devito.petsc.types import (PetscMPIInt, DM, Mat, LocalVec, GlobalVec,
KSP, PC, SNES, PetscErrorCode, DummyArg, PetscInt,
StartPtr)
from devito.petsc.iet.nodes import InjectSolveDummy, PETScCall
from devito.petsc.utils import solver_mapper, core_metadata
from devito.petsc.iet.routines import PETScCallbackBuilder
from devito.ir.iet import Transformer, MapNodes, Iteration, BlankLine
from devito.symbolics import Byref, Macro
from devito.petsc.types import (PetscMPIInt, PetscErrorCode)
from devito.petsc.iet.nodes import InjectSolveDummy
from devito.petsc.utils import core_metadata
from devito.petsc.iet.routines import (CallbackBuilder, BaseObjectBuilder, BaseSetup,
Solver, TimeDependent, NonTimeDependent)
from devito.petsc.iet.utils import petsc_call, petsc_call_mpi


@@ -34,54 +31,36 @@ def lower_petsc(iet, **kwargs):

setup = []
subs = {}

# Create a different DMDA for each target with a unique space order
unique_dmdas = create_dmda_objs(targets)
objs.update(unique_dmdas)
for dmda in unique_dmdas.values():
setup.extend(create_dmda_calls(dmda, objs))

builder = PETScCallbackBuilder(**kwargs)
efuncs = {}

for iters, (injectsolve,) in injectsolve_mapper.items():
solver_objs = build_solver_objs(injectsolve, iters, **kwargs)

# Generate the solver setup for each InjectSolveDummy
solver_setup = generate_solver_setup(solver_objs, objs, injectsolve)
setup.extend(solver_setup)
builder = Builder(injectsolve, objs, iters, **kwargs)

# Generate all PETSc callback functions for the target via recursive compilation
matvec_op, formfunc_op, runsolve = builder.make(injectsolve,
objs, solver_objs)
setup.extend([matvec_op, formfunc_op, BlankLine])
# Only Transform the spatial iteration loop
space_iter, = spatial_injectsolve_iter(iters, injectsolve)
subs.update({space_iter: List(body=runsolve)})
setup.extend(builder.solversetup.calls)

# Generate callback to populate main struct object
struct, struct_calls = builder.make_main_struct(unique_dmdas, objs)
setup.extend(struct_calls)
# Transform the spatial iteration loop with the calls to execute the solver
subs.update(builder.solve.mapper)

iet = Transformer(subs).visit(iet)
efuncs.update(builder.cbbuilder.efuncs)

iet = assign_time_iters(iet, struct)
iet = Transformer(subs).visit(iet)

body = core + tuple(setup) + (BlankLine,) + iet.body.body
body = iet.body._rebuild(
init=init, body=body,
frees=(c.Line("PetscCall(PetscFinalize());"),)
frees=(petsc_call('PetscFinalize', []),)
)
iet = iet._rebuild(body=body)
metadata = core_metadata()
efuncs = tuple(builder.efuncs.values())
metadata.update({'efuncs': efuncs})
metadata.update({'efuncs': tuple(efuncs.values())})

return iet, metadata


def init_petsc(**kwargs):
# Initialize PETSc -> for now, assuming all solver options have to be
# specifed via the parameters dict in PETScSolve
# specified via the parameters dict in PETScSolve
# TODO: Are users going to be able to use PETSc command line arguments?
# In firedrake, they have an options_prefix for each solver, enabling the use
# of command line options
@@ -110,201 +89,48 @@ def build_core_objects(target, **kwargs):
}


def create_dmda_objs(unique_targets):
unique_dmdas = {}
for target in unique_targets:
name = 'da_so_%s' % target.space_order
unique_dmdas[name] = DM(name=name, liveness='eager',
stencil_width=target.space_order)
return unique_dmdas


def create_dmda_calls(dmda, objs):
dmda_create = create_dmda(dmda, objs)
dm_setup = petsc_call('DMSetUp', [dmda])
dm_mat_type = petsc_call('DMSetMatType', [dmda, 'MATSHELL'])
dm_get_local_info = petsc_call('DMDAGetLocalInfo', [dmda, Byref(dmda.info)])
return dmda_create, dm_setup, dm_mat_type, dm_get_local_info, BlankLine


def create_dmda(dmda, objs):
no_of_space_dims = len(objs['grid'].dimensions)

# MPI communicator
args = [objs['comm']]

# Type of ghost nodes
args.extend(['DM_BOUNDARY_GHOSTED' for _ in range(no_of_space_dims)])

# Stencil type
if no_of_space_dims > 1:
args.append('DMDA_STENCIL_BOX')

# Global dimensions
args.extend(list(objs['grid'].shape)[::-1])
# No.of processors in each dimension
if no_of_space_dims > 1:
args.extend(list(objs['grid'].distributor.topology)[::-1])

# Number of degrees of freedom per node
args.append(1)
# "Stencil width" -> size of overlap
args.append(dmda.stencil_width)
args.extend([Null for _ in range(no_of_space_dims)])

# The distributed array object
args.append(Byref(dmda))

# The PETSc call used to create the DMDA
dmda = petsc_call('DMDACreate%sd' % no_of_space_dims, args)

return dmda


def build_solver_objs(injectsolve, iters, **kwargs):
target = injectsolve.expr.rhs.target
sreg = kwargs['sregistry']
return {
'Jac': Mat(sreg.make_name(prefix='J_')),
'x_global': GlobalVec(sreg.make_name(prefix='x_global_')),
'x_local': LocalVec(sreg.make_name(prefix='x_local_'), liveness='eager'),
'b_global': GlobalVec(sreg.make_name(prefix='b_global_')),
'b_local': LocalVec(sreg.make_name(prefix='b_local_')),
'ksp': KSP(sreg.make_name(prefix='ksp_')),
'pc': PC(sreg.make_name(prefix='pc_')),
'snes': SNES(sreg.make_name(prefix='snes_')),
'X_global': GlobalVec(sreg.make_name(prefix='X_global_')),
'Y_global': GlobalVec(sreg.make_name(prefix='Y_global_')),
'X_local': LocalVec(sreg.make_name(prefix='X_local_'), liveness='eager'),
'Y_local': LocalVec(sreg.make_name(prefix='Y_local_'), liveness='eager'),
'dummy': DummyArg(sreg.make_name(prefix='dummy_')),
'localsize': PetscInt(sreg.make_name(prefix='localsize_')),
'start_ptr': StartPtr(sreg.make_name(prefix='start_ptr_'), target.dtype),
'true_dims': retrieve_time_dims(iters),
'target': target,
'time_mapper': injectsolve.expr.rhs.time_mapper,
}


def generate_solver_setup(solver_objs, objs, injectsolve):
target = solver_objs['target']

dmda = objs['da_so_%s' % target.space_order]

solver_params = injectsolve.expr.rhs.solver_parameters

snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(solver_objs['snes'])])

snes_set_dm = petsc_call('SNESSetDM', [solver_objs['snes'], dmda])

create_matrix = petsc_call('DMCreateMatrix', [dmda, Byref(solver_objs['Jac'])])

# NOTE: Assumming all solves are linear for now.
snes_set_type = petsc_call('SNESSetType', [solver_objs['snes'], 'SNESKSPONLY'])

snes_set_jac = petsc_call(
'SNESSetJacobian', [solver_objs['snes'], solver_objs['Jac'],
solver_objs['Jac'], 'MatMFFDComputeJacobian', Null]
)

global_x = petsc_call('DMCreateGlobalVector',
[dmda, Byref(solver_objs['x_global'])])

global_b = petsc_call('DMCreateGlobalVector',
[dmda, Byref(solver_objs['b_global'])])

local_b = petsc_call('DMCreateLocalVector',
[dmda, Byref(solver_objs['b_local'])])
class Builder:
"""
This class is designed to support future extensions, enabling
different combinations of solver types, preconditioning methods,
and other functionalities as needed.

snes_get_ksp = petsc_call('SNESGetKSP',
[solver_objs['snes'], Byref(solver_objs['ksp'])])
The class will be extended to accommodate different solver types by
returning subclasses of the objects initialised in __init__,
depending on the properties of `injectsolve`.
"""
def __init__(self, injectsolve, objs, iters, **kwargs):

ksp_set_tols = petsc_call(
'KSPSetTolerances', [solver_objs['ksp'], solver_params['ksp_rtol'],
solver_params['ksp_atol'], solver_params['ksp_divtol'],
solver_params['ksp_max_it']]
)
# Determine the time dependency class
time_mapper = injectsolve.expr.rhs.time_mapper
timedep = TimeDependent if time_mapper else NonTimeDependent
self.timedep = timedep(injectsolve, iters, **kwargs)

ksp_set_type = petsc_call(
'KSPSetType', [solver_objs['ksp'], solver_mapper[solver_params['ksp_type']]]
)
# Objects
self.objbuilder = BaseObjectBuilder(injectsolve, **kwargs)
self.solver_objs = self.objbuilder.solver_objs

ksp_get_pc = petsc_call('KSPGetPC', [solver_objs['ksp'], Byref(solver_objs['pc'])])

# Even though the default will be jacobi, set to PCNONE for now
pc_set_type = petsc_call('PCSetType', [solver_objs['pc'], 'PCNONE'])

ksp_set_from_ops = petsc_call('KSPSetFromOptions', [solver_objs['ksp']])

return (
snes_create,
snes_set_dm,
create_matrix,
snes_set_jac,
snes_set_type,
global_x,
global_b,
local_b,
snes_get_ksp,
ksp_set_tols,
ksp_set_type,
ksp_get_pc,
pc_set_type,
ksp_set_from_ops
)
# Callbacks
self.cbbuilder = CallbackBuilder(
injectsolve, objs, self.solver_objs, timedep=self.timedep,
**kwargs
)

# Solver setup
self.solversetup = BaseSetup(
self.solver_objs, objs, injectsolve, self.cbbuilder
)

def assign_time_iters(iet, struct):
"""
Assign time iterators to the struct within loops containing PETScCalls.
Ensure that assignment occurs only once per time loop, if necessary.
Assign only the iterators that are common between the struct fields
and the actual Iteration.
"""
time_iters = [
i for i in FindNodes(Iteration).visit(iet)
if i.dim.is_Time and FindNodes(PETScCall).visit(i)
]

if not time_iters:
return iet

mapper = {}
for iter in time_iters:
common_dims = [d for d in iter.dimensions if d in struct.fields]
common_dims = [
DummyExpr(FieldFromComposite(d, struct), d) for d in common_dims
]
iter_new = iter._rebuild(nodes=List(body=tuple(common_dims)+iter.nodes))
mapper.update({iter: iter_new})

return Transformer(mapper).visit(iet)


def retrieve_time_dims(iters):
time_iter = [i for i in iters if any(d.is_Time for d in i.dimensions)]
mapper = {}
if not time_iter:
return mapper
for d in time_iter[0].dimensions:
if d.is_Modulo:
mapper[d.origin] = d
elif d.is_Time:
mapper[d] = d
return mapper


def spatial_injectsolve_iter(iter, injectsolve):
spatial_body = []
for tree in retrieve_iteration_tree(iter[0]):
root = filter_iterations(tree, key=lambda i: i.dim.is_Space)[0]
if injectsolve in FindNodes(InjectSolveDummy).visit(root):
spatial_body.append(root)
return spatial_body
# Execute the solver
self.solve = Solver(
self.solver_objs, objs, injectsolve, iters,
self.cbbuilder, timedep=self.timedep
)


Null = Macro('NULL')
void = 'void'


# TODO: Don't use c.Line here?
petsc_func_begin_user = c.Line('PetscFunctionBeginUser;')
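
The new `Builder` class's docstring says it will dispatch to different subclasses based on the properties of `injectsolve`; in this PR the only dispatch implemented is choosing `TimeDependent` versus `NonTimeDependent` from the presence of a `time_mapper`. A minimal, self-contained sketch of that dispatch pattern follows; `Solve` and the two handler classes here are hypothetical stand-ins, not the actual classes from devito.petsc.iet.routines.

# Sketch of the time-dependency dispatch used by Builder (hypothetical
# stand-in classes, assuming only that `injectsolve` exposes a
# `time_mapper` that is falsy for non-time-dependent solves).

class NonTimeDependent:
    def __init__(self, solve):
        self.solve = solve

    def describe(self):
        return "non-time-dependent solve: no time iterators to propagate"


class TimeDependent(NonTimeDependent):
    def describe(self):
        return "time-dependent solve: time iterators are placed in the struct"


class Builder:
    def __init__(self, solve):
        # Mirrors `timedep = TimeDependent if time_mapper else NonTimeDependent`
        # from lower_petsc's Builder.__init__ in this diff.
        cls = TimeDependent if solve.time_mapper else NonTimeDependent
        self.timedep = cls(solve)


class Solve:
    def __init__(self, time_mapper=None):
        self.time_mapper = time_mapper


print(Builder(Solve({"t": 0})).timedep.describe())  # time-dependent path
print(Builder(Solve()).timedep.describe())          # non-time-dependent path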