Skip to content

Commit 34ca8c3

Browse files
Add a fusion rewrite for CAReduces with Elemwise inputs
1 parent f3c4b26 commit 34ca8c3

File tree

3 files changed

+135
-3
lines changed

3 files changed

+135
-3
lines changed

aesara/scalar/basic.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -4048,7 +4048,7 @@ def __init__(self, inputs, outputs):
40484048

40494049
@property
def fn(self):
    """Compiled inner function of this ``Composite``.

    Always ``None``. NOTE(review): this commit changed the body from
    ``return self._fn`` to ``return None`` — presumably to disable a stale
    cached-function path; confirm against the callers of ``Composite.fn``.
    """
    return None
40524052

40534053
@property
40544054
def inner_inputs(self):

aesara/tensor/rewriting/elemwise.py

+62-2
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,16 @@
1111
from aesara.graph.basic import Apply, Constant, io_toposort
1212
from aesara.graph.features import ReplaceValidate
1313
from aesara.graph.op import compute_test_value, get_test_value
14-
from aesara.graph.rewriting.basic import GraphRewriter, copy_stack_trace, node_rewriter
14+
from aesara.graph.rewriting.basic import (
15+
GraphRewriter,
16+
copy_stack_trace,
17+
in2out,
18+
node_rewriter,
19+
)
1520
from aesara.graph.rewriting.db import SequenceDB
1621
from aesara.graph.utils import InconsistencyError, MethodNotDefined, TestValueError
1722
from aesara.tensor.basic import MakeVector, alloc, cast, get_scalar_constant_value
18-
from aesara.tensor.elemwise import DimShuffle, Elemwise
23+
from aesara.tensor.elemwise import CAReduce, DimShuffle, Elemwise
1924
from aesara.tensor.exceptions import NotScalarConstantError
2025
from aesara.tensor.rewriting.basic import register_canonicalize, register_specialize
2126
from aesara.tensor.shape import shape_padleft
@@ -944,3 +949,58 @@ def local_useless_composite(fgraph, node):
944949
c = aes.Composite(inputs=comp.inputs, outputs=new_outputs)
945950
e = Elemwise(scalar_op=c)(*node.inputs, return_list=True)
946951
return dict(zip([node.outputs[i] for i in idx], e))
952+
953+
954+
@node_rewriter([CAReduce])
def local_careduce_fusion(fgraph, node):
    """Fuse a `CAReduce` applied to an `Elemwise`."""

    (reduced_input,) = node.inputs
    ew_apply = reduced_input.owner

    # Only fire when the reduced value is itself produced by an `Elemwise`.
    if ew_apply is None or not isinstance(ew_apply.op, Elemwise):
        return False

    ew_inputs = ew_apply.inputs
    ew_outputs = ew_apply.outputs

    # TODO: Implement the multiple inputs case
    if len(ew_inputs) > 1 or len(ew_outputs) > 1:
        return False

    # Fusing would duplicate the element-wise work if its output is
    # consumed anywhere else in the graph.
    if len(fgraph.clients[ew_outputs[0]]) > 1:
        return False

    axis = node.op.axis
    reduce_scalar_op = node.op.scalar_op
    ew_scalar_op = ew_apply.op.scalar_op

    # Build scalar stand-ins for the `Elemwise` inputs and thread them
    # through the element-wise scalar op.
    inner_inputs = [
        aes.get_scalar_type(inp.type.dtype).make_variable() for inp in ew_inputs
    ]
    inner_ew_out = ew_scalar_op(*inner_inputs)
    # This input represents the previous value in the `CAReduce` binary reduction
    acc_input = inner_ew_out.type()
    inner_outputs = [reduce_scalar_op(acc_input, inner_ew_out)]

    fused_scalar_op = aes.Composite(
        inputs=[acc_input] + inner_inputs, outputs=inner_outputs
    )

    # The fused `Op` needs to look and behave like a `BinaryScalarOp`
    # TODO: Generate a new `type` and make this relationship official?
    fused_scalar_op.identity = reduce_scalar_op.identity
    fused_scalar_op.nin = 2
    fused_scalar_op.nout = 1

    # NOTE(review): a plain `CAReduce` is constructed here, so any
    # `dtype`/`acc_dtype` configuration carried by the original reduction
    # op may not be preserved — confirm for subclasses like `Sum`.
    fused_reduce_op = CAReduce(fused_scalar_op, axis)

    return [fused_reduce_op(*ew_inputs)]
999+
1000+
1001+
# Register the rewrite in the global optimization database under the
# "fusion" tag so it runs with the other fusion passes.
compile.optdb.register(  # type: ignore
    "local_careduce_fusion",
    in2out(local_careduce_fusion),
    "fusion",
    position=49,
)

tests/tensor/rewriting/test_elemwise.py

+72
Original file line numberDiff line numberDiff line change
@@ -1113,6 +1113,78 @@ def test_test_values(self, test_value):
11131113
f.maker.fgraph.outputs[0].tag.test_value, np.c_[[2.0]]
11141114
)
11151115

@pytest.mark.parametrize("linker", ["cvm", "py"])
@pytest.mark.parametrize("axis", [None, 0, 1, (0, 1), (0, 1, 2)])
def test_CAReduce_single_input(self, linker, axis):
    """Make sure that `CAReduce` and `Elemwise` fusions work with a single input."""
    mode = Mode(linker=linker)
    mode._optimizer = mode._optimizer.including(
        "local_careduce_fusion",
        "canonicalize",
        "inplace",
    )

    x = tensor("floatX", shape=(None, None, None), name="x")
    out = exp(x).sum(axis=axis)

    compiled = function([x], out, mode=mode)

    # The whole graph should have collapsed into a single fused node.
    (fused_node,) = compiled.maker.fgraph.toposort()
    assert isinstance(fused_node.op.scalar_op, aes.basic.Composite)

    rng = np.random.default_rng(2320)
    x_test = rng.random((4, 3, 2), dtype=config.floatX)

    expected = np.exp(x_test).sum(axis=axis)
    result = compiled(x_test)
    assert result.shape == expected.shape
    assert np.allclose(result, expected)

    # `Elemwise`s with more than one client shouldn't be rewritten
    x = tensor("floatX", shape=(None, None, None), name="x")
    exp_x = exp(x)
    out = exp_x.sum(axis=axis) + exp(x)

    compiled = function([x], out, mode=mode)
    assert not any(
        isinstance(n.op.scalar_op, aes.basic.Composite)
        for n in compiled.maker.fgraph.toposort()
        if hasattr(n.op, "scalar_op")
    )
1158+
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize("linker", ["cvm", "py"])
@pytest.mark.parametrize("axis", [None, 0, 1, (0, 1), (0, 1, 2)])
def test_CAReduce_multiple_inputs(self, linker, axis):
    """Make sure that `CAReduce` and `Elemwise` fusions work with multiple inputs."""
    mode = Mode(linker=linker)
    mode._optimizer = mode._optimizer.including(
        "local_careduce_fusion",
        "canonicalize",
        "inplace",
    )

    x = tensor("floatX", shape=(None, None, None), name="x")
    y = tensor("floatX", shape=(None, None, None), name="y")
    out = (x + y).sum(axis=axis)

    compiled = function([x, y], out, mode=mode)

    # Expect a single fused node once the multi-input case is supported.
    (fused_node,) = compiled.maker.fgraph.toposort()
    assert isinstance(fused_node.op.scalar_op, aes.basic.Composite)

    rng = np.random.default_rng(2320)
    x_test = rng.random((4, 3, 2), dtype=config.floatX)
    y_test = rng.random((4, 3, 2), dtype=config.floatX)

    expected = (x_test + y_test).sum(axis=axis)
    result = compiled(x_test, y_test)
    assert result.shape == expected.shape
    assert np.allclose(result, expected)
1187+
11161188

11171189
class TimesN(aes.basic.UnaryScalarOp):
11181190
"""

0 commit comments

Comments (0)