Skip to content

Commit

Permalink
[TOPI][x86] Pad schedule improvement.
Browse files Browse the repository at this point in the history
  • Loading branch information
anijain2305 committed Jan 29, 2020
1 parent 55d8192 commit 5dd2f54
Show file tree
Hide file tree
Showing 4 changed files with 62 additions and 2 deletions.
3 changes: 2 additions & 1 deletion python/tvm/relay/op/nn/_nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -665,7 +665,8 @@ def compute_upsampling3d(attrs, inputs, out_dtype, target):
coordinate_transformation_mode)]

# pad
reg.register_schedule("nn.pad", schedule_broadcast)
schedule_pad = reg.schedule_pad
reg.register_schedule("nn.pad", schedule_pad)

# mirror_pad
reg.register_schedule("nn.mirror_pad", schedule_broadcast)
Expand Down
8 changes: 7 additions & 1 deletion python/tvm/relay/op/op.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,11 +301,17 @@ def schedule_injective(attrs, outputs, target):


def schedule_concatenate(attrs, outputs, target):
"""Generic schedule for concatinate."""
"""Generic schedule for concatenate."""
with target:
return topi.generic.schedule_concatenate(outputs)


def schedule_pad(attrs, outputs, target):
    """Generic schedule for the pad operator.

    Dispatches to the target-specific TOPI ``schedule_pad`` implementation
    by entering ``target`` as the active target scope.

    Parameters
    ----------
    attrs : Attrs
        Operator attributes (unused by the generic dispatcher).
    outputs : list of Tensor
        Output tensors of the pad computation.
    target : Target
        The compilation target used to select the schedule implementation.

    Returns
    -------
    Schedule
        The computation schedule for the pad op.
    """
    # The `with target:` scope is what lets the generic_func below pick
    # the registered target-specific implementation (e.g. the x86 one).
    with target:
        sch = topi.generic.schedule_pad(outputs)
        return sch


__DEBUG_COUNTER__ = 0

def debug(expr, debug_func=None):
Expand Down
17 changes: 17 additions & 0 deletions topi/python/topi/generic/injective.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,5 +81,22 @@ def schedule_concatenate(outs):
"""
return schedule_injective(outs)

@tvm.target.generic_func
def schedule_pad(outs):
    """Generic (fallback) schedule for the pad op.

    Targets that do not register a specialized implementation fall back
    to the plain injective schedule.

    Parameters
    ----------
    outs : Array of Tensor
        The computation graph description of pad in the format of an
        array of tensors.

    Returns
    -------
    sch : Schedule
        The computation schedule for the op.
    """
    sch = schedule_injective(outs)
    return sch

schedule_elemwise = schedule_injective
schedule_broadcast = schedule_injective
36 changes: 36 additions & 0 deletions topi/python/topi/x86/injective.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,5 +117,41 @@ def vectorize(sch, tensor, vectorize_limit):
s[x].parallel(s[x].op.axis[0])
return s

@generic.schedule_pad.register(["cpu"])
def schedule_pad(outs):
    """x86 schedule for the pad op.

    Inlines the injective stages and parallelizes the outer loops of the
    first output, with the fusion strategy chosen by the output rank.

    Parameters
    ----------
    outs : Array of Tensor
        The computation graph description of pad in the format of an
        array of tensors.

    Returns
    -------
    sch : Schedule
        The computation schedule for the op.
    """
    if isinstance(outs, tvm.tensor.Tensor):
        outs = [outs]
    sch = tvm.create_schedule([tensor.op for tensor in outs])
    tvm.schedule.AutoInlineInjective(sch)

    out = outs[0]
    axes = sch[out].op.axis
    ndim = len(axes)
    if ndim > 5:
        # Rank > 5: fuse the three outermost axes and spread them across cores.
        fused = sch[out].fuse(axes[0], axes[1], axes[2])
        sch[out].parallel(fused)
    elif ndim == 5:
        # Rank 5 — presumably NCHWc layout (TODO confirm against callers).
        # Parallelize the fused H and W dimensions across cores; per the
        # original change this reduces branch divergence from the pad
        # boundary condition.
        n, channel_outer, h, w, channel_inner = axes
        sch[out].reorder(h, w, n, channel_outer, channel_inner)
        fused = sch[out].fuse(h, w)
        sch[out].parallel(fused)
    elif ndim >= 3:
        # Rank 3 or 4: fuse the two outermost axes before parallelizing.
        fused = sch[out].fuse(axes[0], axes[1])
        sch[out].parallel(fused)
    elif ndim >= 1:
        # Rank 1 or 2: parallelize the outermost axis as-is.
        sch[out].parallel(axes[0])
    # Rank 0 (scalar output): nothing to parallelize.
    return sch

schedule_elemwise = schedule_injective
schedule_broadcast = schedule_injective

0 comments on commit 5dd2f54

Please sign in to comment.