Description
Actual behavior
@I.ir_module
class Module:
    @R.function
    def main(q: R.Tensor((4, 16, 32, 8), dtype="float32"), k: R.Tensor((4, 8, 32, 8), dtype="float32"), v: R.Tensor((4, 8, 32, 16), dtype="float32"), bias: R.Tensor((4, 32, 16, 8), dtype="float32")) -> R.Tensor((4, 16, 32, 16), dtype="float32"):
        gv: R.Tensor((4, 16, 32, 16), dtype="float32") = R.nn.attention_bias(q, k, v, bias, scale=T.float32(0.10000000000000001), causal_mask="TopLeft", window_size=None)
        return gv
error: module 'tvm.relax.op.nn' has no attribute 'attention_bias'
 --> <str>:9:58
   |
 9 |         gv: R.Tensor((4, 16, 32, 16), dtype="float32") = R.nn.attention_bias(q, k, v, bias, scale=T.float32(0.10000000000000001), causal_mask="TopLeft", window_size=None)
   |                                                          ^^^^^^^^^^^^^^^^^^^
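This looks like a printer/parser asymmetry: when R.nn.attention carries a bias argument, the printed script spells the call as R.nn.attention_bias, but the TVMScript parser resolves R.nn.* names against tvm.relax.op.nn, which only exposes attention. Below is a minimal sketch of that asymmetry; it assumes the printed name corresponds to an operator registered as "relax.nn.attention_bias".

import tvm
from tvm import relax

# Namespace the parser uses for "R.nn.*" calls (per the error message above):
print(hasattr(relax.op.nn, "attention"))       # True
print(hasattr(relax.op.nn, "attention_bias"))  # False -> the parse error above

# Assumption: the printed call maps to an operator registered as
# "relax.nn.attention_bias"; if so, the op exists in the registry even though
# no Python binding for it is exposed under relax.op.nn.
print(tvm.ir.Op.get("relax.nn.attention_bias"))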
Steps to reproduce
irs= """# from tvm.script import ir as I
# from tvm.script import tir as T
# from tvm.script import relax as R
@I.ir_module
class Module:
@R.function
def main(q: R.Tensor((4, 16, 32, 8), dtype="float32"), k: R.Tensor((4, 8, 32, 8), dtype="float32"), v: R.Tensor((4, 8, 32, 16), dtype="float32"), bias: R.Tensor((4, 32, 16, 8), dtype="float32")) -> R.Tensor((4, 16, 32, 16), dtype="float32"):
gv: R.Tensor((4, 16, 32, 16), dtype="float32") = R.nn.attention(q, k, v, bias, scale=T.float32(0.10000000000000001), causal_mask="TopLeft", window_size=None)
return gv
import tvm
mod = tvm.script.from_source(irs)
mod.show()
mod_new = tvm.script.from_source(mod.script()) # crash!
"""