Commit

cover not implemented cpu code
FeixLiu committed Jul 9, 2021
1 parent 928478d commit d2b0fee
Showing 2 changed files with 25 additions and 2 deletions.
@@ -57,7 +57,30 @@ def test_check_grad(self):
 
 @unittest.skipIf(not core.is_compiled_with_cuda(),
                  "core is not compiled with CUDA")
-class TestDropoutBiasFuseOp1(unittest.TestCase):
+class TestSoftmaxMaskFuseOp1(OpTest):
+    def setUp(self):
+        self.op_type = "softmax_mask_fuse_upper_triangle"
+        x = np.random.random((1, 1, 32, 32))
+        self.inputs = {'X': x}
+        rst = _get_softmax_upper(x)
+        self.outputs = {'Out': rst}
+
+    def test_check_output(self):
+        try:
+            self.check_output_with_place(core.CPUPlace())
+        except NotImplementedError:
+            pass
+
+    def test_check_grad(self):
+        try:
+            self.check_grad_with_place(core.CPUPlace(), ["X"], "Out")
+        except NotImplementedError:
+            pass
+
+
+@unittest.skipIf(not core.is_compiled_with_cuda(),
+                 "core is not compiled with CUDA")
+class TestDropoutBiasFuseOp2(unittest.TestCase):
     # test the python side API for softmax_mask_fuse op
     def setUp(self):
         np.random.seed(123)
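
The new TestSoftmaxMaskFuseOp1 case compares the op's output against _get_softmax_upper, whose body sits outside this hunk. As a rough guide to what such a reference computes, here is a minimal NumPy sketch, assuming the op replaces the strictly upper-triangular part of the last two dimensions with a large negative value and then applies a row-wise softmax (the helper body and the exact masking value are assumptions, not taken from this diff):

import numpy as np

def _get_softmax_upper(x):
    # Mask the strictly upper triangle of the last two dims so that, after
    # softmax, those positions get (near-)zero weight, as in causal attention.
    seq_len = x.shape[-1]
    mask = np.triu(np.ones((seq_len, seq_len), dtype=bool), k=1)
    masked = np.where(mask, -1e4, x)
    # Numerically stable softmax over the last axis.
    shifted = masked - masked.max(axis=-1, keepdims=True)
    exp = np.exp(shifted)
    return exp / exp.sum(axis=-1, keepdims=True)
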
@@ -24,7 +24,7 @@ def softmax_mask_fuse_upper_triangle(x):
     Fuse softmax mask together without even give a mask.
     Under GPT model, the mask is always be a upper triangle
     so we can simply mask the upper triangle part of x to get the mask result
-    :param x: the input x
+    :param x: the input x (rst of QK)
     :return: the result of softmax mask fuse (upper triangle)
     """
     if in_dygraph_mode():
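
For context, a hedged usage sketch of the Python-side API this docstring describes. The import path paddle.incubate.softmax_mask_fuse_upper_triangle is an assumption based on the function name in this file, and the fused kernel runs only on GPU builds here, which is why the new CPU test above simply tolerates NotImplementedError:

import numpy as np
import paddle
from paddle import incubate  # assumed import path for the fused op

# QK^T-style attention scores with shape (batch, heads, seq_len, seq_len).
scores = paddle.to_tensor(
    np.random.random((1, 1, 32, 32)).astype("float32"))

# Fused upper-triangle mask + softmax; on a CPU-only build this is expected
# to raise NotImplementedError, matching the try/except in the new test.
out = incubate.softmax_mask_fuse_upper_triangle(scores)
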

1 comment on commit d2b0fee

@paddle-bot-old

Congratulation! Your pull request passed all required CI. You could ask reviewer(s) to approve and merge. 🎉
