Commit

rm detach
ForFishes committed Aug 5, 2021
1 parent 4cc3d9a commit cf0da31
Showing 1 changed file with 12 additions and 12 deletions.
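In short, the commit removes the .detach() call from every tensor passed to send_partial, recv_partial, and allgather_partial inside _p2p_helper, so those helpers now receive the original tensor objects. The repeated before/after pattern, condensed from the hunks below (trailing arguments elided):

    # before: a detached view of the tensor was handed to the helper
    send_partial(d.detach(), dst=0, nranks=mp_degree, rank_id=mp_rank, ...)
    # after: the tensor itself is passed
    send_partial(d, dst=0, nranks=mp_degree, rank_id=mp_rank, ...)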
@@ -257,7 +257,7 @@ def _p2p_helper(tensor_send_next, tensor_send_prev, recv_prev, recv_next):
             for d in tensor_send_prev:
                 paddle.distributed.wait(d, use_calc_stream=True)
                 send_partial(
-                    d.detach(),
+                    d,
                     dst=0,
                     nranks=mp_degree,
                     rank_id=mp_rank,
@@ -266,7 +266,7 @@ def _p2p_helper(tensor_send_next, tensor_send_prev, recv_prev, recv_next):
         else:
             paddle.distributed.wait(tensor_send_prev, use_calc_stream=True)
             send_partial(
-                tensor_send_prev.detach(),
+                tensor_send_prev,
                 dst=0,
                 nranks=mp_degree,
                 rank_id=mp_rank,
@@ -277,28 +277,28 @@ def _p2p_helper(tensor_send_next, tensor_send_prev, recv_prev, recv_next):
         if isinstance(tensor_recv_prev, tuple):
             for d in tensor_recv_prev:
                 recv_partial(
-                    d.detach(),
+                    d,
                     src=0,
                     nranks=mp_degree,
                     rank_id=mp_rank,
                     group=_hcg.recv_prev_group,
                     use_calc_stream=True)
                 allgather_partial(
-                    d.detach(),
+                    d,
                     nranks=mp_degree,
                     rank_id=mp_rank,
                     group=mp_group,
                     use_calc_stream=True)
         else:
             recv_partial(
-                tensor_recv_prev.detach(),
+                tensor_recv_prev,
                 src=0,
                 nranks=mp_degree,
                 rank_id=mp_rank,
                 group=_hcg.recv_prev_group,
                 use_calc_stream=True)
             allgather_partial(
-                tensor_recv_prev.detach(),
+                tensor_recv_prev,
                 nranks=mp_degree,
                 rank_id=mp_rank,
                 group=mp_group,
                 use_calc_stream=True)
@@ -309,7 +309,7 @@ def _p2p_helper(tensor_send_next, tensor_send_prev, recv_prev, recv_next):
             for d in tensor_send_next:
                 paddle.distributed.wait(d, use_calc_stream=True)
                 send_partial(
-                    d.detach(),
+                    d,
                     dst=1,
                     nranks=mp_degree,
                     rank_id=mp_rank,
@@ -318,7 +318,7 @@ def _p2p_helper(tensor_send_next, tensor_send_prev, recv_prev, recv_next):
         else:
             paddle.distributed.wait(tensor_send_next, use_calc_stream=True)
             send_partial(
-                tensor_send_next.detach(),
+                tensor_send_next,
                 dst=1,
                 nranks=mp_degree,
                 rank_id=mp_rank,
@@ -329,30 +329,30 @@ def _p2p_helper(tensor_send_next, tensor_send_prev, recv_prev, recv_next):
         if isinstance(tensor_recv_next, tuple):
             for d in tensor_recv_next:
                 recv_partial(
-                    d.detach(),
+                    d,
                     src=1,
                     nranks=mp_degree,
                     rank_id=mp_rank,
                     group=_hcg.recv_next_group,
                     use_calc_stream=True)
                 allgather_partial(
-                    d.detach(),
+                    d,
                     nranks=mp_degree,
                     rank_id=mp_rank,
                     group=mp_group,
                     use_calc_stream=True)

         else:
             recv_partial(
-                tensor_recv_next.detach(),
+                tensor_recv_next,
                 src=1,
                 nranks=mp_degree,
                 rank_id=mp_rank,
                 group=_hcg.recv_next_group,
                 use_calc_stream=True)

             allgather_partial(
-                tensor_recv_next.detach(),
+                tensor_recv_next,
                 nranks=mp_degree,
                 rank_id=mp_rank,
                 group=mp_group,
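Two notes for readers outside the Paddle codebase. First, on what the helpers do: judging only from the arguments visible above (nranks=mp_degree, rank_id=mp_rank, group=mp_group, and the recv_prev/recv_next groups), send_partial and recv_partial appear to move a single model-parallel rank's slice of a tensor across the pipeline-stage boundary, after which allgather_partial reassembles the full tensor within the model-parallel group. Below is a minimal standalone sketch of that pattern written against public paddle.distributed APIs; the function bodies, names, and slicing scheme are assumptions for illustration, not the actual Paddle implementation:

    import math
    import paddle
    import paddle.distributed as dist

    def send_partial_sketch(tensor, dst, nranks, rank_id, group):
        # Send only this model-parallel rank's 1/nranks slice of the tensor.
        step = math.prod(tensor.shape) // nranks
        shard = paddle.flatten(tensor)[rank_id * step:(rank_id + 1) * step]
        dist.send(shard, dst=dst, group=group)

    def recv_partial_sketch(shape, dtype, src, nranks, group):
        # Receive one rank's slice of the peer tensor into a fresh buffer.
        step = math.prod(shape) // nranks
        shard = paddle.empty([step], dtype=dtype)
        dist.recv(shard, src=src, group=group)
        return shard

    def allgather_partial_sketch(shard, full_shape, group):
        # Reassemble the full tensor by gathering every rank's slice
        # across the model-parallel group.
        parts = []
        dist.all_gather(parts, shard, group=group)
        return paddle.concat(parts).reshape(full_shape)

Second, on the change itself: the commit message gives no rationale, but in Paddle's dynamic graph Tensor.detach() returns a new tensor that shares storage with the source, so for these in-place communication calls passing the tensor directly should be behaviorally equivalent while skipping the construction of a throwaway detached view on every send and receive. That reading is an inference from the API semantics, not something stated in the commit.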

1 comment on commit cf0da31

@paddle-bot-old

Congratulations! Your pull request passed all required CI. You can ask reviewer(s) to approve and merge. 🎉
