Commit 39e92b9

Temporarily fix bug for backward transpiler when using parallel_do operator.
qingqing01 committed Mar 21, 2018
1 parent 7c041e4 commit 39e92b9
Showing 2 changed files with 16 additions and 4 deletions.
16 changes: 14 additions & 2 deletions python/paddle/fluid/backward.py
@@ -307,16 +307,28 @@ def _append_backward_ops_(block,
             sub_block = program.block(op.block_attr("sub_block"))
             grad_sub_block = program.create_block()
             grad_sub_block.set_forward_block_idx(sub_block.idx)
+
+            all_vars = op.block.vars
+            target_vars = [all_vars[name] for name in op.output_arg_names]
+            no_grad_set = copy.copy(no_grad_dict[sub_block.idx])
+            new_no_grad_dict = _get_stop_gradients_(sub_block.program)
+            new_no_grad_dict[0].update(map(_append_grad_suffix_, no_grad_set))
+            block_no_grad_set = set(
+                map(_strip_grad_suffix_, new_no_grad_dict[0]))
+            op_path = _find_op_path_(sub_block, target_vars, [],
+                                     block_no_grad_set)
+            no_grad_dict[0].update(map(_append_grad_suffix_, block_no_grad_set))
+
             cb = _callback_lookup_(op)
             if cb is not None:
                 if callbacks is None:
                     new_callbacks = [cb]
                 else:
                     new_callbacks = callbacks + [_callback_lookup_(op)]
-                _append_backward_ops_(sub_block, sub_block.ops, grad_sub_block,
+                _append_backward_ops_(sub_block, op_path, grad_sub_block,
                                       no_grad_dict, grad_to_var, new_callbacks)
             else:
-                _append_backward_ops_(sub_block, sub_block.ops, grad_sub_block,
+                _append_backward_ops_(sub_block, op_path, grad_sub_block,
                                       no_grad_dict, grad_to_var, callbacks)
 
             program.rollback()
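Note on the backward.py change: instead of generating gradient ops for every
op in the parallel_do sub-block (sub_block.ops), the transpiler now first
computes op_path, the subset of ops that actually lead to the sub-block's
output variables, skipping variables whose gradients are stopped. Below is a
minimal runnable sketch of that pruning idea, using a simplified Op tuple in
place of Paddle's op descriptors; it illustrates the intent of
_find_op_path_, not its actual implementation.

    from collections import namedtuple

    # Hypothetical stand-in for a framework op: a type name plus
    # input/output variable names.
    Op = namedtuple("Op", ["type", "inputs", "outputs"])

    def find_op_path_sketch(ops, targets, no_grad_set):
        """Keep only the ops that (transitively) produce the target vars."""
        needed = set(targets)
        path = []
        for op in reversed(ops):  # walk backward from the targets
            if not any(out in needed for out in op.outputs):
                continue  # contributes nothing to the targets: prune it
            path.append(op)
            # Inputs of a kept op become required in turn, unless the
            # gradient is explicitly stopped at that variable.
            needed.update(i for i in op.inputs if i not in no_grad_set)
        path.reverse()  # restore forward execution order
        return path

    ops = [
        Op("mul", ["x", "w"], ["xw"]),
        Op("dropout", ["noise"], ["noise_out"]),  # unrelated to the target
        Op("elementwise_add", ["xw", "b"], ["out"]),
    ]
    print([op.type for op in find_op_path_sketch(ops, ["out"], {"w"})])
    # -> ['mul', 'elementwise_add']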
4 changes: 2 additions & 2 deletions python/paddle/fluid/layers/detection.py
@@ -129,13 +129,11 @@ class number, M is number of bounding boxes. For each category
         prior_box_var=prior_box_var,
         target_box=loc,
         code_type='decode_center_size')
-
     old_shape = scores.shape
     scores = ops.reshape(x=scores, shape=(-1, old_shape[-1]))
     scores = nn.softmax(input=scores)
     scores = ops.reshape(x=scores, shape=old_shape)
     scores = nn.transpose(scores, perm=[0, 2, 1])
-
     nmsed_outs = helper.create_tmp_variable(dtype=decoded_box.dtype)
     helper.append_op(
         type="multiclass_nms",
@@ -695,6 +693,8 @@ def _prior_box_(input,
             outputs={"Boxes": box,
                      "Variances": var},
             attrs=attrs, )
+        box.stop_gradient = True
+        var.stop_gradient = True
         return box, var
 
     def _reshape_with_axis_(input, axis=1):
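The two added lines are the substance of the detection.py change: prior boxes
and their variances are computed from configuration, not learned, so marking
them stop_gradient=True puts them in the no-grad set collected by
_get_stop_gradients_, which the new backward op-path code above consumes. A
minimal sketch of the general mechanism, assuming the fluid API of that era;
the names and shapes here are illustrative, not from this commit:

    import paddle.fluid as fluid

    x = fluid.layers.data(name='x', shape=[13], dtype='float32')
    hidden = fluid.layers.fc(input=x, size=8)
    hidden.stop_gradient = True  # treat this result as a constant
    out = fluid.layers.fc(input=hidden, size=1)
    loss = fluid.layers.mean(out)
    fluid.backward.append_backward(loss)
    # No gradients should flow through `hidden` back to the first fc layer:
    # its sub-graph is excluded from the backward pass.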
