[hybrid] [npu] fit npu nan/inf check (#35171)
FeixLiu authored Sep 2, 2021
1 parent 6e638d7 commit 67ed7e1
Showing 1 changed file with 7 additions and 1 deletion.
python/paddle/fluid/optimizer.py (7 additions & 1 deletion)
@@ -5323,7 +5323,13 @@ def _accumulate_gradients_with_fuse(self, main_block, fp16, fused_size):
"copy_data": False,
"use_align": True,
"dtype": grads[0].dtype,
self._op_role_key: self._op_role.Backward
self._op_role_key: self._op_role.Backward,
# On npu, the nan/inf check login is different with gpu.
# If there are some not initialized sections in the fused var,
# and the value in those sections are nan/inf, it will trigger the nan/inf check.
# To avoid these problematic triggers, set constant is needed for npu
"set_constant": core.is_compiled_with_npu(),
"constant": float(0.0),
})
offset += 1
# For the gradient_merged_fused_var, given a init value during the coalesce op
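For context, here is a minimal sketch of how such a coalesce_tensor op append might look with Paddle's fluid-era Python API. This is not the repository's exact code: the helper name append_coalesce_op and the variables grads / fused_grad are illustrative assumptions; only the attrs mirror the diff above.

import paddle.fluid.core as core

def append_coalesce_op(block, grads, fused_grad, op_role_key, backward_role):
    # Hypothetical helper: fuse per-gradient vars into one contiguous buffer.
    block.append_op(
        type='coalesce_tensor',
        inputs={'Input': grads},
        outputs={'Output': grads, 'FusedOutput': fused_grad},
        attrs={
            "copy_data": False,
            "use_align": True,
            "dtype": grads[0].dtype,
            op_role_key: backward_role,
            # On NPU, zero-fill the whole fused buffer so alignment padding
            # never holds stale nan/inf values that would trip the nan/inf
            # check; on GPU this stays disabled.
            "set_constant": core.is_compiled_with_npu(),
            "constant": float(0.0),
        })

With use_align=True the fused buffer is padded for alignment, which is exactly where uninitialized bytes can appear; setting set_constant writes the given constant (0.0 here) over the entire buffer before any gradients land in it.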
