diff --git a/python/paddle/autograd/ir_backward.py b/python/paddle/autograd/ir_backward.py index 672a6fd8dcc66..8f7b779bbe6f1 100644 --- a/python/paddle/autograd/ir_backward.py +++ b/python/paddle/autograd/ir_backward.py @@ -15,6 +15,7 @@ from __future__ import annotations import logging +import warnings import paddle.pir from paddle.autograd.backward_utils import ( @@ -166,8 +167,8 @@ def prepare_grad_outputs(grad_outputs, outputs, state): % (i, str(grad.shape), i, str(output.shape)) ) if output.dtype != grad.dtype: - raise ValueError( - "The dtype of grad_output[%d] %s should be the same as the dtype of output[%d] %s" + warnings.warn( + "The dtype of grad_output[%d] %s is not the same as the dtype of output[%d] %s" % (i, str(grad.dtype), i, str(output.dtype)) ) feedop = grad.get_defining_op() diff --git a/test/deprecated/book/CMakeLists.txt b/test/deprecated/book/CMakeLists.txt index 8a5589856d073..1f904d38940b0 100644 --- a/test/deprecated/book/CMakeLists.txt +++ b/test/deprecated/book/CMakeLists.txt @@ -9,7 +9,8 @@ foreach(src ${TEST_OPS}) py_test(${src} SRCS ${src}.py) set_tests_properties(${src} PROPERTIES FIXTURES_SETUP ${src}_infer_model) endforeach() -set_tests_properties(test_word2vec_book PROPERTIES TIMEOUT 120) -set_tests_properties(test_recognize_digits PROPERTIES TIMEOUT 120) -set_tests_properties(test_image_classification PROPERTIES TIMEOUT 200) -set_tests_properties(test_fit_a_line PROPERTIES TIMEOUT 120) +set_tests_properties(test_word2vec_book_deprecated PROPERTIES TIMEOUT 120) +set_tests_properties(test_recognize_digits_deprecated PROPERTIES TIMEOUT 120) +set_tests_properties(test_image_classification_deprecated PROPERTIES TIMEOUT + 200) +set_tests_properties(test_fit_a_line_deprecated PROPERTIES TIMEOUT 120) diff --git a/test/deprecated/book/test_fit_a_line.py b/test/deprecated/book/test_fit_a_line_deprecated.py similarity index 100% rename from test/deprecated/book/test_fit_a_line.py rename to test/deprecated/book/test_fit_a_line_deprecated.py diff 
--git a/test/deprecated/book/test_image_classification.py b/test/deprecated/book/test_image_classification_deprecated.py similarity index 100% rename from test/deprecated/book/test_image_classification.py rename to test/deprecated/book/test_image_classification_deprecated.py diff --git a/test/deprecated/book/test_recognize_digits.py b/test/deprecated/book/test_recognize_digits_deprecated.py similarity index 100% rename from test/deprecated/book/test_recognize_digits.py rename to test/deprecated/book/test_recognize_digits_deprecated.py diff --git a/test/deprecated/book/test_recommender_system.py b/test/deprecated/book/test_recommender_system_deprecated.py similarity index 100% rename from test/deprecated/book/test_recommender_system.py rename to test/deprecated/book/test_recommender_system_deprecated.py diff --git a/test/deprecated/book/test_word2vec_book.py b/test/deprecated/book/test_word2vec_book_deprecated.py similarity index 100% rename from test/deprecated/book/test_word2vec_book.py rename to test/deprecated/book/test_word2vec_book_deprecated.py diff --git a/test/deprecated/prim/prim/vjp/CMakeLists.txt b/test/deprecated/prim/prim/vjp/CMakeLists.txt index d71096db0a142..1bed0af20ce0b 100644 --- a/test/deprecated/prim/prim/vjp/CMakeLists.txt +++ b/test/deprecated/prim/prim/vjp/CMakeLists.txt @@ -8,5 +8,4 @@ foreach(TEST_OP ${TEST_OPS}) py_test_modules(${TEST_OP} MODULES ${TEST_OP} ENVS ${GC_ENVS}) endforeach() -add_subdirectory(eager) add_subdirectory(static) diff --git a/test/deprecated/prim/prim/vjp/eager/CMakeLists.txt b/test/deprecated/prim/prim/vjp/eager/CMakeLists.txt deleted file mode 100644 index 863a484c466f1..0000000000000 --- a/test/deprecated/prim/prim/vjp/eager/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -file( - GLOB TEST_OPS - RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" - "test_*.py") -string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}") -set(GC_ENVS FLAGS_eager_delete_tensor_gb=0.0) - -foreach(TEST_OP ${TEST_OPS}) - py_test_modules(${TEST_OP} MODULES 
${TEST_OP} ENVS ${GC_ENVS}) -endforeach() diff --git a/test/deprecated/prim/prim/vjp/static/CMakeLists.txt b/test/deprecated/prim/prim/vjp/static/CMakeLists.txt index 45977c90e6a14..5431104fd925f 100644 --- a/test/deprecated/prim/prim/vjp/static/CMakeLists.txt +++ b/test/deprecated/prim/prim/vjp/static/CMakeLists.txt @@ -9,7 +9,6 @@ foreach(TEST_OP ${TEST_OPS}) py_test_modules(${TEST_OP} MODULES ${TEST_OP} ENVS ${GC_ENVS}) endforeach() -set_tests_properties(test_comp_tanh_grad PROPERTIES TIMEOUT 60) set_tests_properties(test_comp_div_grad PROPERTIES TIMEOUT 60) set_tests_properties(test_comp_add_grad PROPERTIES TIMEOUT 60) set_tests_properties(test_comp_sub_grad PROPERTIES TIMEOUT 60) diff --git a/test/legacy_test/op_test.py b/test/legacy_test/op_test.py index 4c3c201c8afe3..4f256a5ce9c3b 100644 --- a/test/legacy_test/op_test.py +++ b/test/legacy_test/op_test.py @@ -1960,7 +1960,9 @@ def check_inplace_output_with_place( if getattr(self, "no_need_check_inplace", False): return - if os.getenv("FLAGS_enable_pir_in_executor"): + if os.getenv("FLAGS_enable_pir_in_executor") or os.getenv( + "FLAGS_enable_pir_api" + ): return has_infer_inplace = base.core.has_infer_inplace(self.op_type) @@ -3119,18 +3121,19 @@ def check_grad_with_place( core._set_prim_all_enabled(False) core.set_prim_eager_enabled(False) if check_prim: - self._check_grad_helper() - prim_grad_checker = PrimGradChecker( - self, - place, - inputs_to_check, - output_names, - no_grad_set, - user_defined_grad_outputs, - ) - prim_grad_checker.check() - # Support operators which are not in the NO_FP64_CHECK_GRAD_OP_LIST list can be test prim with fp32 - self.__class__.check_prim = True + with paddle.pir_utils.OldIrGuard(): + self._check_grad_helper() + prim_grad_checker = PrimGradChecker( + self, + place, + inputs_to_check, + output_names, + no_grad_set, + user_defined_grad_outputs, + ) + prim_grad_checker.check() + # Support operators which are not in the NO_FP64_CHECK_GRAD_OP_LIST list can be test prim with 
fp32 + self.__class__.check_prim = True if check_prim_pir: with paddle.pir_utils.IrGuard(): diff --git a/test/deprecated/prim/prim/vjp/eager/test_comp_eager_cast_grad.py b/test/prim/prim/vjp/eager/test_comp_eager_cast_grad.py similarity index 84% rename from test/deprecated/prim/prim/vjp/eager/test_comp_eager_cast_grad.py rename to test/prim/prim/vjp/eager/test_comp_eager_cast_grad.py index 6547352b2b001..11fb24f007df4 100644 --- a/test/deprecated/prim/prim/vjp/eager/test_comp_eager_cast_grad.py +++ b/test/prim/prim/vjp/eager/test_comp_eager_cast_grad.py @@ -67,11 +67,16 @@ def desired(primal, cotangent): actual = actual(self.primal, self.cotangent) desired = desired(self.primal, self.cotangent) - from paddle.base.data_feeder import _PADDLE_DTYPE_2_NUMPY_DTYPE + from paddle.base.data_feeder import _PADDLE_PIR_DTYPE_2_NUMPY_DTYPE - self.assertEqual( - _PADDLE_DTYPE_2_NUMPY_DTYPE[actual[0].dtype], desired.dtype - ) + if actual[0].dtype in _PADDLE_PIR_DTYPE_2_NUMPY_DTYPE.keys(): + TO_NUMPY_DTYPE = _PADDLE_PIR_DTYPE_2_NUMPY_DTYPE + else: + from paddle.base.data_feeder import _PADDLE_DTYPE_2_NUMPY_DTYPE + + TO_NUMPY_DTYPE = _PADDLE_DTYPE_2_NUMPY_DTYPE + + self.assertEqual(TO_NUMPY_DTYPE[actual[0].dtype], desired.dtype) np.testing.assert_allclose( actual=actual[0], desired=desired, diff --git a/test/deprecated/prim/prim/vjp/eager/test_comp_eager_pow_grad.py b/test/prim/prim/vjp/eager/test_comp_eager_pow_grad.py similarity index 100% rename from test/deprecated/prim/prim/vjp/eager/test_comp_eager_pow_grad.py rename to test/prim/prim/vjp/eager/test_comp_eager_pow_grad.py diff --git a/test/prim/prim/vjp/static/CMakeLists.txt b/test/prim/prim/vjp/static/CMakeLists.txt index fbf58b6b0b3a7..593d75174b38e 100644 --- a/test/prim/prim/vjp/static/CMakeLists.txt +++ b/test/prim/prim/vjp/static/CMakeLists.txt @@ -10,3 +10,4 @@ foreach(TEST_OP ${TEST_OPS}) endforeach() set_tests_properties(test_comp_sum_grad PROPERTIES TIMEOUT 60) +set_tests_properties(test_comp_tanh_grad PROPERTIES 
TIMEOUT 60) diff --git a/test/deprecated/prim/prim/vjp/static/test_comp_cast_grad.py b/test/prim/prim/vjp/static/test_comp_cast_grad.py similarity index 82% rename from test/deprecated/prim/prim/vjp/static/test_comp_cast_grad.py rename to test/prim/prim/vjp/static/test_comp_cast_grad.py index a997f9a87d408..0a6f9f4388213 100644 --- a/test/deprecated/prim/prim/vjp/static/test_comp_cast_grad.py +++ b/test/prim/prim/vjp/static/test_comp_cast_grad.py @@ -19,7 +19,7 @@ import parameterized as param import paddle -from paddle.base import core, framework +from paddle.base import core def apply_to_static(net, use_cinn): @@ -88,27 +88,6 @@ def train(self, use_prim, use_cinn): return res - def test_cinn(self): - paddle.disable_static() - use_cinn = True - if isinstance( - framework._current_expected_place(), framework.core.CPUPlace - ): - # TODO(jiabin): CINN will crashed in this case open it when fixed - use_cinn = False - - dy_res = self.train(use_prim=False, use_cinn=False) - comp_st_cinn_res = self.train(use_prim=True, use_cinn=use_cinn) - - for i in range(len(dy_res)): - np.testing.assert_allclose( - comp_st_cinn_res[i].numpy(), - dy_res[i].numpy(), - rtol=1e-15, - atol=1e-15, - ) - paddle.enable_static() - def test_cast_grad_comp(self): core._set_prim_backward_enabled(True) @@ -124,10 +103,14 @@ def actual(primal, cotangent): x_cotangent = paddle.static.gradients(y, x, v) exe = paddle.static.Executor() exe.run(sp) + if paddle.framework.in_pir_mode(): + fetch_list = mp.blocks[0].ops[-1].result(0) + else: + fetch_list = mp.blocks[0].ops[-1].output('Out')[0] return exe.run( program=mp, feed={'primal': primal, 'cotangent': cotangent}, - fetch_list=mp.blocks[0].ops[-1].output('Out')[0], + fetch_list=fetch_list, )[0] def desired(primal, cotangent): diff --git a/test/deprecated/prim/prim/vjp/static/test_comp_reshape_grad.py b/test/prim/prim/vjp/static/test_comp_reshape_grad.py similarity index 87% rename from test/deprecated/prim/prim/vjp/static/test_comp_reshape_grad.py 
rename to test/prim/prim/vjp/static/test_comp_reshape_grad.py index 0d7b3d363d266..4365193628e71 100644 --- a/test/deprecated/prim/prim/vjp/static/test_comp_reshape_grad.py +++ b/test/prim/prim/vjp/static/test_comp_reshape_grad.py @@ -105,28 +105,9 @@ def train(self, use_prim, use_cinn): return res - def test_cinn(self): - paddle.disable_static() - use_cinn = True - if isinstance( - framework._current_expected_place(), framework.core.CPUPlace - ): - # TODO(jiabin): CINN will crashed in this case open it when fixed - use_cinn = False - - dy_res = self.train(use_prim=False, use_cinn=False) - comp_st_cinn_res = self.train(use_prim=True, use_cinn=use_cinn) - - for i in range(len(dy_res)): - np.testing.assert_allclose( - comp_st_cinn_res[i].numpy(), - dy_res[i].numpy(), - rtol=1e-7, - atol=1e-7, - ) + def test_reshape_grad_comp(self): paddle.enable_static() - def test_reshape_grad_comp(self): def actual(primal, shape, cotangent): core._set_prim_backward_enabled(True) mp, sp = paddle.static.Program(), paddle.static.Program() @@ -143,7 +124,7 @@ def actual(primal, shape, cotangent): return exe.run( program=mp, feed={'primal': primal, 'cotangent': cotangent}, - fetch_list=[x_cotangent[0].name], + fetch_list=[x_cotangent[0]], )[0] def desired(primal, shape, cotangent): @@ -162,7 +143,7 @@ def desired(primal, shape, cotangent): return exe.run( program=mp, feed={'primal': primal, 'cotangent': cotangent}, - fetch_list=[x_cotangent[0].name], + fetch_list=[x_cotangent[0]], )[0] if (self.dtype == np.float16) and isinstance( @@ -178,6 +159,7 @@ def desired(primal, shape, cotangent): atol=self.rtol, ) core._set_prim_backward_enabled(False) + paddle.disable_static() if __name__ == '__main__': diff --git a/test/deprecated/prim/prim/vjp/static/test_comp_tanh_grad.py b/test/prim/prim/vjp/static/test_comp_tanh_grad.py similarity index 88% rename from test/deprecated/prim/prim/vjp/static/test_comp_tanh_grad.py rename to test/prim/prim/vjp/static/test_comp_tanh_grad.py index 
15a88c9930569..a30dd19164e4d 100644 --- a/test/deprecated/prim/prim/vjp/static/test_comp_tanh_grad.py +++ b/test/prim/prim/vjp/static/test_comp_tanh_grad.py @@ -69,21 +69,9 @@ def train(self, use_prim, use_cinn): return res - def test_cinn(self): - paddle.disable_static() - dy_res = self.train(use_prim=False, use_cinn=False) - comp_st_cinn_res = self.train(use_prim=True, use_cinn=True) - - for i in range(len(dy_res)): - np.testing.assert_allclose( - comp_st_cinn_res[i].numpy(), - dy_res[i].numpy(), - rtol=1e-7, - atol=1e-7, - ) + def test_tanh_grad_comp(self): paddle.enable_static() - def test_tanh_grad_comp(self): def actual(primal, cotangent): mp, sp = paddle.static.Program(), paddle.static.Program() with paddle.static.program_guard(mp, sp): @@ -99,7 +87,7 @@ def actual(primal, cotangent): return exe.run( program=mp, feed={'primal': primal, 'cotangent': cotangent}, - fetch_list=[x_cotangent[0].name], + fetch_list=[x_cotangent[0]], )[0] def desired(primal, cotangent): @@ -112,6 +100,7 @@ def desired(primal, cotangent): atol=0, ) core._set_prim_backward_enabled(False) + paddle.disable_static() if __name__ == '__main__':