From 2783dd8a267215c1f7203f81b80aeb2af10972f0 Mon Sep 17 00:00:00 2001 From: Guillaume Fraux Date: Wed, 3 Jul 2024 14:12:34 +0200 Subject: [PATCH] Use the same default precision as torch.gradcheck --- python/metatensor-operations/tests/_gradcheck.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/metatensor-operations/tests/_gradcheck.py b/python/metatensor-operations/tests/_gradcheck.py index 6beaeab9d..4d5611acb 100644 --- a/python/metatensor-operations/tests/_gradcheck.py +++ b/python/metatensor-operations/tests/_gradcheck.py @@ -11,8 +11,8 @@ def check_finite_differences( *, parameter: str, displacement: float = 1e-6, - rtol: float = 1e-5, - atol: float = 1e-15, + rtol: float = 1e-3, + atol: float = 1e-9, ) -> None: """ Check that analytical gradients with respect to ``parameter`` in the