-
Notifications
You must be signed in to change notification settings - Fork 16
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add testing utilities to perform finite difference for operations
- Add finite difference test for add operation - Add dispatch functions to add tests for torch array backend Co-authored-by: Divya Suman <[email protected]>
- Loading branch information
1 parent
111f0f9
commit 0246ca9
Showing
6 changed files
with
236 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
4 changes: 4 additions & 0 deletions
4
python/metatensor-operations/metatensor/operations/_testing/__init__.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
from ._creation_operation import cartesian_cubic, cartesian_linear | ||
from ._grad import finite_differences | ||
|
||
__all__ = ["cartesian_cubic", "cartesian_linear", "finite_differences"] |
93 changes: 93 additions & 0 deletions
93
python/metatensor-operations/metatensor/operations/_testing/_creation_operation.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,93 @@ | ||
from typing import Union | ||
|
||
import numpy as np | ||
|
||
from .. import _dispatch | ||
from .._classes import Labels, TensorBlock, TensorMap | ||
from .._dispatch import TorchTensor | ||
from ..block_from_array import block_from_array | ||
|
||
|
||
def cartesian_cubic(
    cartesian_vector: Union[np.ndarray, TorchTensor], compute_grad: bool = False
) -> TensorMap:
    """
    Build a single-block tensor map from a set of Cartesian vectors, optionally
    with analytical gradients, according to the function:
    .. math::
        f(x, y, z) = x^3 + y^3 + z^3
        \\nabla f = (3x^2, 3y^2, 3z^2)
    :param cartesian_vector: Set of Cartesian vectors with shape (n_samples, 3)
    :param compute_grad: Specifies if the returned tensor map should contain the
        gradients
    """
    # f(x, y, z) = x^3 + y^3 + z^3, one value per sample
    cubed = cartesian_vector**3
    values = _dispatch.sum(cubed, axis=1).reshape(-1, 1)

    grad_values = None
    if compute_grad:
        # df/dx_i = 3 * x_i^2, stored with one Cartesian component axis
        grad_values = _dispatch.zeros_like(cartesian_vector, (len(values), 3, 1))
        for axis in range(3):
            grad_values[:, axis] = 3 * cartesian_vector[:, axis : axis + 1] ** 2

    block = block_from_array(values)
    if grad_values is not None:
        block.add_gradient(
            parameter="positions",
            gradient=TensorBlock(
                values=grad_values,
                samples=Labels.range("sample", len(values)),
                components=[Labels.range("cartesian", 3)],
                properties=block.properties,
            ),
        )
    return TensorMap(Labels.range("_", 1), [block])
|
||
|
||
def cartesian_linear(
    cartesian_vector: Union[np.ndarray, TorchTensor], compute_grad: bool = False
) -> TensorMap:
    """
    Build a single-block tensor map from a set of Cartesian vectors, optionally
    with analytical gradients, according to the function:
    .. math::
        f(x, y, z) = 3x + 2y + 8z + 4
        \\nabla f = (3, 2, 8)
    :param cartesian_vector: Set of Cartesian vectors with shape (n_samples, 3)
    :param compute_grad: Specifies if the returned tensor map should contain the
        gradients
    """
    # f(x, y, z) = 3x + 2y + 8z + 4, one value per sample
    linear = (
        3 * cartesian_vector[:, 0]
        + 2 * cartesian_vector[:, 1]
        + 8 * cartesian_vector[:, 2]
        + 4
    )
    values = linear.reshape(-1, 1)

    grad_values = None
    if compute_grad:
        # the gradient is constant: (3, 2, 8) for every sample
        grad_values = _dispatch.zeros_like(cartesian_vector, (len(values), 3, 1))
        for axis, coefficient in enumerate((3, 2, 8)):
            grad_values[:, axis] = coefficient * _dispatch.ones_like(
                cartesian_vector, (len(values), 1)
            )

    block = block_from_array(values)
    if grad_values is not None:
        block.add_gradient(
            parameter="positions",
            gradient=TensorBlock(
                values=grad_values,
                samples=Labels.range("sample", len(values)),
                components=[Labels.range("cartesian", 3)],
                properties=block.properties,
            ),
        )
    return TensorMap(Labels.range("_", 1), [block])
78 changes: 78 additions & 0 deletions
78
python/metatensor-operations/metatensor/operations/_testing/_grad.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
from typing import Callable, Union | ||
|
||
import numpy as np | ||
from numpy.testing import assert_allclose | ||
|
||
from .. import _dispatch | ||
from .._classes import TensorMap | ||
from .._dispatch import TorchTensor | ||
|
||
|
||
def finite_differences(
    function: Callable[[Union[np.ndarray, TorchTensor], bool], TensorMap],
    input_array: Union[np.ndarray, TorchTensor],
    parameter: str = "positions",
    displacement: float = 1e-6,
    rtol: float = 1e-5,
    atol: float = 1e-16,
) -> None:
    """
    Check that analytical gradients with respect to the :param parameter: agree with a
    central finite difference calculation of the gradients. The callable must be able
    to return the analytical gradients optionally if input argument `compute_grad` is
    true so it can be tested here. The dimension of the gradients are supposed to be
    in the components. For example if the gradients are taken with respect to
    Cartesian coordinates the :param function: outputs a tensor map of gradients with
    3 components.
    :param function: a function that outputs a tensor map (with gradients if specified
        by input parameter `compute_grad`) from the :param input_array:.
    :param input_array: an input for which the analytical and numerical gradients are
        tested
    :param parameter: the parameter of the gradient that is checked
    :param displacement: distance each atom will be displaced in each direction when
        computing finite differences
    :param rtol: Maximal relative error. ``10 * displacement`` is a good
        starting point
    :param atol: Threshold below which all values are considered zero. This should be
        very small (1e-16) to prevent false positives (if all values & gradients are
        below that threshold, tests will pass even with wrong gradients)
    :raises AssertionError: if the two gradients are not equal up to specified precision
    """
    # reference carries the analytical gradients to compare against
    reference = function(input_array, compute_grad=True)
    # number of gradient components (e.g. 3 for Cartesian x/y/z)
    dim_gradients = len(reference[0].gradient(parameter).components)
    for spatial in range(dim_gradients):
        # central differences: evaluate at +displacement/2 and -displacement/2
        # along this spatial direction (compute_grad defaults to False here,
        # only the values are needed)
        input_pos = _dispatch.copy(input_array)
        input_pos[:, spatial] += displacement / 2
        updated_pos = function(input_pos)

        input_neg = _dispatch.copy(input_array)
        input_neg[:, spatial] -= displacement / 2
        updated_neg = function(input_neg)

        assert updated_pos.keys == reference.keys
        assert updated_neg.keys == reference.keys

        for key, block in reference.items():
            gradients = block.gradient(parameter)

            block_pos = updated_pos.block(key)
            block_neg = updated_neg.block(key)

            # each gradient row refers back to one sample of the block
            for gradient_i, sample_labels in enumerate(gradients.samples):
                sample_i = sample_labels[0]

                # check that the sample is the same in both descriptors
                assert block_pos.samples[sample_i] == block.samples[sample_i]
                assert block_neg.samples[sample_i] == block.samples[sample_i]

                value_pos = block_pos.values[sample_i]
                value_neg = block_neg.values[sample_i]
                gradient = gradients.values[gradient_i, spatial]

                assert value_pos.shape == gradient.shape
                assert value_neg.shape == gradient.shape

                # central difference approximation of d f / d parameter
                finite_difference = (value_pos - value_neg) / displacement

                assert_allclose(finite_difference, gradient, rtol=rtol, atol=atol)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters