Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implementations of is_contiguous and make_contiguous #522

Draft
wants to merge 6 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/src/operations/reference/logic/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@ Logic function
allclose() <allclose>
equal() <equal>
equal_metadata() <equal_metadata>
is_contiguous() <is_contiguous>
7 changes: 7 additions & 0 deletions docs/src/operations/reference/logic/is_contiguous.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
is_contiguous
=============

.. autofunction:: metatensor.is_contiguous

.. autofunction:: metatensor.is_contiguous_block

1 change: 1 addition & 0 deletions docs/src/operations/reference/manipulation/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ Manipulation operations
detach() <detach>
drop_blocks() <drop-blocks>
join() <join>
make_contiguous() <make-contiguous>
manipulate dimension <manipulate-dimension>
one_hot() <one-hot>
remove_gradients() <remove-gradients>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
make_contiguous
===============

.. autofunction:: metatensor.make_contiguous

.. autofunction:: metatensor.make_contiguous_block
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,16 @@
equal_metadata_raise,
equal_metadata_block_raise,
)
from .is_contiguous import ( # noqa: F401
is_contiguous,
is_contiguous_block,
)
from .join import join # noqa
from .lstsq import lstsq # noqa
from .make_contiguous import ( # noqa: F401
make_contiguous,
make_contiguous_block,
)
from .manipulate_dimension import ( # noqa
append_dimension,
insert_dimension,
Expand Down
36 changes: 36 additions & 0 deletions python/metatensor-operations/metatensor/operations/_dispatch.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,42 @@ def concatenate(arrays: List[TorchTensor], axis: int):
raise TypeError(UNKNOWN_ARRAY_TYPE)


def is_contiguous_array(array):
    """
    Check whether ``array`` is contiguous in memory.

    Only C-contiguity is checked for numpy arrays, for consistency with
    torch (which stores tensors in row-major / C order).

    :param array: numpy array or torch tensor to check.
    :return: true if the array's memory layout is C-contiguous.
    """
    if isinstance(array, TorchTensor):
        return array.is_contiguous()

    if isinstance(array, np.ndarray):
        return array.flags["C_CONTIGUOUS"]

    raise TypeError(UNKNOWN_ARRAY_TYPE)


def make_contiguous_array(array):
    """
    Return a C-contiguous version of ``array``.

    Equivalent to ``np.ascontiguousarray(array)`` for numpy arrays and
    ``array.contiguous()`` for torch tensors. The input is returned
    unchanged when it is already contiguous. C order is used for numpy
    for consistency with torch.

    :param array: numpy array or torch tensor.
    :return: the same array if already contiguous, otherwise a contiguous copy.
    """
    if isinstance(array, TorchTensor):
        if not array.is_contiguous():
            array = array.contiguous()
        return array

    if isinstance(array, np.ndarray):
        if not array.flags["C_CONTIGUOUS"]:
            array = np.ascontiguousarray(array)
        return array

    raise TypeError(UNKNOWN_ARRAY_TYPE)


def copy(array):
"""Returns a copy of ``array``.
The new data is not shared with the original array"""
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
from . import _dispatch
from ._backend import (
Labels,
TensorBlock,
TensorMap,
torch_jit_is_scripting,
torch_jit_script,
)


@torch_jit_script
def is_contiguous_block(block: TensorBlock) -> bool:
    """
    Checks whether the values array and gradients values arrays (if present) of an
    input :py:class:`TensorBlock` are contiguous.

    Note that arrays of :py:class:`Labels` objects are not checked for contiguity.

    :param block: the input :py:class:`TensorBlock`.

    :return: bool, true if all values arrays contiguous, false otherwise.
    """
    if not _dispatch.is_contiguous_array(block.values):
        return False

    for _param, gradient in block.gradients():
        # short-circuit: no need to check the remaining gradients once a
        # non-contiguous array has been found
        if not _dispatch.is_contiguous_array(gradient.values):
            return False

    return True


@torch_jit_script
def is_contiguous(tensor: TensorMap) -> bool:
    """
    Checks whether all values arrays and gradients values arrays (if present) in all
    :py:class:`TensorBlock` of an input :py:class:`TensorMap` are contiguous.

    Note that arrays of :py:class:`Labels` objects are not checked for contiguity.

    :param tensor: the input :py:class:`TensorMap`.

    :return: bool, true if all values arrays contiguous, false otherwise.
    """
    for _key, block in tensor.items():
        # short-circuit: one non-contiguous block is enough to decide
        if not is_contiguous_block(block):
            return False

    return True
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
from typing import List

from . import _dispatch
from ._backend import (
    Labels,
    TensorBlock,
    TensorMap,
    torch_jit_is_scripting,
    torch_jit_script,
)


@torch_jit_script
def make_contiguous_block(block: TensorBlock) -> TensorBlock:
    """
    Returns a new :py:class:`TensorBlock` where the values and gradient values (if
    present) arrays are made to be contiguous.

    :param block: the input :py:class:`TensorBlock`.

    :return: a new :py:class:`TensorBlock` where the values and gradients arrays (if
        present) are contiguous.
    """
    # `.copy()` is a numpy-only method (torch tensors use `.clone()` instead), so
    # copy through the array-agnostic `_dispatch.copy` to support both backends
    contiguous_block = TensorBlock(
        values=_dispatch.make_contiguous_array(_dispatch.copy(block.values)),
        samples=block.samples,
        components=block.components,
        properties=block.properties,
    )
    for param, gradient in block.gradients():
        new_gradient = TensorBlock(
            values=_dispatch.make_contiguous_array(_dispatch.copy(gradient.values)),
            samples=gradient.samples,
            components=gradient.components,
            properties=gradient.properties,
        )
        contiguous_block.add_gradient(param, new_gradient)

    return contiguous_block


@torch_jit_script
def make_contiguous(tensor: TensorMap) -> TensorMap:
    """
    Returns a new :py:class:`TensorMap` where all values and gradient values arrays
    are made to be contiguous.

    :param tensor: the input :py:class:`TensorMap`.

    :return: a new :py:class:`TensorMap` with the same data and metadata as
        ``tensor`` and contiguous values of ``tensor``.
    """
    keys: Labels = tensor.keys
    # `List` requires `from typing import List` at module level: TorchScript
    # evaluates this annotation, so the name must be in scope
    contiguous_blocks: List[TensorBlock] = []
    for _key, block in tensor.items():
        contiguous_blocks.append(make_contiguous_block(block))

    return TensorMap(keys=keys, blocks=contiguous_blocks)
70 changes: 70 additions & 0 deletions python/metatensor-operations/tests/is_contiguous.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import os

import pytest

import metatensor
from metatensor import TensorBlock, TensorMap


DATA_ROOT = os.path.join(os.path.dirname(__file__), "data")


@pytest.fixture
def tensor():
    """Load the qm7 power-spectrum TensorMap used by the tests below."""
    path = os.path.join(DATA_ROOT, "qm7-power-spectrum.npz")
    return metatensor.load(path, use_numpy=True)


@pytest.fixture
def incontiguous_tensor(tensor) -> TensorMap:
    """
    Return a copy of ``tensor`` made non-contiguous by reversing the order of the
    samples/properties rows/columns in every values and gradient array.
    """
    blocks = [_incontiguous_block(block) for _key, block in tensor.items()]
    return TensorMap(keys=tensor.keys, blocks=blocks)


def _incontiguous_block(block: TensorBlock) -> TensorBlock:
    """
    Return a copy of ``block`` whose values array (and every gradient values
    array) is non-contiguous, obtained by reversing rows and columns.
    """
    new_block = TensorBlock(
        values=block.values.copy()[::-1, ::-1],
        samples=block.samples,
        components=block.components,
        properties=block.properties,
    )
    for param, gradient in block.gradients():
        new_block.add_gradient(
            param,
            TensorBlock(
                values=gradient.values.copy()[::-1, ::-1],
                samples=gradient.samples,
                components=gradient.components,
                properties=gradient.properties,
            ),
        )

    return new_block


def test_is_contiguous_block(tensor):
    """A loaded block is contiguous; a row/column-reversed copy is not."""
    block = tensor.block(0)
    assert metatensor.is_contiguous_block(block)
    assert not metatensor.is_contiguous_block(_incontiguous_block(block))


def test_is_contiguous(tensor, incontiguous_tensor):
    """A loaded TensorMap is contiguous; the reversed fixture is not."""
    assert not metatensor.is_contiguous(incontiguous_tensor)
    assert metatensor.is_contiguous(tensor)
72 changes: 72 additions & 0 deletions python/metatensor-operations/tests/make_contiguous.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import os

import pytest

import metatensor
from metatensor import TensorBlock, TensorMap


DATA_ROOT = os.path.join(os.path.dirname(__file__), "data")


@pytest.fixture
def tensor():
    """Load the qm7 power-spectrum TensorMap used by the tests below."""
    path = os.path.join(DATA_ROOT, "qm7-power-spectrum.npz")
    return metatensor.load(path, use_numpy=True)


@pytest.fixture
def incontiguous_tensor(tensor) -> TensorMap:
    """
    Return a copy of ``tensor`` made non-contiguous by reversing the order of the
    samples/properties rows/columns in every values and gradient array.
    """
    blocks = [_incontiguous_block(block) for _key, block in tensor.items()]
    return TensorMap(keys=tensor.keys, blocks=blocks)


def _incontiguous_block(block: TensorBlock) -> TensorBlock:
    """
    Return a copy of ``block`` whose values array (and every gradient values
    array) is non-contiguous, obtained by reversing rows and columns.
    """
    new_block = TensorBlock(
        values=block.values.copy()[::-1, ::-1],
        samples=block.samples,
        components=block.components,
        properties=block.properties,
    )
    for param, gradient in block.gradients():
        new_block.add_gradient(
            param,
            TensorBlock(
                values=gradient.values.copy()[::-1, ::-1],
                samples=gradient.samples,
                components=gradient.components,
                properties=gradient.properties,
            ),
        )

    return new_block


def test_make_contiguous_block(tensor):
    """``make_contiguous_block`` turns a non-contiguous block contiguous.

    Renamed from ``test_is_contiguous_block``: the old name described the wrong
    operation and duplicated the test name used in tests/is_contiguous.py.
    """
    block = _incontiguous_block(tensor.block(0))
    assert not metatensor.is_contiguous_block(block)
    assert metatensor.is_contiguous_block(metatensor.make_contiguous_block(block))


def test_make_contiguous(incontiguous_tensor):
    """``make_contiguous`` turns a non-contiguous TensorMap contiguous.

    Renamed from ``test_is_contiguous``: the old name described the wrong
    operation and duplicated the test name used in tests/is_contiguous.py.
    """
    assert not metatensor.is_contiguous(incontiguous_tensor)
    assert metatensor.is_contiguous(metatensor.make_contiguous(incontiguous_tensor))
10 changes: 10 additions & 0 deletions python/metatensor-torch/tests/operations/contiguous.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import io

import torch

import metatensor.torch


def test_is_contiguous():
    # TODO: write tests, used as a placeholder for now
    # NOTE(review): before merging, this should at least check that
    # metatensor.torch.is_contiguous / make_contiguous can be compiled with
    # torch.jit.script and saved/loaded through `io` — confirm against the
    # pattern used by the other operation tests in this directory.
    assert True
Loading