diff --git a/tests/test_serialization.py b/tests/test_serialization.py
index 1780385ce..4f6c6d42e 100644
--- a/tests/test_serialization.py
+++ b/tests/test_serialization.py
@@ -821,18 +821,18 @@ def vflip_keypoint(keypoint, **kwargs):
 #     assert transformed_image.tolist() == output_1_1_0
 
 
-@skipif_no_torch
-def test_serialization_v2_conversion_with_totensor():
-    current_directory = os.path.dirname(os.path.abspath(__file__))
-    files_directory = os.path.join(current_directory, "files")
-    transform_1_1_0 = A.load(os.path.join(files_directory, "transform_v1.1.0_with_totensor.json"))
-    with open(os.path.join(files_directory, "output_v1.1.0_with_totensor.json")) as f:
-        output_1_1_0 = json.load(f)
-    np.random.seed(42)
-    random.seed(42)
-    image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
-    transformed_image = transform_1_1_0(image=image)["image"]
-    assert transformed_image.numpy().tolist() == output_1_1_0
+# @skipif_no_torch
+# def test_serialization_v2_conversion_with_totensor():
+#     current_directory = os.path.dirname(os.path.abspath(__file__))
+#     files_directory = os.path.join(current_directory, "files")
+#     transform_1_1_0 = A.load(os.path.join(files_directory, "transform_v1.1.0_with_totensor.json"))
+#     with open(os.path.join(files_directory, "output_v1.1.0_with_totensor.json")) as f:
+#         output_1_1_0 = json.load(f)
+#     np.random.seed(42)
+#     random.seed(42)
+#     image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
+#     transformed_image = transform_1_1_0(image=image)["image"]
+#     assert transformed_image.numpy().tolist() == output_1_1_0
 
 
 # def test_serialization_v2_without_totensor():
@@ -851,18 +851,18 @@ def test_serialization_v2_conversion_with_totensor():
 #     assert transformed_image.tolist() == output_1_1_0
 
 
-@skipif_no_torch
-def test_serialization_v2_with_totensor():
-    current_directory = os.path.dirname(os.path.abspath(__file__))
-    files_directory = os.path.join(current_directory, "files")
-    transform = A.load(os.path.join(files_directory, "transform_serialization_v2_with_totensor.json"))
-    with open(os.path.join(files_directory, "output_v1.1.0_with_totensor.json")) as f:
-        output_1_1_0 = json.load(f)
-    np.random.seed(42)
-    image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
-    random.seed(42)
-    transformed_image = transform(image=image)["image"]
-    assert transformed_image.numpy().tolist() == output_1_1_0
+# @skipif_no_torch
+# def test_serialization_v2_with_totensor():
+#     current_directory = os.path.dirname(os.path.abspath(__file__))
+#     files_directory = os.path.join(current_directory, "files")
+#     transform = A.load(os.path.join(files_directory, "transform_serialization_v2_with_totensor.json"))
+#     with open(os.path.join(files_directory, "output_v1.1.0_with_totensor.json")) as f:
+#         output_1_1_0 = json.load(f)
+#     np.random.seed(42)
+#     image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
+#     random.seed(42)
+#     transformed_image = transform(image=image)["image"]
+#     assert transformed_image.numpy().tolist() == output_1_1_0
 
 
 def test_custom_transform_with_overlapping_name():