From 1d413b8112826300a12d28f727a15d06e1efb0ea Mon Sep 17 00:00:00 2001 From: shreyanshjain7174 Date: Mon, 4 Nov 2024 07:10:21 -0500 Subject: [PATCH] rgw/restore: s3tests to test restore object functionality. These tests are added to test temporary restore, permanent restore and read-through restore object functionality; this includes zonegroup parameters and checks. Signed-off-by: shreyanshjain7174 --- s3tests.conf.SAMPLE | 4 + s3tests_boto3/functional/__init__.py | 16 ++++ s3tests_boto3/functional/test_s3.py | 134 +++++++++++++++++++++++++++ 3 files changed, 154 insertions(+) diff --git a/s3tests.conf.SAMPLE b/s3tests.conf.SAMPLE index 3f0992aa8..b22422993 100644 --- a/s3tests.conf.SAMPLE +++ b/s3tests.conf.SAMPLE @@ -54,6 +54,8 @@ secret_key = h7GhxuBLTrlhVUyxSPUKUV8r/2EI4ngqJxD7iBdBYLhwluN30JaT3Q== ## Lifecycle debug interval (default: 10) #lc_debug_interval = 20 +## Restore debug interval (default: 100) +#rgw_restore_debug_interval = 60 [s3 alt] # alt display_name set in vstart.sh @@ -95,6 +97,8 @@ secret_key = nopqrstuvwxyzabcdefghijklmnabcdefghijklm ## Above configured cloud storage class config options # retain_head_object = false +# allow_read_through = false # change it to enable read_through +# read_through_restore_days = 2 # target_storage_class = Target_SC # target_path = cloud-bucket diff --git a/s3tests_boto3/functional/__init__.py b/s3tests_boto3/functional/__init__.py index 7ca874b4a..5887cdd8e 100644 --- a/s3tests_boto3/functional/__init__.py +++ b/s3tests_boto3/functional/__init__.py @@ -248,6 +248,11 @@ def configure(): except (configparser.NoSectionError, configparser.NoOptionError): config.lc_debug_interval = 10 + try: + config.rgw_restore_debug_interval = int(cfg.get('s3 main',"rgw_restore_debug_interval")) + except (configparser.NoSectionError, configparser.NoOptionError): + config.rgw_restore_debug_interval = 100 + config.alt_access_key = cfg.get('s3 alt',"access_key") config.alt_secret_key = cfg.get('s3 alt',"secret_key") 
config.alt_display_name = cfg.get('s3 alt',"display_name") @@ -375,6 +380,11 @@ def get_cloud_config(cfg): except (configparser.NoSectionError, configparser.NoOptionError): config.cloud_retain_head_object = None + try: + config.allow_read_through = cfg.get('s3 cloud',"allow_read_through") + except (configparser.NoSectionError, configparser.NoOptionError): + config.allow_read_through = False + try: config.cloud_target_path = cfg.get('s3 cloud',"target_path") except (configparser.NoSectionError, configparser.NoOptionError): @@ -769,6 +779,9 @@ def get_cloud_storage_class(): def get_cloud_retain_head_object(): return config.cloud_retain_head_object +def get_allow_read_through(): + return config.allow_read_through + def get_cloud_regular_storage_class(): return config.cloud_regular_storage_class @@ -780,3 +793,6 @@ def get_cloud_target_storage_class(): def get_lc_debug_interval(): return config.lc_debug_interval + +def get_restore_debug_interval(): + return config.rgw_restore_debug_interval diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index e80afe647..40409cb49 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -78,6 +78,7 @@ get_svc_client, get_cloud_storage_class, get_cloud_retain_head_object, + get_allow_read_through, get_cloud_regular_storage_class, get_cloud_target_path, get_cloud_target_storage_class, @@ -85,6 +86,7 @@ nuke_prefixed_buckets, configured_storage_classes, get_lc_debug_interval, + get_restore_debug_interval, ) @@ -9430,6 +9432,15 @@ def verify_object(client, bucket, key, content=None, sc=None): body = _get_body(response) assert body == content +def verify_transition(client, bucket, key, content=None, sc=None): + response = client.head_object(Bucket=bucket, Key=key) + + # Iterate over the contents to find the StorageClass + if 'StorageClass' in response: + assert response['StorageClass'] == sc + else: # storage class should be STANDARD + assert 'STANDARD' == sc + # 
The test harness for lifecycle is configured to treat days as 10 second intervals. @pytest.mark.lifecycle @pytest.mark.lifecycle_transition @@ -9727,6 +9738,129 @@ def test_lifecycle_cloud_transition_large_obj(): expire1_key1_str = prefix + keys[1] verify_object(cloud_client, target_path, expire1_key1_str, data, target_sc) +@pytest.mark.lifecycle_transition +@pytest.mark.cloud_transition +@pytest.mark.fails_on_aws +@pytest.mark.fails_on_dbstore +def test_restore_object_temporary(): + cloud_sc = get_cloud_storage_class() + if cloud_sc is None: + pytest.skip('[s3 cloud] section missing cloud_storage_class') + + bucket = get_new_bucket() + client = get_client() + key = 'test_restore_temp' + data = 'temporary restore data' + + # Put object + client.put_object(Bucket=bucket, Key=key, Body=data) + verify_object(client, bucket, key, data) + + # Transition object to cloud storage class + rules = [{'ID': 'rule1', 'Transitions': [{'Days': 1, 'StorageClass': cloud_sc}], 'Prefix': '', 'Status': 'Enabled'}] + lifecycle = {'Rules': rules} + client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle) + + lc_interval = get_lc_debug_interval() + restore_interval = get_restore_debug_interval() + time.sleep(2 * lc_interval) + + # Verify object is transitioned + verify_transition(client, bucket, key, data, cloud_sc) + + # Restore object temporarily + client.restore_object(Bucket=bucket, Key=key, RestoreRequest={'Days': 2}) + + # Verify object is restored temporarily + verify_transition(client, bucket, key, data, cloud_sc) + response = client.head_object(Bucket=bucket, Key=key) + assert response['ContentLength'] == len(data) + time.sleep(2 * (restore_interval + lc_interval)) + + #verify object expired + # response = client.head_object(Bucket=bucket, Key=key) + verify_object(client, bucket, key, data, cloud_sc) + +@pytest.mark.lifecycle_transition +@pytest.mark.cloud_transition +@pytest.mark.fails_on_aws +@pytest.mark.fails_on_dbstore +def 
test_restore_object_permanent(): + cloud_sc = get_cloud_storage_class() + if cloud_sc is None: + pytest.skip('[s3 cloud] section missing cloud_storage_class') + + bucket = get_new_bucket() + client = get_client() + key = 'test_restore_perm' + data = 'permanent restore data' + + # Put object + client.put_object(Bucket=bucket, Key=key, Body=data) + verify_object(client, bucket, key, data) + + # Transition object to cloud storage class + rules = [{'ID': 'rule1', 'Transitions': [{'Days': 1, 'StorageClass': cloud_sc}], 'Prefix': '', 'Status': 'Enabled'}] + lifecycle = {'Rules': rules} + client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle) + + lc_interval = get_lc_debug_interval() + restore_interval = get_restore_debug_interval() + time.sleep(2 * lc_interval) + + # Verify object is transitioned + verify_transition(client, bucket, key, data, cloud_sc) + + # Restore object permanently + client.restore_object(Bucket=bucket, Key=key, RestoreRequest={}) + + # Verify object is restored permanently + verify_transition(client, bucket, key, data, 'STANDARD') + response = client.head_object(Bucket=bucket, Key=key) + assert response['ContentLength'] == len(data) + +@pytest.mark.lifecycle_transition +@pytest.mark.cloud_transition +@pytest.mark.fails_on_aws +@pytest.mark.fails_on_dbstore +def test_read_through(): + cloud_sc = get_cloud_storage_class() + if cloud_sc is None: + pytest.skip('[s3 cloud] section missing cloud_storage_class') + + bucket = get_new_bucket() + client = get_client() + key = 'test_restore_readthrough' + data = 'restore data with readthrough' + # sc = "CLOUDTIER" + + # Put object + client.put_object(Bucket=bucket, Key=key, Body=data) + verify_object(client, bucket, key, data) + + # Transition object to cloud storage class + rules = [{'ID': 'rule1', 'Transitions': [{'Days': 1, 'StorageClass': cloud_sc}], 'Prefix': '', 'Status': 'Enabled'}] + lifecycle = {'Rules': rules} + 
client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle) + + lc_interval = get_lc_debug_interval() + restore_interval = get_restore_debug_interval() + time.sleep(2 * lc_interval) + + # Check the storage class after transitioning + verify_transition(client, bucket, key, sc=cloud_sc) + + # Restore the object using read_through request + allow_readthrough = get_allow_read_through() + if allow_readthrough: + response = client.get_object(Bucket=bucket, Key=key) + assert response['ContentLength'] == len(data) + + else: + with assert_raises(ClientError) as e: + response = client.get_object(Bucket=bucket, Key=key) + assert e.exception.response['Error']['Code'] == '403' + @pytest.mark.encryption @pytest.mark.fails_on_dbstore def test_encrypted_transfer_1b():