rgw/restore: s3tests to test restore object functionality.
These tests are added to test temporary restore, permanent restore, and
read-through restore object functionality; this includes zonegroup
parameters and checks.

Signed-off-by: shreyanshjain7174 <[email protected]>
shreyanshjain7174 committed Jan 9, 2025
1 parent ae8bebd commit 29ccf28
Showing 4 changed files with 172 additions and 1 deletion.
1 change: 1 addition & 0 deletions pytest.ini
@@ -10,6 +10,7 @@ markers =
bucket_logging
checksum
cloud_transition
cloud_restore
encryption
fails_on_aws
fails_on_dbstore
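The new cloud_restore marker lets the restore tests be selected or deselected on their own (for example, pytest -m cloud_restore), alongside the existing cloud_transition marker.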
7 changes: 6 additions & 1 deletion s3tests.conf.SAMPLE
@@ -54,6 +54,8 @@ secret_key = h7GhxuBLTrlhVUyxSPUKUV8r/2EI4ngqJxD7iBdBYLhwluN30JaT3Q==

## Lifecycle debug interval (default: 10)
#lc_debug_interval = 20
## Restore debug interval (default: 100)
#rgw_restore_debug_interval = 60

[s3 alt]
# alt display_name set in vstart.sh
@@ -71,7 +73,8 @@ access_key = NOPQRSTUVWXYZABCDEFG
secret_key = nopqrstuvwxyzabcdefghijklmnabcdefghijklm

#[s3 cloud]
## to run the testcases with "cloud_transition" attribute.
## to run the testcases with the "cloud_transition" attribute for transition
## and the "cloud_restore" attribute for restore.
## Note: the waiting time may have to be tweaked depending on
## the I/O latency to the cloud endpoint.

Expand All @@ -95,6 +98,8 @@ secret_key = nopqrstuvwxyzabcdefghijklmnabcdefghijklm

## Above configured cloud storage class config options
# retain_head_object = false
# allow_read_through = false # set to true to enable read_through
# read_through_restore_days = 2
# target_storage_class = Target_SC
# target_path = cloud-bucket

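The two debug intervals above compress lifecycle and restore "days" into seconds so the restore tests finish quickly. A minimal sketch of the resulting waits, assuming the sample values shown (the tests below compute exactly these expressions):

# Assumed sample values; the tests read them from the conf file at runtime.
lc_debug_interval = 20           # one lifecycle "day" ~= 20 seconds
rgw_restore_debug_interval = 60  # one restore "day" ~= 60 seconds

transition_wait = 2 * lc_debug_interval                             # ~40s for a 1-day rule
expiry_wait = 2 * (rgw_restore_debug_interval + lc_debug_interval)  # ~160s for restore expiry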
24 changes: 24 additions & 0 deletions s3tests_boto3/functional/__init__.py
@@ -248,6 +248,11 @@ def configure():
except (configparser.NoSectionError, configparser.NoOptionError):
config.lc_debug_interval = 10

try:
config.rgw_restore_debug_interval = int(cfg.get('s3 main',"rgw_restore_debug_interval"))
except (configparser.NoSectionError, configparser.NoOptionError):
config.rgw_restore_debug_interval = 100

config.alt_access_key = cfg.get('s3 alt',"access_key")
config.alt_secret_key = cfg.get('s3 alt',"secret_key")
config.alt_display_name = cfg.get('s3 alt',"display_name")
@@ -375,6 +380,11 @@ def get_cloud_config(cfg):
except (configparser.NoSectionError, configparser.NoOptionError):
config.cloud_retain_head_object = None

try:
config.allow_read_through = cfg.get('s3 cloud',"allow_read_through")
except (configparser.NoSectionError, configparser.NoOptionError):
config.allow_read_through = False

try:
config.cloud_target_path = cfg.get('s3 cloud',"target_path")
except (configparser.NoSectionError, configparser.NoOptionError):
@@ -389,6 +399,11 @@ def get_cloud_config(cfg):
config.cloud_regular_storage_class = cfg.get('s3 cloud', "storage_class")
except (configparser.NoSectionError, configparser.NoOptionError):
config.cloud_regular_storage_class = None

try:
config.read_through_restore_days = int(cfg.get('s3 cloud', "read_through_restore_days"))
except (configparser.NoSectionError, configparser.NoOptionError):
config.read_through_restore_days = 10


def get_client(client_config=None):
@@ -769,6 +784,9 @@ def get_cloud_storage_class():
def get_cloud_retain_head_object():
return config.cloud_retain_head_object

def get_allow_read_through():
return config.allow_read_through

def get_cloud_regular_storage_class():
return config.cloud_regular_storage_class

@@ -780,3 +798,9 @@ def get_cloud_target_storage_class():

def get_lc_debug_interval():
return config.lc_debug_interval

def get_restore_debug_interval():
return config.rgw_restore_debug_interval

def get_read_through_days():
return config.read_through_restore_days
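Each new option falls back to a default when its section or key is absent, so existing conf files keep working. A minimal sketch of exercising the new accessors (assuming, as elsewhere in the suite, that S3TEST_CONF names the conf file):

from s3tests_boto3.functional import (
    configure,
    get_allow_read_through,
    get_read_through_days,
    get_restore_debug_interval,
)

configure()  # reads the conf file named by the S3TEST_CONF environment variable
print(get_restore_debug_interval())  # 100 unless [s3 main] sets rgw_restore_debug_interval
print(get_read_through_days())       # 10 unless [s3 cloud] sets read_through_restore_days
print(get_allow_read_through())      # False unless [s3 cloud] sets allow_read_through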
141 changes: 141 additions & 0 deletions s3tests_boto3/functional/test_s3.py
@@ -78,13 +78,16 @@
get_svc_client,
get_cloud_storage_class,
get_cloud_retain_head_object,
get_allow_read_through,
get_cloud_regular_storage_class,
get_cloud_target_path,
get_cloud_target_storage_class,
get_cloud_client,
nuke_prefixed_buckets,
configured_storage_classes,
get_lc_debug_interval,
get_restore_debug_interval,
get_read_through_days,
)


@@ -9430,6 +9433,15 @@ def verify_object(client, bucket, key, content=None, sc=None):
body = _get_body(response)
assert body == content

def verify_transition(client, bucket, key, sc=None):
response = client.head_object(Bucket=bucket, Key=key)

# Check the StorageClass reported in the head response
if 'StorageClass' in response:
assert response['StorageClass'] == sc
else: # storage class should be STANDARD
assert 'STANDARD' == sc
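boto3's head_object omits the StorageClass field for objects in the default class, which is why the helper treats a missing key as STANDARD. A usage sketch with hypothetical bucket, key, and storage-class names:

# After a lifecycle transition, the head response should carry the cloud class:
verify_transition(client, 'some-bucket', 'some-key', 'CLOUDTIER')
# For an untransitioned object, head_object returns no StorageClass key at all:
verify_transition(client, 'some-bucket', 'some-key', 'STANDARD')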

# The test harness for lifecycle is configured to treat days as 10 second intervals.
@pytest.mark.lifecycle
@pytest.mark.lifecycle_transition
@@ -9727,6 +9739,135 @@ def test_lifecycle_cloud_transition_large_obj():
expire1_key1_str = prefix + keys[1]
verify_object(cloud_client, target_path, expire1_key1_str, data, target_sc)

@pytest.mark.lifecycle_transition
@pytest.mark.cloud_transition
@pytest.mark.cloud_restore
@pytest.mark.fails_on_aws
@pytest.mark.fails_on_dbstore
def test_restore_object_temporary():
cloud_sc = get_cloud_storage_class()
if cloud_sc is None:
pytest.skip('[s3 cloud] section missing cloud_storage_class')

bucket = get_new_bucket()
client = get_client()
key = 'test_restore_temp'
data = 'temporary restore data'

# Put object
client.put_object(Bucket=bucket, Key=key, Body=data)
verify_object(client, bucket, key, data)

# Transition object to cloud storage class
rules = [{'ID': 'rule1', 'Transitions': [{'Days': 1, 'StorageClass': cloud_sc}], 'Prefix': '', 'Status': 'Enabled'}]
lifecycle = {'Rules': rules}
client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle)

lc_interval = get_lc_debug_interval()
restore_interval = get_restore_debug_interval()
time.sleep(2 * lc_interval)

# Verify object is transitioned
verify_transition(client, bucket, key, cloud_sc)

# Restore object temporarily
client.restore_object(Bucket=bucket, Key=key, RestoreRequest={'Days': 2})

# Verify object is restored temporarily
verify_transition(client, bucket, key, cloud_sc)
response = client.head_object(Bucket=bucket, Key=key)
assert response['ContentLength'] == len(data)
time.sleep(2 * (restore_interval + lc_interval))

# Verify object expired
response = client.head_object(Bucket=bucket, Key=key)
assert response['ContentLength'] == 0
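Besides the ContentLength checks, S3-compatible servers can also surface restore state via a Restore field on the head_object response while a restore is in flight or a temporary copy is live. A hedged sketch (whether and how RGW populates this field depends on the version):

response = client.head_object(Bucket=bucket, Key=key)
# Typically something like 'ongoing-request="false", expiry-date="..."' while the
# temporary copy exists; the field is absent once the restored copy expires.
print(response.get('Restore'))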

@pytest.mark.lifecycle_transition
@pytest.mark.cloud_transition
@pytest.mark.cloud_restore
@pytest.mark.fails_on_aws
@pytest.mark.fails_on_dbstore
def test_restore_object_permanent():
cloud_sc = get_cloud_storage_class()
if cloud_sc is None:
pytest.skip('[s3 cloud] section missing cloud_storage_class')

bucket = get_new_bucket()
client = get_client()
key = 'test_restore_perm'
data = 'permanent restore data'

# Put object
client.put_object(Bucket=bucket, Key=key, Body=data)
verify_object(client, bucket, key, data)

# Transition object to cloud storage class
rules = [{'ID': 'rule1', 'Transitions': [{'Days': 1, 'StorageClass': cloud_sc}], 'Prefix': '', 'Status': 'Enabled'}]
lifecycle = {'Rules': rules}
client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle)

lc_interval = get_lc_debug_interval()
restore_interval = get_restore_debug_interval()
time.sleep(2 * lc_interval)

# Verify object is transitioned
verify_transition(client, bucket, key, cloud_sc)

# Restore object permanently
client.restore_object(Bucket=bucket, Key=key, RestoreRequest={})
time.sleep(2)
# Verify object is restored permanently
verify_transition(client, bucket, key, 'STANDARD')
response = client.head_object(Bucket=bucket, Key=key)
assert response['ContentLength'] == len(data)
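The two tests differ only in the shape of the RestoreRequest: passing Days asks for a temporary copy, while an empty RestoreRequest is treated by RGW's cloud-restore as a permanent restore back to STANDARD (AWS requires Days here, hence the fails_on_aws marker). Side by side:

# Temporary: the restored copy expires after ~2 (debug-scaled) days.
client.restore_object(Bucket=bucket, Key=key, RestoreRequest={'Days': 2})

# Permanent (the RGW behavior under test): no Days, the object stays in STANDARD.
client.restore_object(Bucket=bucket, Key=key, RestoreRequest={})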

@pytest.mark.lifecycle_transition
@pytest.mark.cloud_transition
@pytest.mark.cloud_restore
@pytest.mark.fails_on_aws
@pytest.mark.fails_on_dbstore
def test_read_through():
cloud_sc = get_cloud_storage_class()
if cloud_sc is None:
pytest.skip('[s3 cloud] section missing cloud_storage_class')

bucket = get_new_bucket()
client = get_client()
key = 'test_restore_readthrough'
data = 'restore data with readthrough'

# Put object
client.put_object(Bucket=bucket, Key=key, Body=data)
verify_object(client, bucket, key, data)

# Transition object to cloud storage class
rules = [{'ID': 'rule1', 'Transitions': [{'Days': 1, 'StorageClass': cloud_sc}], 'Prefix': '', 'Status': 'Enabled'}]
lifecycle = {'Rules': rules}
client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle)

lc_interval = get_lc_debug_interval()
read_through_days = get_read_through_days()
time.sleep(2 * lc_interval)

# Check the storage class after transitioning
verify_transition(client, bucket, key, cloud_sc)

# Restore the object using read_through request
allow_readthrough = get_allow_read_through()
if allow_readthrough == "true": # the config value is read as a string, not a bool
response = client.get_object(Bucket=bucket, Key=key)
assert response['ContentLength'] == len(data)
time.sleep(2 * (read_through_days + lc_interval))
# verify object expired
response = client.head_object(Bucket=bucket, Key=key)
assert response['ContentLength'] == 0

else:
# read-through disabled: the GET against the cloud-tiered object is rejected
e = assert_raises(ClientError, client.get_object, Bucket=bucket, Key=key)
status, _ = _get_status_and_error_code(e.response)
assert status == 403

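In short, the branch above exercises both sides of the zonegroup read-through setting: with allow_read_through enabled, a plain GET on a cloud-tiered object triggers a restore and serves the data; with it disabled, RGW refuses to serve the object (HTTP 403 in this setup). A standalone sketch, assuming a configured boto3 client and an already-transitioned object:

from botocore.exceptions import ClientError

try:
    response = client.get_object(Bucket=bucket, Key=key)
    data = response['Body'].read()  # read-through path: data restored on demand
except ClientError as e:
    # read-through disabled: the tiered object cannot be served directly
    print(e.response['ResponseMetadata']['HTTPStatusCode'])  # expect 403 here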
@pytest.mark.encryption
@pytest.mark.fails_on_dbstore
def test_encrypted_transfer_1b():
