Skip to content

Commit

Permalink
Merge pull request #7122 from shirady/encoding-list-objects-s3-test
Browse files Browse the repository at this point in the history
Add URL encoding support to list objects, list multipart uploads, and list versions
  • Loading branch information
shirady authored Dec 21, 2022
2 parents ac07f39 + 25fd73a commit 09dc819
Show file tree
Hide file tree
Showing 5 changed files with 138 additions and 101 deletions.
78 changes: 43 additions & 35 deletions src/endpoint/s3/ops/s3_get_bucket.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,13 @@ const S3Error = require('../s3_errors').S3Error;
const s3_utils = require('../s3_utils');

/**
* list objects and list objects V2:
* https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjects.html
* https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html
*
* note: the documentation was originally at the link below:
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
* (it now permanently redirects to the ListObjects link above)
*/
async function get_bucket(req) {

Expand Down Expand Up @@ -35,43 +41,45 @@ async function get_bucket(req) {
}
const reply = await req.object_sdk.list_objects(params);

const field_encoder = s3_utils.get_response_field_encoder(req);

return {
ListBucketResult: [{
Name: req.params.bucket,
Prefix: req.query.prefix,
Delimiter: req.query.delimiter || undefined,
MaxKeys: max_keys_received,
IsTruncated: reply.is_truncated,
'Encoding-Type': req.query['encoding-type'],
...(list_type === '2' ? {
ContinuationToken: cont_tok,
StartAfter: start_after,
KeyCount: reply.objects.length + reply.common_prefixes.length,
NextContinuationToken: key_marker_to_cont_tok(
reply.next_marker, reply.objects, reply.is_truncated),
} : { // list_type v1
Marker: req.query.marker || '',
NextMarker: req.query.delimiter ? reply.next_marker : undefined,
}),
},
_.map(reply.objects, obj => ({
Contents: {
Key: obj.key,
// if the object specifies last_modified_time we use it, otherwise take create_time.
// last_modified_time is set only for cached objects.
// Non cached objects will use obj.create_time
LastModified: s3_utils.format_s3_xml_date(obj.last_modified_time || obj.create_time),
ETag: `"${obj.etag}"`,
Size: obj.size,
Owner: (!list_type || req.query['fetch-owner']) && s3_utils.DEFAULT_S3_USER,
StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
}
})),
_.map(reply.common_prefixes, prefix => ({
CommonPrefixes: {
Prefix: prefix || ''
}
}))
Name: req.params.bucket,
Prefix: field_encoder(req.query.prefix) || '',
Delimiter: field_encoder(req.query.delimiter) || undefined,
MaxKeys: max_keys_received,
IsTruncated: reply.is_truncated,
EncodingType: req.query['encoding-type'],
...(list_type === '2' ? {
ContinuationToken: cont_tok,
StartAfter: field_encoder(start_after),
KeyCount: reply.objects.length + reply.common_prefixes.length,
NextContinuationToken: key_marker_to_cont_tok(
reply.next_marker, reply.objects, reply.is_truncated),
} : { // list_type v1
Marker: req.query.marker || '',
NextMarker: req.query.delimiter ? reply.next_marker : undefined,
}),
},
_.map(reply.objects, obj => ({
Contents: {
Key: field_encoder(obj.key),
// if the object specifies last_modified_time we use it, otherwise take create_time.
// last_modified_time is set only for cached objects.
// Non cached objects will use obj.create_time
LastModified: s3_utils.format_s3_xml_date(obj.last_modified_time || obj.create_time),
ETag: `"${obj.etag}"`,
Size: obj.size,
Owner: (!list_type || req.query['fetch-owner']) && s3_utils.DEFAULT_S3_USER,
StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
}
})),
_.map(reply.common_prefixes, prefix => ({
CommonPrefixes: {
Prefix: field_encoder(prefix) || ''
}
}))
]
};
}
Expand Down
54 changes: 28 additions & 26 deletions src/endpoint/s3/ops/s3_get_bucket_uploads.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,34 +26,36 @@ async function get_bucket_uploads(req) {
limit: Math.min(max_keys_received, 1000),
});

const field_encoder = s3_utils.get_response_field_encoder(req);

return {
ListMultipartUploadsResult: [{
'Bucket': req.params.bucket,
'Prefix': req.query.prefix,
'Delimiter': req.query.delimiter,
'MaxUploads': max_keys_received,
'KeyMarker': req.query['key-marker'],
'UploadIdMarker': req.query['upload-id-marker'],
'IsTruncated': reply.is_truncated,
'NextKeyMarker': reply.next_marker,
'NextUploadIdMarker': reply.next_upload_id_marker,
'Encoding-Type': req.query['encoding-type'],
},
_.map(reply.objects, obj => ({
Upload: {
Key: obj.key,
UploadId: obj.obj_id,
Initiated: s3_utils.format_s3_xml_date(obj.upload_started),
Initiator: s3_utils.DEFAULT_S3_USER,
Owner: s3_utils.DEFAULT_S3_USER,
StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
}
})),
_.map(reply.common_prefixes, prefix => ({
CommonPrefixes: {
Prefix: prefix || ''
}
}))
Bucket: req.params.bucket,
Prefix: field_encoder(req.query.prefix),
Delimiter: field_encoder(req.query.delimiter),
MaxUploads: max_keys_received,
KeyMarker: field_encoder(req.query['key-marker']),
UploadIdMarker: req.query['upload-id-marker'],
IsTruncated: reply.is_truncated,
NextKeyMarker: field_encoder(reply.next_marker),
NextUploadIdMarker: reply.next_upload_id_marker,
EncodingType: req.query['encoding-type'],
},
_.map(reply.objects, obj => ({
Upload: {
Key: field_encoder(obj.key),
UploadId: obj.obj_id,
Initiated: s3_utils.format_s3_xml_date(obj.upload_started),
Initiator: s3_utils.DEFAULT_S3_USER,
Owner: s3_utils.DEFAULT_S3_USER,
StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
}
})),
_.map(reply.common_prefixes, prefix => ({
CommonPrefixes: {
Prefix: field_encoder(prefix) || ''
}
}))
]
};
}
Expand Down
75 changes: 38 additions & 37 deletions src/endpoint/s3/ops/s3_get_bucket_versions.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ const s3_utils = require('../s3_utils');
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGETVersion.html
*/
async function get_bucket_versions(req) {
// TODO Implement support for encoding-type

const max_keys_received = Number(req.query['max-keys'] || 1000);
if (!Number.isInteger(max_keys_received) || max_keys_received < 0) {
Expand All @@ -27,44 +26,46 @@ async function get_bucket_versions(req) {
limit: Math.min(max_keys_received, 1000),
});

const field_encoder = s3_utils.get_response_field_encoder(req);

return {
ListVersionsResult: [{
'Name': req.params.bucket,
'Prefix': req.query.prefix,
'Delimiter': req.query.delimiter,
'MaxKeys': max_keys_received,
'KeyMarker': req.query['key-marker'],
'VersionIdMarker': req.query['version-id-marker'],
'IsTruncated': reply.is_truncated,
'NextKeyMarker': reply.next_marker,
'NextVersionIdMarker': reply.next_version_id_marker,
'Encoding-Type': req.query['encoding-type'],
},
_.map(reply.objects, obj => (obj.delete_marker ? ({
DeleteMarker: {
Key: obj.key,
VersionId: obj.version_id || 'null',
IsLatest: obj.is_latest,
LastModified: s3_utils.format_s3_xml_date(obj.create_time),
Owner: s3_utils.DEFAULT_S3_USER,
}
}) : ({
Version: {
Key: obj.key,
VersionId: obj.version_id || 'null',
IsLatest: obj.is_latest,
LastModified: s3_utils.format_s3_xml_date(obj.create_time),
ETag: `"${obj.etag}"`,
Size: obj.size,
Owner: s3_utils.DEFAULT_S3_USER,
StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
}
}))),
_.map(reply.common_prefixes, prefix => ({
CommonPrefixes: {
Prefix: prefix || ''
}
}))
Name: req.params.bucket,
Prefix: field_encoder(req.query.prefix),
Delimiter: field_encoder(req.query.delimiter),
MaxKeys: max_keys_received,
KeyMarker: field_encoder(req.query['key-marker']),
VersionIdMarker: req.query['version-id-marker'],
IsTruncated: reply.is_truncated,
NextKeyMarker: field_encoder(reply.next_marker),
NextVersionIdMarker: reply.next_version_id_marker,
EncodingType: req.query['encoding-type'],
},
_.map(reply.objects, obj => (obj.delete_marker ? ({
DeleteMarker: {
Key: field_encoder(obj.key),
VersionId: obj.version_id || 'null',
IsLatest: obj.is_latest,
LastModified: s3_utils.format_s3_xml_date(obj.create_time),
Owner: s3_utils.DEFAULT_S3_USER,
}
}) : ({
Version: {
Key: field_encoder(obj.key),
VersionId: obj.version_id || 'null',
IsLatest: obj.is_latest,
LastModified: s3_utils.format_s3_xml_date(obj.create_time),
ETag: `"${obj.etag}"`,
Size: obj.size,
Owner: s3_utils.DEFAULT_S3_USER,
StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
}
}))),
_.map(reply.common_prefixes, prefix => ({
CommonPrefixes: {
Prefix: field_encoder(prefix) || ''
}
}))
]
};
}
Expand Down
12 changes: 9 additions & 3 deletions src/endpoint/s3/s3_errors.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,9 @@ class S3Error extends Error {

// See http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html


//////////////////////////////////////////
// Errors documented in AWS error pages //
//////////////////////////////////////////
S3Error.AccessDenied = Object.freeze({
code: 'AccessDenied',
message: 'Access Denied',
Expand Down Expand Up @@ -448,8 +450,6 @@ S3Error.NoSuchTagSet = Object.freeze({
http_code: 404,
});



/////////////////////////////////////
// Errors for generic HTTP replies //
/////////////////////////////////////
Expand Down Expand Up @@ -506,6 +506,11 @@ S3Error.ObjectLockConfigurationNotFoundError = Object.freeze({
message: 'Object Lock configuration does not exist for this bucket',
http_code: 404,
});
// Returned when the 'encoding-type' query parameter of a list request has a
// value other than 'url' (see get_response_field_encoder in s3_utils).
// Note the property name differs from the wire-level error: AWS reports this
// condition with the generic 'InvalidArgument' code, hence the mismatch.
S3Error.InvalidEncodingType = Object.freeze({
    code: 'InvalidArgument',
    message: 'Invalid Encoding Method specified in Request',
    http_code: 400,
});

S3Error.RPC_ERRORS_TO_S3 = Object.freeze({
UNAUTHORIZED: S3Error.AccessDenied,
Expand Down Expand Up @@ -543,6 +548,7 @@ S3Error.RPC_ERRORS_TO_S3 = Object.freeze({
INTERNAL_ERROR: S3Error.InternalError,
SERVER_SIDE_ENCRYPTION_CONFIGURATION_NOT_FOUND_ERROR: S3Error.ServerSideEncryptionConfigurationNotFoundError,
NO_SUCH_TAG: S3Error.NoSuchTagSet,
INVALID_ENCODING_TYPE: S3Error.InvalidEncodingType,
});

exports.S3Error = S3Error;
20 changes: 20 additions & 0 deletions src/endpoint/s3/s3_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -648,6 +648,25 @@ function _is_statements_fit(statements, account, method, arn_path) {
return false;
}

/**
 * Returns the encoder to apply to response fields that may contain
 * user-provided object keys / prefixes / markers, based on the request's
 * 'encoding-type' query parameter.
 *
 * @param {Object} req - the request; only req.query['encoding-type'] is read
 * @returns {function} identity encoder when no encoding-type was requested,
 *   URL encoder when encoding-type is 'url' (case-insensitive)
 * @throws {S3Error} S3Error.InvalidEncodingType for any other value
 */
function get_response_field_encoder(req) {
    const encoding_type = req.query['encoding-type'];
    // '== null' intentionally matches both undefined and null
    if (encoding_type == null) return response_field_encoder_none;
    if (encoding_type.toLowerCase() === 'url') return response_field_encoder_url;
    dbg.warn('Invalid encoding-type', encoding_type);
    throw new S3Error(S3Error.InvalidEncodingType);
}

// Identity encoder - used when the request did not ask for any encoding.
function response_field_encoder_none(value) {
    return value;
}

/**
 * Using URLSearchParams to encode the string as x-www-form-urlencoded
 * with plus (+) instead of spaces (and not %20 as encodeURIComponent() does)
 *
 * Nullish values are passed through unchanged: without the guard,
 * URLSearchParams would stringify undefined/null into the literal strings
 * 'undefined'/'null', corrupting optional response fields (e.g. Prefix,
 * Delimiter, StartAfter) and defeating the `|| ''` fallbacks at call sites.
 */
function response_field_encoder_url(value) {
    if (value === undefined || value === null) return value;
    return new URLSearchParams({ 'a': value }).toString().slice(2); // slice the leading 'a='
}

exports.STORAGE_CLASS_STANDARD = STORAGE_CLASS_STANDARD;
exports.DEFAULT_S3_USER = DEFAULT_S3_USER;
Expand Down Expand Up @@ -678,3 +697,4 @@ exports.get_http_response_from_resp = get_http_response_from_resp;
exports.get_http_response_date = get_http_response_date;
exports.has_bucket_policy_permission = has_bucket_policy_permission;
exports.XATTR_SORT_SYMBOL = XATTR_SORT_SYMBOL;
exports.get_response_field_encoder = get_response_field_encoder;

0 comments on commit 09dc819

Please sign in to comment.