From 25fd73a58a6f081f597c57f9a9bf8faa8d84346c Mon Sep 17 00:00:00 2001
From: shirady <57721533+shirady@users.noreply.github.com>
Date: Tue, 6 Dec 2022 13:24:08 +0200
Subject: [PATCH] add url encoding to list objects

Signed-off-by: shirady <57721533+shirady@users.noreply.github.com>

run the formatter on the file

Signed-off-by: shirady <57721533+shirady@users.noreply.github.com>

call the url encoder for every field instead of using a
fields_to_encode object.

Signed-off-by: shirady <57721533+shirady@users.noreply.github.com>

add url encoding to list multipart uploads and list versions

Signed-off-by: shirady <57721533+shirady@users.noreply.github.com>
---
 src/endpoint/s3/ops/s3_get_bucket.js          | 78 ++++++++++---------
 src/endpoint/s3/ops/s3_get_bucket_uploads.js  | 54 ++++++-------
 src/endpoint/s3/ops/s3_get_bucket_versions.js | 75 +++++++++---------
 src/endpoint/s3/s3_errors.js                  | 12 ++-
 src/endpoint/s3/s3_utils.js                   | 20 +++++
 5 files changed, 138 insertions(+), 101 deletions(-)

diff --git a/src/endpoint/s3/ops/s3_get_bucket.js b/src/endpoint/s3/ops/s3_get_bucket.js
index e33845f474..7936fe58d3 100644
--- a/src/endpoint/s3/ops/s3_get_bucket.js
+++ b/src/endpoint/s3/ops/s3_get_bucket.js
@@ -7,7 +7,13 @@ const S3Error = require('../s3_errors').S3Error;
 const s3_utils = require('../s3_utils');
 
 /**
+ * list objects and list objects V2:
+ * https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjects.html
+ * https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html
+ *
+ * note: the original documentation was at the link below:
  * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
+ * (it is permanently redirected to the ListObjects link above)
  */
 async function get_bucket(req) {
 
@@ -35,43 +41,45 @@ async function get_bucket(req) {
     }
     const reply = await req.object_sdk.list_objects(params);
 
+    const field_encoder = s3_utils.get_response_field_encoder(req);
+
     return {
         ListBucketResult: [{
-            Name: req.params.bucket,
-            Prefix: req.query.prefix,
-            Delimiter: req.query.delimiter || undefined,
-            MaxKeys: max_keys_received,
-            IsTruncated: reply.is_truncated,
-            'Encoding-Type': req.query['encoding-type'],
-            ...(list_type === '2' ? {
-                ContinuationToken: cont_tok,
-                StartAfter: start_after,
-                KeyCount: reply.objects.length + reply.common_prefixes.length,
-                NextContinuationToken: key_marker_to_cont_tok(
-                    reply.next_marker, reply.objects, reply.is_truncated),
-            } : { // list_type v1
-                Marker: req.query.marker || '',
-                NextMarker: req.query.delimiter ? reply.next_marker : undefined,
-            }),
-        },
-        _.map(reply.objects, obj => ({
-            Contents: {
-                Key: obj.key,
-                // if the object specifies last_modified_time we use it, otherwise take create_time.
-                // last_modified_time is set only for cached objects.
-                // Non cached objects will use obj.create_time
-                LastModified: s3_utils.format_s3_xml_date(obj.last_modified_time || obj.create_time),
-                ETag: `"${obj.etag}"`,
-                Size: obj.size,
-                Owner: (!list_type || req.query['fetch-owner']) && s3_utils.DEFAULT_S3_USER,
-                StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
-            }
-        })),
-        _.map(reply.common_prefixes, prefix => ({
-            CommonPrefixes: {
-                Prefix: prefix || ''
-            }
-        }))
+                Name: req.params.bucket,
+                Prefix: field_encoder(req.query.prefix) || '',
+                Delimiter: field_encoder(req.query.delimiter) || undefined,
+                MaxKeys: max_keys_received,
+                IsTruncated: reply.is_truncated,
+                EncodingType: req.query['encoding-type'],
+                ...(list_type === '2' ? {
+                    ContinuationToken: cont_tok,
+                    StartAfter: field_encoder(start_after),
+                    KeyCount: reply.objects.length + reply.common_prefixes.length,
+                    NextContinuationToken: key_marker_to_cont_tok(
+                        reply.next_marker, reply.objects, reply.is_truncated),
+                } : { // list_type v1
+                    Marker: req.query.marker || '',
+                    NextMarker: req.query.delimiter ? reply.next_marker : undefined,
+                }),
+            },
+            _.map(reply.objects, obj => ({
+                Contents: {
+                    Key: field_encoder(obj.key),
+                    // if the object specifies last_modified_time we use it, otherwise take create_time.
+                    // last_modified_time is set only for cached objects.
+                    // Non cached objects will use obj.create_time
+                    LastModified: s3_utils.format_s3_xml_date(obj.last_modified_time || obj.create_time),
+                    ETag: `"${obj.etag}"`,
+                    Size: obj.size,
+                    Owner: (!list_type || req.query['fetch-owner']) && s3_utils.DEFAULT_S3_USER,
+                    StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
+                }
+            })),
+            _.map(reply.common_prefixes, prefix => ({
+                CommonPrefixes: {
+                    Prefix: field_encoder(prefix) || ''
+                }
+            }))
         ]
     };
 }
diff --git a/src/endpoint/s3/ops/s3_get_bucket_uploads.js b/src/endpoint/s3/ops/s3_get_bucket_uploads.js
index fe0cb4d4b5..8735d39e40 100644
--- a/src/endpoint/s3/ops/s3_get_bucket_uploads.js
+++ b/src/endpoint/s3/ops/s3_get_bucket_uploads.js
@@ -26,34 +26,36 @@ async function get_bucket_uploads(req) {
         limit: Math.min(max_keys_received, 1000),
     });
 
+    const field_encoder = s3_utils.get_response_field_encoder(req);
+
     return {
         ListMultipartUploadsResult: [{
-            'Bucket': req.params.bucket,
-            'Prefix': req.query.prefix,
-            'Delimiter': req.query.delimiter,
-            'MaxUploads': max_keys_received,
-            'KeyMarker': req.query['key-marker'],
-            'UploadIdMarker': req.query['upload-id-marker'],
-            'IsTruncated': reply.is_truncated,
-            'NextKeyMarker': reply.next_marker,
-            'NextUploadIdMarker': reply.next_upload_id_marker,
-            'Encoding-Type': req.query['encoding-type'],
-        },
-        _.map(reply.objects, obj => ({
-            Upload: {
-                Key: obj.key,
-                UploadId: obj.obj_id,
-                Initiated: s3_utils.format_s3_xml_date(obj.upload_started),
-                Initiator: s3_utils.DEFAULT_S3_USER,
-                Owner: s3_utils.DEFAULT_S3_USER,
-                StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
-            }
-        })),
-        _.map(reply.common_prefixes, prefix => ({
-            CommonPrefixes: {
-                Prefix: prefix || ''
-            }
-        }))
+                Bucket: req.params.bucket,
+                Prefix: field_encoder(req.query.prefix),
+                Delimiter: field_encoder(req.query.delimiter),
+                MaxUploads: max_keys_received,
+                KeyMarker: field_encoder(req.query['key-marker']),
+                UploadIdMarker: req.query['upload-id-marker'],
+                IsTruncated: reply.is_truncated,
+                NextKeyMarker: field_encoder(reply.next_marker),
+                NextUploadIdMarker: reply.next_upload_id_marker,
+                EncodingType: req.query['encoding-type'],
+            },
+            _.map(reply.objects, obj => ({
+                Upload: {
+                    Key: field_encoder(obj.key),
+                    UploadId: obj.obj_id,
+                    Initiated: s3_utils.format_s3_xml_date(obj.upload_started),
+                    Initiator: s3_utils.DEFAULT_S3_USER,
+                    Owner: s3_utils.DEFAULT_S3_USER,
+                    StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
+                }
+            })),
+            _.map(reply.common_prefixes, prefix => ({
+                CommonPrefixes: {
+                    Prefix: field_encoder(prefix) || ''
+                }
+            }))
         ]
     };
 }
diff --git a/src/endpoint/s3/ops/s3_get_bucket_versions.js b/src/endpoint/s3/ops/s3_get_bucket_versions.js
index cb34465918..8e8456ec42 100644
--- a/src/endpoint/s3/ops/s3_get_bucket_versions.js
+++ b/src/endpoint/s3/ops/s3_get_bucket_versions.js
@@ -10,7 +10,6 @@ const s3_utils = require('../s3_utils');
  * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGETVersion.html
  */
 async function get_bucket_versions(req) {
-    // TODO Implement support for encoding-type
 
     const max_keys_received = Number(req.query['max-keys'] || 1000);
     if (!Number.isInteger(max_keys_received) || max_keys_received < 0) {
@@ -27,44 +26,46 @@ async function get_bucket_versions(req) {
         limit: Math.min(max_keys_received, 1000),
     });
 
+    const field_encoder = s3_utils.get_response_field_encoder(req);
+
     return {
         ListVersionsResult: [{
-            'Name': req.params.bucket,
-            'Prefix': req.query.prefix,
-            'Delimiter': req.query.delimiter,
-            'MaxKeys': max_keys_received,
-            'KeyMarker': req.query['key-marker'],
-            'VersionIdMarker': req.query['version-id-marker'],
-            'IsTruncated': reply.is_truncated,
-            'NextKeyMarker': reply.next_marker,
-            'NextVersionIdMarker': reply.next_version_id_marker,
-            'Encoding-Type': req.query['encoding-type'],
-        },
-        _.map(reply.objects, obj => (obj.delete_marker ? ({
-            DeleteMarker: {
-                Key: obj.key,
-                VersionId: obj.version_id || 'null',
-                IsLatest: obj.is_latest,
-                LastModified: s3_utils.format_s3_xml_date(obj.create_time),
-                Owner: s3_utils.DEFAULT_S3_USER,
-            }
-        }) : ({
-            Version: {
-                Key: obj.key,
-                VersionId: obj.version_id || 'null',
-                IsLatest: obj.is_latest,
-                LastModified: s3_utils.format_s3_xml_date(obj.create_time),
-                ETag: `"${obj.etag}"`,
-                Size: obj.size,
-                Owner: s3_utils.DEFAULT_S3_USER,
-                StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
-            }
-        }))),
-        _.map(reply.common_prefixes, prefix => ({
-            CommonPrefixes: {
-                Prefix: prefix || ''
-            }
-        }))
+                Name: req.params.bucket,
+                Prefix: field_encoder(req.query.prefix),
+                Delimiter: field_encoder(req.query.delimiter),
+                MaxKeys: max_keys_received,
+                KeyMarker: field_encoder(req.query['key-marker']),
+                VersionIdMarker: req.query['version-id-marker'],
+                IsTruncated: reply.is_truncated,
+                NextKeyMarker: field_encoder(reply.next_marker),
+                NextVersionIdMarker: reply.next_version_id_marker,
+                EncodingType: req.query['encoding-type'],
+            },
+            _.map(reply.objects, obj => (obj.delete_marker ? ({
+                DeleteMarker: {
+                    Key: field_encoder(obj.key),
+                    VersionId: obj.version_id || 'null',
+                    IsLatest: obj.is_latest,
+                    LastModified: s3_utils.format_s3_xml_date(obj.create_time),
+                    Owner: s3_utils.DEFAULT_S3_USER,
+                }
+            }) : ({
+                Version: {
+                    Key: field_encoder(obj.key),
+                    VersionId: obj.version_id || 'null',
+                    IsLatest: obj.is_latest,
+                    LastModified: s3_utils.format_s3_xml_date(obj.create_time),
+                    ETag: `"${obj.etag}"`,
+                    Size: obj.size,
+                    Owner: s3_utils.DEFAULT_S3_USER,
+                    StorageClass: s3_utils.STORAGE_CLASS_STANDARD,
+                }
+            }))),
+            _.map(reply.common_prefixes, prefix => ({
+                CommonPrefixes: {
+                    Prefix: field_encoder(prefix) || ''
+                }
+            }))
         ]
     };
 }
diff --git a/src/endpoint/s3/s3_errors.js b/src/endpoint/s3/s3_errors.js
index e0ab840066..eb49f18996 100644
--- a/src/endpoint/s3/s3_errors.js
+++ b/src/endpoint/s3/s3_errors.js
@@ -41,7 +41,9 @@ class S3Error extends Error {
 
 // See http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
 
-
+//////////////////////////////////////////
+// Errors documented in AWS error pages //
+//////////////////////////////////////////
 S3Error.AccessDenied = Object.freeze({
     code: 'AccessDenied',
     message: 'Access Denied',
@@ -448,8 +450,6 @@ S3Error.NoSuchTagSet = Object.freeze({
     http_code: 404,
 });
 
-
-
 /////////////////////////////////////
 // Errors for generic HTTP replies //
 /////////////////////////////////////
@@ -506,6 +506,11 @@ S3Error.ObjectLockConfigurationNotFoundError = Object.freeze({
     message: 'Object Lock configuration does not exist for this bucket',
     http_code: 404,
 });
+S3Error.InvalidEncodingType = Object.freeze({
+    code: 'InvalidArgument',
+    message: 'Invalid Encoding Method specified in Request',
+    http_code: 400,
+});
 
 S3Error.RPC_ERRORS_TO_S3 = Object.freeze({
     UNAUTHORIZED: S3Error.AccessDenied,
@@ -543,6 +548,7 @@ S3Error.RPC_ERRORS_TO_S3 = Object.freeze({
     INTERNAL_ERROR: S3Error.InternalError,
     SERVER_SIDE_ENCRYPTION_CONFIGURATION_NOT_FOUND_ERROR: S3Error.ServerSideEncryptionConfigurationNotFoundError,
     NO_SUCH_TAG: S3Error.NoSuchTagSet,
+    INVALID_ENCODING_TYPE: S3Error.InvalidEncodingType,
 });
 
 exports.S3Error = S3Error;
diff --git a/src/endpoint/s3/s3_utils.js b/src/endpoint/s3/s3_utils.js
index 243b17a4d6..af5c0293e8 100644
--- a/src/endpoint/s3/s3_utils.js
+++ b/src/endpoint/s3/s3_utils.js
@@ -648,6 +648,25 @@ function _is_statements_fit(statements, account, method, arn_path) {
     return false;
 }
 
+function get_response_field_encoder(req) {
+    const encoding_type = req.query['encoding-type'];
+    if ((typeof encoding_type === 'undefined') || (encoding_type === null)) return response_field_encoder_none;
+    if (encoding_type.toLowerCase() === 'url') return response_field_encoder_url;
+    dbg.warn('Invalid encoding-type', encoding_type);
+    throw new S3Error(S3Error.InvalidEncodingType);
+}
+
+function response_field_encoder_none(value) {
+    return value;
+}
+
+/**
+* Uses URLSearchParams to encode the string as x-www-form-urlencoded,
+* so spaces become plus (+) and not %20 (as encodeURIComponent() would produce)
+*/
+function response_field_encoder_url(value) {
+    return new URLSearchParams({ 'a': value }).toString().slice(2); // slice the leading 'a='
+}
 
 exports.STORAGE_CLASS_STANDARD = STORAGE_CLASS_STANDARD;
 exports.DEFAULT_S3_USER = DEFAULT_S3_USER;
@@ -678,3 +697,4 @@ exports.get_http_response_from_resp = get_http_response_from_resp;
 exports.get_http_response_date = get_http_response_date;
 exports.has_bucket_policy_permission = has_bucket_policy_permission;
 exports.XATTR_SORT_SYMBOL = XATTR_SORT_SYMBOL;
+exports.get_response_field_encoder = get_response_field_encoder;
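
Reviewer note (not part of the patch): the x-www-form-urlencoded trick in response_field_encoder_url() is easy to sanity-check in isolation. The sketch below mirrors the helper added to src/endpoint/s3/s3_utils.js (it does not import it) and contrasts its output with encodeURIComponent(); the sample key name is made up for illustration.

'use strict';

// Mirrors the helper added in this patch to src/endpoint/s3/s3_utils.js.
function response_field_encoder_url(value) {
    return new URLSearchParams({ 'a': value }).toString().slice(2); // slice the leading 'a='
}

const key = 'my docs/summer +10%.txt'; // hypothetical object key

// x-www-form-urlencoded: space -> '+', '/' -> '%2F', '+' -> '%2B', '%' -> '%25'
console.log(response_field_encoder_url(key)); // my+docs%2Fsummer+%2B10%25.txt

// encodeURIComponent() would use '%20' for the space instead of '+'
console.log(encodeURIComponent(key));         // my%20docs%2Fsummer%20%2B10%25.txt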
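
A second rough sketch covers the selection and error paths of get_response_field_encoder(). The require paths are assumptions (they depend on where such a script would live relative to src/), and loading s3_utils outside the endpoint process may pull in extra configuration, so treat this as illustrative rather than a ready-made test.

'use strict';

const assert = require('assert');
// Assumed paths, relative to a hypothetical scripts/ directory at the repo root.
const s3_utils = require('../src/endpoint/s3/s3_utils');
const { S3Error } = require('../src/endpoint/s3/s3_errors');

// 'url' is matched case-insensitively and selects the URL encoder.
const url_encoder = s3_utils.get_response_field_encoder({ query: { 'encoding-type': 'URL' } });
assert.strictEqual(url_encoder('a b'), 'a+b');

// No encoding-type at all leaves response fields untouched.
const none_encoder = s3_utils.get_response_field_encoder({ query: {} });
assert.strictEqual(none_encoder('a b'), 'a b');

// Any other value is rejected with InvalidEncodingType (InvalidArgument, HTTP 400).
assert.throws(
    () => s3_utils.get_response_field_encoder({ query: { 'encoding-type': 'base64' } }),
    S3Error
);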