diff --git a/S3/Config.py b/S3/Config.py
index 880ec1ff..cff6b1d1 100644
--- a/S3/Config.py
+++ b/S3/Config.py
@@ -133,6 +133,8 @@ class Config(object):
force = False
server_side_encryption = False
enable = None
+ # Used to allow colons in bucket names for Ceph compatibility
+ bucket_name_quirks = False
get_continue = False
put_continue = False
upload_id = u""
diff --git a/S3/S3.py b/S3/S3.py
index 65cdf768..9b45b60d 100644
--- a/S3/S3.py
+++ b/S3/S3.py
@@ -407,17 +407,21 @@ def bucket_create(self, bucket, bucket_location = None, extra_headers = None):
headers.update(extra_headers)
body = ""
- if bucket_location and bucket_location.strip().upper() != "US" and bucket_location.strip().lower() != "us-east-1":
- bucket_location = bucket_location.strip()
- if bucket_location.upper() == "EU":
- bucket_location = bucket_location.upper()
- body = ""
- body += bucket_location
- body += ""
- debug("bucket_location: " + body)
- check_bucket_name(bucket, dns_strict = True)
+ if self.config.bucket_name_quirks:
+ # We are explicitly not AWS
+ check_bucket_name(bucket, dns_strict = False, name_quirks = True)
else:
- check_bucket_name(bucket, dns_strict = False)
+ if bucket_location:
+ # We follow AWS rules
+ bucket_location = bucket_location.strip()
+ if bucket_location.upper() == "EU":
+ bucket_location = bucket_location.upper()
+ body = ""
+ body += bucket_location
+ body += ""
+ debug("bucket_location: " + body)
+ check_bucket_name(bucket, dns_strict = True, name_quirks = False)
+
if self.config.acl_public:
headers["x-amz-acl"] = "public-read"
diff --git a/S3/Utils.py b/S3/Utils.py
index 9e6a6f8a..eb23a97a 100644
--- a/S3/Utils.py
+++ b/S3/Utils.py
@@ -234,8 +234,30 @@ def time_to_epoch(t):
raise S3.Exceptions.ParameterError('Unable to convert %r to an epoch time. Pass an epoch time. Try `date -d \'now + 1 year\' +%%s` (shell) or time.mktime (Python).' % t)
-def check_bucket_name(bucket, dns_strict=True):
- if dns_strict:
+def check_bucket_name(bucket, dns_strict=True, name_quirks=False):
+ """Check that the bucket name is valid for our situation.
+
+ Check against the rules from: https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html
+
+ dns_strict: True means follow above rules exactly. False allows
+    for relaxed naming conventions that existed in us-east-1 prior to
+ March 1 2018.
+
+    name_quirks: If true, allow compatibility with services implementing
+    the S3 API that are not fully bound by the S3 rules.
+
+ """
+
+    # name_quirks has priority over dns_strict. So instead of a 4-way
+    # comparison we can get away with a 3-way one.
+ max_length = 255
+ if name_quirks:
+ invalid = re.search("([^A-Za-z0-9\._:-])", bucket, re.UNICODE)
+ if invalid:
+ raise S3.Exceptions.ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: us-ascii letters (a-z, A-Z), digits (0-9), dot (.), hyphen (-), colon (:), and underscore (_)." % (bucket, invalid.groups()[0]))
+ elif dns_strict:
+ # This is the default
+ max_length = 63
invalid = re.search("([^a-z0-9\.-])", bucket, re.UNICODE)
if invalid:
raise S3.Exceptions.ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: lowercase us-ascii letters (a-z), digits (0-9), dot (.) and hyphen (-)." % (bucket, invalid.groups()[0]))
@@ -244,27 +266,29 @@ def check_bucket_name(bucket, dns_strict=True):
if invalid:
raise S3.Exceptions.ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: us-ascii letters (a-z, A-Z), digits (0-9), dot (.), hyphen (-) and underscore (_)." % (bucket, invalid.groups()[0]))
+    # The blocks above pre-filter the allowed character sets. The checks
+    # below must be permissive enough to accept anything allowed by the
+    # least restrictive of those filters.
+
if len(bucket) < 3:
raise S3.Exceptions.ParameterError("Bucket name '%s' is too short (min 3 characters)" % bucket)
- if len(bucket) > 255:
- raise S3.Exceptions.ParameterError("Bucket name '%s' is too long (max 255 characters)" % bucket)
- if dns_strict:
- if len(bucket) > 63:
- raise S3.Exceptions.ParameterError("Bucket name '%s' is too long (max 63 characters)" % bucket)
- if re.search("-\.", bucket, re.UNICODE):
- raise S3.Exceptions.ParameterError("Bucket name '%s' must not contain sequence '-.' for DNS compatibility" % bucket)
- if re.search("\.\.", bucket, re.UNICODE):
- raise S3.Exceptions.ParameterError("Bucket name '%s' must not contain sequence '..' for DNS compatibility" % bucket)
- if not re.search("^[0-9a-z]", bucket, re.UNICODE):
- raise S3.Exceptions.ParameterError("Bucket name '%s' must start with a letter or a digit" % bucket)
- if not re.search("[0-9a-z]$", bucket, re.UNICODE):
- raise S3.Exceptions.ParameterError("Bucket name '%s' must end with a letter or a digit" % bucket)
+ if len(bucket) > max_length:
+ raise S3.Exceptions.ParameterError("Bucket name '%s' is too long (max %d characters)" % (bucket, max_length))
+ if re.search("-\.", bucket, re.UNICODE):
+ raise S3.Exceptions.ParameterError("Bucket name '%s' must not contain sequence '-.' for DNS compatibility" % bucket)
+ if re.search("\.\.", bucket, re.UNICODE):
+ raise S3.Exceptions.ParameterError("Bucket name '%s' must not contain sequence '..' for DNS compatibility" % bucket)
+ if not re.search("^[0-9a-zA-Z]", bucket, re.UNICODE):
+ raise S3.Exceptions.ParameterError("Bucket name '%s' must start with a letter or a digit" % bucket)
+ if not re.search("[0-9a-zA-Z]$", bucket, re.UNICODE):
+ raise S3.Exceptions.ParameterError("Bucket name '%s' must end with a letter or a digit" % bucket)
return True
__all__.append("check_bucket_name")
+
def check_bucket_name_dns_conformity(bucket):
try:
- return check_bucket_name(bucket, dns_strict = True)
+ return check_bucket_name(bucket, dns_strict = True, name_quirks = False)
except S3.Exceptions.ParameterError:
return False
__all__.append("check_bucket_name_dns_conformity")