Skip to content

Commit

Permalink
Add s3 upload storage method
Browse files Browse the repository at this point in the history
Signed-off-by: Mike Perez <[email protected]>
  • Loading branch information
Thingee committed Aug 29, 2024
1 parent 3ed5cdf commit ce225fc
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 3 deletions.
12 changes: 12 additions & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,18 @@ the service as follows::
api_key = 'secret'


storage_method
^^^^^^^^^^^^^^
The ``storage_method`` is a required configuration item; it defines where the
binaries should be stored. The two available values are ``local`` and
``s3``.

bucket
^^^^^^
The ``bucket`` setting is required if the ``storage_method`` configuration is
set to ``s3``. This defines which S3 bucket the binaries should be stored in.


Self-discovery
--------------
The API provides informational JSON at every step of the URL about what is
Expand Down
26 changes: 23 additions & 3 deletions chacra/controllers/binaries/archs.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import logging
import os
import boto3
from botocore.exceptions import ClientError
import pecan
from pecan import response
from pecan.secure import secure
Expand Down Expand Up @@ -89,7 +91,7 @@ def index_post(self):
if request.POST.get('force', False) is False:
error('/errors/invalid', 'resource already exists and "force" key was not used')

full_path = self.save_file(file_obj)
full_path, size = self.save_file(file_obj)

if self.binary is None:
path = full_path
Expand All @@ -102,14 +104,19 @@ def index_post(self):
self.binary = Binary(
self.binary_name, self.project, arch=arch,
distro=distro, distro_version=distro_version,
ref=ref, sha1=sha1, path=path, size=os.path.getsize(path)
ref=ref, sha1=sha1, path=path, size=size
)
else:
self.binary.path = full_path

# check if this binary is interesting for other configured projects,
# and if so, then mark those other repos so that they can be re-built
self.mark_related_repos()

# Remove the local file after S3 upload
if pecan.conf.storage_method == 's3':
os.remove(full_path)

return dict()

def mark_related_repos(self):
Expand Down Expand Up @@ -175,8 +182,21 @@ def save_file(self, file_obj):
for chunk in file_iterable:
f.write(chunk)

if pecan.conf.storage_method == 's3':
bucket = pecan.conf.bucket
object_name = os.path.basename(self.binary_name)

s3_client = boto3.client('s3')
try:
with open(destination, 'rb') as f:
s3_client.upload_fileobj(f, bucket, object_name)
except ClientError as e:
error('/errors/error/', 'file object upload to S3 failed with error %s' % e)

size = os.path.getsize(destination)

# return the full path to the saved object:
return destination
return destination, size

@expose()
def _lookup(self, name, *remainder):
Expand Down
6 changes: 6 additions & 0 deletions config/dev.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,11 +63,17 @@
'encoding': 'utf-8'
}

# Where to store the data. Options are 's3' or 'local'
storage_method = 'local'

# location for storing uploaded binaries
binary_root = '%(confdir)s/public'
repos_root = '%(confdir)s/repos'
distributions_root = '%(confdir)s/distributions'

# If storage method is s3, provide a bucket name
bucket = ''

# When True it will set the headers so that Nginx can serve the download
# instead of Pecan.
delegate_downloads = False
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,5 @@ alembic
ipython
python-statsd
requests
boto3
importlib_metadata<=3.6; python_version<'3.8'

0 comments on commit ce225fc

Please sign in to comment.