-rw-r--r--  snap/snapcraft.yaml                 1
-rw-r--r--  sos.spec                            2
-rw-r--r--  sos/collector/__init__.py          26
-rw-r--r--  sos/policies/distros/__init__.py  228
-rw-r--r--  sos/report/__init__.py             24
5 files changed, 274 insertions(+), 7 deletions(-)
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
index dc3b43e2..eae5572f 100644
--- a/snap/snapcraft.yaml
+++ b/snap/snapcraft.yaml
@@ -37,6 +37,7 @@ parts:
- wheel
- python_magic
- packaging
+ - boto3==1.26.155
apps:
sos:
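
The snap build now bundles boto3 pinned to 1.26.155. A quick, illustrative way to confirm the library is importable in the resulting environment (a sketch, not part of the patch):

    # Illustrative check only: confirm boto3 is importable and report its
    # version, which the snapcraft part above pins to 1.26.155.
    import boto3
    print(boto3.__version__)
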
diff --git a/sos.spec b/sos.spec
index a08e2857..ba18b0fd 100644
--- a/sos.spec
+++ b/sos.spec
@@ -22,6 +22,8 @@ Recommends: python3-pyyaml
Obsoletes: sos-collector <= 1.9
# For the _tmpfilesdir macro.
BuildRequires: systemd
+# Needed only when uploading to an S3 bucket:
+Recommends: python3-boto3
%description
Sos is a set of tools that gathers information about system
diff --git a/sos/collector/__init__.py b/sos/collector/__init__.py
index 89e1cffa..ff0c1ab7 100644
--- a/sos/collector/__init__.py
+++ b/sos/collector/__init__.py
@@ -134,7 +134,13 @@ class SoSCollector(SoSComponent):
'upload_pass': None,
'upload_method': 'auto',
'upload_no_ssl_verify': False,
- 'upload_protocol': 'auto'
+ 'upload_protocol': 'auto',
+ 'upload_s3_endpoint': None,
+ 'upload_s3_region': None,
+ 'upload_s3_bucket': None,
+ 'upload_s3_access_key': None,
+ 'upload_s3_secret_key': None,
+ 'upload_s3_object_prefix': None
}
def __init__(self, parser, parsed_args, cmdline_args):
@@ -440,8 +446,21 @@ class SoSCollector(SoSComponent):
action='store_true',
help="Disable SSL verification for upload url"
)
+ collect_grp.add_argument("--upload-s3-endpoint", default=None,
+ help="Endpoint to upload to for S3 bucket")
+ collect_grp.add_argument("--upload-s3-region", default=None,
+ help="Region for the S3 bucket")
+ collect_grp.add_argument("--upload-s3-bucket", default=None,
+ help="Name of the S3 bucket to upload to")
+ collect_grp.add_argument("--upload-s3-access-key", default=None,
+ help="Access key for the S3 bucket")
+ collect_grp.add_argument("--upload-s3-secret-key", default=None,
+ help="Secret key for the S3 bucket")
+ collect_grp.add_argument("--upload-s3-object-prefix", default=None,
+ help="Prefix for the S3 object/key")
collect_grp.add_argument("--upload-protocol", default='auto',
- choices=['auto', 'https', 'ftp', 'sftp'],
+ choices=['auto', 'https', 'ftp', 'sftp',
+ 's3'],
help="Manually specify the upload protocol")
# Group the cleaner options together
@@ -1270,7 +1289,8 @@ this utility or remote systems that it connects to.
msg = 'No sosreports were collected, nothing to archive...'
self.exit(msg, 1)
- if self.opts.upload or self.opts.upload_url:
+ if (self.opts.upload and self.policy.get_upload_url()) or \
+ self.opts.upload_s3_endpoint:
try:
self.policy.upload_archive(arc_name)
self.ui_log.info("Uploaded archive successfully")
diff --git a/sos/policies/distros/__init__.py b/sos/policies/distros/__init__.py
index 44d6561e..496514f8 100644
--- a/sos/policies/distros/__init__.py
+++ b/sos/policies/distros/__init__.py
@@ -32,6 +32,12 @@ try:
except ImportError:
REQUESTS_LOADED = False
+try:
+ import boto3
+ BOTO3_LOADED = True
+except ImportError:
+ BOTO3_LOADED = False
+
# Container environment variables for detecting if we're in a container
ENV_CONTAINER = 'container'
ENV_HOST_SYSROOT = 'HOST'
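
boto3 is imported behind a guard so it stays an optional dependency: the flag is only checked when the S3 path is actually taken, and every other upload protocol keeps working without the library. A minimal sketch of the pattern (the helper function is illustrative; sos performs the check inside upload_s3() itself):

    # Optional-dependency guard: record availability at import time,
    # fail with a clear message only when the feature is used.
    try:
        import boto3
        BOTO3_LOADED = True
    except ImportError:
        BOTO3_LOADED = False

    def require_boto3():
        # Hypothetical helper for illustration.
        if not BOTO3_LOADED:
            raise Exception("Unable to upload due to missing python boto3 "
                            "library")
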
@@ -56,11 +62,23 @@ class LinuxPolicy(Policy):
_upload_user = None
_upload_password = None
_upload_method = None
+ _upload_s3_endpoint = 'https://s3.amazonaws.com'
+ _upload_s3_bucket = None
+ _upload_s3_access_key = None
+ _upload_s3_secret_key = None
+ _upload_s3_region = None
+ _upload_s3_object_prefix = ''
default_container_runtime = 'docker'
_preferred_hash_name = None
upload_url = None
upload_user = None
upload_password = None
+ upload_s3_endpoint = None
+ upload_s3_bucket = None
+ upload_s3_access_key = None
+ upload_s3_secret_key = None
+ upload_s3_region = None
+ upload_s3_object_prefix = None
# collector-focused class attrs
containerized = False
container_image = None
@@ -292,6 +310,13 @@ class LinuxPolicy(Policy):
self.upload_password = cmdline_opts.upload_pass
self.upload_archive_name = ''
+ self.upload_s3_endpoint = cmdline_opts.upload_s3_endpoint
+ self.upload_s3_region = cmdline_opts.upload_s3_region
+ self.upload_s3_access_key = cmdline_opts.upload_s3_access_key
+ self.upload_s3_bucket = cmdline_opts.upload_s3_bucket
+ self.upload_s3_object_prefix = cmdline_opts.upload_s3_object_prefix
+ self.upload_s3_secret_key = cmdline_opts.upload_s3_secret_key
+
# set or query for case id
if not cmdline_opts.batch and not \
cmdline_opts.quiet:
@@ -314,9 +339,15 @@ class LinuxPolicy(Policy):
cmdline_opts.quiet:
try:
# Policies will need to handle the prompts for user information
- if cmdline_opts.upload and self.get_upload_url():
+ if cmdline_opts.upload and self.get_upload_url() and \
+ not cmdline_opts.upload_protocol == 's3':
self.prompt_for_upload_user()
self.prompt_for_upload_password()
+ elif cmdline_opts.upload_protocol == 's3':
+ self.prompt_for_upload_s3_bucket()
+ self.prompt_for_upload_s3_endpoint()
+ self.prompt_for_upload_s3_access_key()
+ self.prompt_for_upload_s3_secret_key()
self.ui_log.info('')
except KeyboardInterrupt:
raise
@@ -354,6 +385,58 @@ class LinuxPolicy(Policy):
except Exception as err:
self.soslog.error(f"Error setting report niceness to 19: {err}")
+ def prompt_for_upload_s3_access_key(self):
+ """Should be overridden by policies to determine if an access key needs
+ to be provided for upload or not
+ """
+ if not self.get_upload_s3_access_key():
+
+ msg = (
+ "Please provide the upload access key for bucket"
+ f" {self.get_upload_s3_bucket()} via endpoint"
+ f" {self.get_upload_s3_endpoint()}: "
+ )
+ self.upload_s3_access_key = input(_(msg))
+
+ def prompt_for_upload_s3_secret_key(self):
+ """Should be overridden by policies to determine if a secret key needs
+ to be provided for upload or not
+ """
+ if not self.get_upload_s3_secret_key():
+ msg = (
+ "Please provide the upload secret key for bucket"
+ f" {self.get_upload_s3_bucket()} via endpoint"
+ f" {self.get_upload_s3_endpoint()}: "
+ )
+ self.upload_s3_secret_key = getpass(msg)
+
+ def prompt_for_upload_s3_bucket(self):
+ """Should be overridden by policies to determine if a bucket needs to
+ be provided for upload or not
+ """
+ if not self.upload_s3_bucket:
+ if self.upload_url and self.upload_url.startswith('s3://'):
+ self.upload_s3_bucket = self.upload_url[5:]
+ else:
+ user_input = input(_("Please provide the upload bucket: "))
+ self.upload_s3_bucket = user_input.strip('/')
+ return self.upload_s3_bucket
+
+ def prompt_for_upload_s3_endpoint(self):
+ """Should be overridden by policies to determine if an endpoint needs
+ to be provided for upload or not
+ """
+ default_endpoint = self._upload_s3_endpoint
+ if not self.upload_s3_endpoint:
+ msg = (
+ "Please provide the upload endpoint for bucket"
+ f" {self.get_upload_s3_bucket()}"
+ f" (default: {default_endpoint}): "
+ )
+ user_input = input(_(msg))
+ self.upload_s3_endpoint = user_input or default_endpoint
+ return self.upload_s3_endpoint
+
def prompt_for_upload_user(self):
"""Should be overridden by policies to determine if a user needs to
be provided or not
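
The new prompts follow the existing convention: non-secret values (bucket, endpoint, access key) are read with input(), while the secret key is read with getpass() so it is not echoed. A small sketch, with the helper name and arguments illustrative only:

    from getpass import getpass

    def prompt_secret_key(bucket, endpoint):
        # Secrets are read without echo; plain input() is used elsewhere.
        return getpass(f"Please provide the upload secret key for bucket"
                       f" {bucket} via endpoint {endpoint}: ")
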
@@ -438,7 +521,8 @@ class LinuxPolicy(Policy):
prots = {
'ftp': self.upload_ftp,
'sftp': self.upload_sftp,
- 'https': self.upload_https
+ 'https': self.upload_https,
+ 's3': self.upload_s3
}
if self.commons['cmdlineopts'].upload_protocol in prots.keys():
return prots[self.commons['cmdlineopts'].upload_protocol]
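
Adding 's3' to the dispatch table is all that is needed for --upload-protocol s3 to route into the new method; values not in the table (including 'auto') fall through to the existing auto-detection logic. A sketch of the same table in isolation (the helper name is illustrative):

    def pick_uploader(policy, protocol):
        prots = {
            'ftp': policy.upload_ftp,
            'sftp': policy.upload_sftp,
            'https': policy.upload_https,
            's3': policy.upload_s3,
        }
        # Returns None for 'auto' or unknown values, leaving the caller to
        # fall back to its auto-detection path.
        return prots.get(protocol)
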
@@ -468,6 +552,73 @@ class LinuxPolicy(Policy):
return requests.auth.HTTPBasicAuth(user, password)
+ def get_upload_s3_access_key(self):
+ """Helper function to determine if we should use the policy default
+ upload access key or one provided by the user
+
+ :returns: The access_key to use for upload
+ :rtype: ``str``
+ """
+ return (os.getenv('SOSUPLOADS3ACCESSKEY', None) or
+ self.upload_s3_access_key or
+ self._upload_s3_access_key)
+
+ def get_upload_s3_endpoint(self):
+ """Helper function to determine if we should use the policy default
+ upload endpoint or one provided by the user
+
+ :returns: The S3 Endpoint to use for upload
+ :rtype: ``str``
+ """
+ if not self.upload_s3_endpoint:
+ self.prompt_for_upload_s3_endpoint()
+ return self.upload_s3_endpoint
+
+ def get_upload_s3_region(self):
+ """Helper function to determine if we should use the policy default
+ upload region or one provided by the user
+
+ :returns: The S3 region to use for upload
+ :rtype: ``str``
+ """
+ return self.upload_s3_region or self._upload_s3_region
+
+ def get_upload_s3_bucket(self):
+ """Helper function to determine if we should use the policy default
+ upload bucket or one provided by the user
+
+ :returns: The S3 bucket to use for upload
+ :rtype: ``str``
+ """
+ if self.upload_url and self.upload_url.startswith('s3://'):
+ bucket_and_prefix = self.upload_url[5:].split('/', 1)
+ self.upload_s3_bucket = bucket_and_prefix[0]
+ if len(bucket_and_prefix) > 1:
+ self.upload_s3_object_prefix = bucket_and_prefix[1]
+ if not self.upload_s3_bucket:
+ self.prompt_for_upload_s3_bucket()
+ return self.upload_s3_bucket or self._upload_s3_bucket
+
+ def get_upload_s3_object_prefix(self):
+ """Helper function to determine if we should use the policy default
+ upload object prefix or one provided by the user
+
+ :returns: The S3 object prefix to use for upload
+ :rtype: ``str``
+ """
+ return self.upload_s3_object_prefix or self._upload_s3_object_prefix
+
+ def get_upload_s3_secret_key(self):
+ """Helper function to determine if we should use the policy default
+ upload secret key or one provided by the user
+
+ :returns: The S3 secret key to use for upload
+ :rtype: ``str``
+ """
+ return (os.getenv('SOSUPLOADS3SECRETKEY', None) or
+ self.upload_s3_secret_key or
+ self._upload_s3_secret_key)
+
def get_upload_url(self):
"""Helper function to determine if we should use the policy default
upload url or one provided by the user
@@ -475,6 +626,14 @@ class LinuxPolicy(Policy):
:returns: The URL to use for upload
:rtype: ``str``
"""
+ if not self.upload_url and (
+ self.upload_s3_bucket and
+ self.upload_s3_access_key and
+ self.upload_s3_secret_key
+ ):
+ bucket = self.get_upload_s3_bucket()
+ prefix = self.get_upload_s3_object_prefix()
+ self._upload_url = f"s3://{bucket}/{prefix}"
return self.upload_url or self._upload_url
def get_upload_url_string(self):
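
Two conventions carry the helpers above: the access and secret key helpers resolve values in the order environment variable, then command-line value, then policy default; and get_upload_s3_bucket() splits an s3:// upload URL into bucket plus object prefix. A standalone sketch of both (function names are illustrative):

    import os

    def resolve_access_key(cli_value=None, policy_default=None):
        # Mirrors get_upload_s3_access_key(): env var wins, then CLI,
        # then the policy default.
        return os.getenv('SOSUPLOADS3ACCESSKEY') or cli_value or policy_default

    def split_s3_url(upload_url):
        # Mirrors get_upload_s3_bucket(): "s3://bucket/some/prefix" becomes
        # ("bucket", "some/prefix"); the prefix may be empty.
        bucket_and_prefix = upload_url[5:].split('/', 1)
        bucket = bucket_and_prefix[0]
        prefix = bucket_and_prefix[1] if len(bucket_and_prefix) > 1 else ''
        return bucket, prefix

    assert split_s3_url("s3://example-bucket/cases/01234567") == \
        ("example-bucket", "cases/01234567")
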
@@ -762,6 +921,71 @@ class LinuxPolicy(Policy):
except IOError:
raise Exception("could not open archive file")
+ def upload_s3(self, endpoint=None, region=None, bucket=None, prefix=None,
+ access_key=None, secret_key=None):
+ """Attempts to upload the archive to an S3 bucket.
+
+ :param endpoint: The S3 endpoint to upload to
+ :type endpoint: str
+
+ :param region: The S3 region to upload to
+ :type region: str
+
+ :param bucket: The name of the S3 bucket to upload to
+ :type bucket: str
+
+ :param prefix: The prefix for the S3 object/key
+ :type prefix: str
+
+ :param access_key: The access key for the S3 bucket
+ :type access_key: str
+
+ :param secret_key: The secret key for the S3 bucket
+ :type secret_key: str
+
+ :returns: True if upload is successful
+ :rtype: bool
+
+ :raises: Exception if upload is unsuccessful
+ """
+ if not BOTO3_LOADED:
+ raise Exception("Unable to upload due to missing python boto3 "
+ "library")
+
+ if not endpoint:
+ endpoint = self.get_upload_s3_endpoint()
+ if not region:
+ region = self.get_upload_s3_region()
+
+ if not bucket:
+ bucket = self.get_upload_s3_bucket().strip('/')
+
+ if not prefix:
+ prefix = self.get_upload_s3_object_prefix()
+ if prefix != '' and prefix.startswith('/'):
+ prefix = prefix[1:]
+ if prefix != '' and not prefix.endswith('/'):
+ prefix = f'{prefix}/'
+
+ if not access_key:
+ access_key = self.get_upload_s3_access_key()
+
+ if not secret_key:
+ secret_key = self.get_upload_s3_secret_key()
+
+ s3_client = boto3.client('s3', endpoint_url=endpoint,
+ region_name=region,
+ aws_access_key_id=access_key,
+ aws_secret_access_key=secret_key)
+
+ try:
+ key = prefix + self.upload_archive_name.split('/')[-1]
+ s3_client.upload_file(self.upload_archive_name,
+ bucket, key)
+ return True
+ except Exception as e:
+ raise Exception(f"Failed to upload to S3: {str(e)}") from e
+
def set_sos_prefix(self):
"""If sosreport commands need to always be prefixed with something,
for example running in a specific container image, then it should be
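
upload_s3() itself boils down to: normalize the object prefix, build a boto3 S3 client against the chosen endpoint, region and credentials, and upload the archive under prefix plus basename. A minimal standalone sketch of that path, assuming boto3 is installed; every argument value shown is a placeholder:

    import boto3

    def upload_to_s3(path, bucket, prefix='',
                     endpoint='https://s3.amazonaws.com', region=None,
                     access_key=None, secret_key=None):
        # Prefix normalization as in upload_s3(): no leading slash, exactly
        # one trailing slash when a prefix is given.
        prefix = prefix.lstrip('/')
        if prefix and not prefix.endswith('/'):
            prefix += '/'

        client = boto3.client('s3', endpoint_url=endpoint,
                              region_name=region,
                              aws_access_key_id=access_key,
                              aws_secret_access_key=secret_key)

        # The object key is the prefix plus the archive's basename.
        key = prefix + path.split('/')[-1]
        client.upload_file(path, bucket, key)
        return True

    # Example (placeholder values):
    # upload_to_s3('/var/tmp/sosreport-host.tar.xz', 'example-bucket',
    #              prefix='cases/01234567',
    #              access_key='AKIA...', secret_key='...')
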
diff --git a/sos/report/__init__.py b/sos/report/__init__.py
index 8bfc1990..77087ed1 100644
--- a/sos/report/__init__.py
+++ b/sos/report/__init__.py
@@ -129,6 +129,12 @@ class SoSReport(SoSComponent):
'upload_method': 'auto',
'upload_no_ssl_verify': False,
'upload_protocol': 'auto',
+ 'upload_s3_endpoint': None,
+ 'upload_s3_region': None,
+ 'upload_s3_bucket': None,
+ 'upload_s3_access_key': None,
+ 'upload_s3_secret_key': None,
+ 'upload_s3_object_prefix': None,
'add_preset': '',
'del_preset': ''
}
@@ -312,8 +318,21 @@ class SoSReport(SoSComponent):
report_grp.add_argument("--upload-no-ssl-verify", default=False,
action='store_true',
help="Disable SSL verification for upload url")
+ report_grp.add_argument("--upload-s3-endpoint", default=None,
+ help="Endpoint to upload to for S3 bucket")
+ report_grp.add_argument("--upload-s3-region", default=None,
+ help="Region to upload to for S3 bucket")
+ report_grp.add_argument("--upload-s3-bucket", default=None,
+ help="Name of the S3 bucket to upload to")
+ report_grp.add_argument("--upload-s3-access-key", default=None,
+ help="Access key for the S3 bucket")
+ report_grp.add_argument("--upload-s3-secret-key", default=None,
+ help="Secret key for the S3 bucket")
+ report_grp.add_argument("--upload-s3-object-prefix", default=None,
+ help="Prefix for the S3 object/key")
report_grp.add_argument("--upload-protocol", default='auto',
- choices=['auto', 'https', 'ftp', 'sftp'],
+ choices=['auto', 'https', 'ftp', 'sftp',
+ 's3'],
help="Manually specify the upload protocol")
# Group to make add/del preset exclusive
@@ -1672,7 +1691,8 @@ class SoSReport(SoSComponent):
self.policy.display_results(archive, directory, checksum,
map_file=map_file)
- if self.opts.upload or self.opts.upload_url:
+ if (self.opts.upload or self.opts.upload_url
+ or self.opts.upload_s3_endpoint):
if not self.opts.build:
try:
self.policy.upload_archive(archive)
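
On the report side an upload is attempted when --upload or --upload-url is given, or when an S3 endpoint is supplied. Condensed as a sketch (the helper name is illustrative):

    def should_upload(opts):
        return bool(opts.upload or opts.upload_url or opts.upload_s3_endpoint)

A typical run under these assumptions would be `sos report --upload --upload-protocol s3 --upload-s3-bucket example-bucket --upload-s3-endpoint https://s3.amazonaws.com` (bucket name is a placeholder), with the access and secret keys entered at the prompts or supplied through the SOSUPLOADS3ACCESSKEY and SOSUPLOADS3SECRETKEY environment variables.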