diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
index dc3b43e2dc..e4c548c393 100644
--- a/snap/snapcraft.yaml
+++ b/snap/snapcraft.yaml
@@ -37,6 +37,7 @@ parts:
       - wheel
       - python_magic
       - packaging
+      - boto3
 
 apps:
   sos:
diff --git a/sos.spec b/sos.spec
index b575b5232d..9f6313ed66 100644
--- a/sos.spec
+++ b/sos.spec
@@ -22,6 +22,8 @@ Recommends: python3-pyyaml
 Obsoletes: sos-collector <= 1.9
 # For the _tmpfilesdir macro.
 BuildRequires: systemd
+# Required only when uploading to an S3 bucket:
+Recommends: python3-boto3
 
 %description
 Sos is a set of tools that gathers information about system
diff --git a/sos/collector/__init__.py b/sos/collector/__init__.py
index ae316755eb..df3b5ee778 100644
--- a/sos/collector/__init__.py
+++ b/sos/collector/__init__.py
@@ -134,7 +134,13 @@ class SoSCollector(SoSComponent):
         'upload_pass': None,
         'upload_method': 'auto',
         'upload_no_ssl_verify': False,
-        'upload_protocol': 'auto'
+        'upload_protocol': 'auto',
+        'upload_s3_endpoint': None,
+        'upload_s3_region': None,
+        'upload_s3_bucket': None,
+        'upload_s3_access_key': None,
+        'upload_s3_secret_key': None,
+        'upload_s3_object_prefix': None
     }
 
     def __init__(self, parser, parsed_args, cmdline_args):
@@ -440,8 +446,21 @@ def add_parser_options(cls, parser):
                                  action='store_true',
                                  help="Disable SSL verification for upload url"
                                  )
+        collect_grp.add_argument("--upload-s3-endpoint", default=None,
+                                 help="Endpoint to upload to for S3 bucket")
+        collect_grp.add_argument("--upload-s3-region", default=None,
+                                 help="Region for the S3 bucket")
+        collect_grp.add_argument("--upload-s3-bucket", default=None,
+                                 help="Name of the S3 bucket to upload to")
+        collect_grp.add_argument("--upload-s3-access-key", default=None,
+                                 help="Access key for the S3 bucket")
+        collect_grp.add_argument("--upload-s3-secret-key", default=None,
+                                 help="Secret key for the S3 bucket")
+        collect_grp.add_argument("--upload-s3-object-prefix", default=None,
+                                 help="Prefix for the S3 object/key")
         collect_grp.add_argument("--upload-protocol", default='auto',
-                                 choices=['auto', 'https', 'ftp', 'sftp'],
+                                 choices=['auto', 'https', 'ftp', 'sftp',
+                                          's3'],
                                  help="Manually specify the upload protocol")
 
         # Group the cleaner options together
diff --git a/sos/policies/distros/__init__.py b/sos/policies/distros/__init__.py
index 44d6561ea5..1ef9c4c7a6 100644
--- a/sos/policies/distros/__init__.py
+++ b/sos/policies/distros/__init__.py
@@ -32,6 +32,12 @@
 except ImportError:
     REQUESTS_LOADED = False
 
+try:
+    import boto3
+    BOTO3_LOADED = True
+except ImportError:
+    BOTO3_LOADED = False
+
 # Container environment variables for detecting if we're in a container
 ENV_CONTAINER = 'container'
 ENV_HOST_SYSROOT = 'HOST'
@@ -56,11 +62,23 @@ class LinuxPolicy(Policy):
     _upload_user = None
     _upload_password = None
     _upload_method = None
+    _upload_s3_endpoint = None
+    _upload_s3_bucket = None
+    _upload_s3_access_key = None
+    _upload_s3_secret_key = None
+    _upload_s3_region = None
+    _upload_s3_object_prefix = None
     default_container_runtime = 'docker'
     _preferred_hash_name = None
     upload_url = None
     upload_user = None
     upload_password = None
+    upload_s3_endpoint = None
+    upload_s3_bucket = None
+    upload_s3_access_key = None
+    upload_s3_secret_key = None
+    upload_s3_region = None
+    upload_s3_object_prefix = None
     # collector-focused class attrs
     containerized = False
     container_image = None
@@ -292,6 +310,13 @@ def pre_work(self):
         self.upload_password = cmdline_opts.upload_pass
         self.upload_archive_name = ''
 
+        self.upload_s3_endpoint = cmdline_opts.upload_s3_endpoint
+        self.upload_s3_region = cmdline_opts.upload_s3_region
+        self.upload_s3_access_key = cmdline_opts.upload_s3_access_key
+        self.upload_s3_bucket = cmdline_opts.upload_s3_bucket
+        self.upload_s3_object_prefix = cmdline_opts.upload_s3_object_prefix
+        self.upload_s3_secret_key = cmdline_opts.upload_s3_secret_key
+
         # set or query for case id
         if not cmdline_opts.batch and not \
                 cmdline_opts.quiet:
@@ -354,6 +379,38 @@ def _configure_low_priority(self):
         except Exception as err:
             self.soslog.error(f"Error setting report niceness to 19: {err}")
 
+    def prompt_for_upload_s3_access_key(self):
+        """Should be overridden by policies to determine if an access key needs
+        to be provided for upload or not
+        """
+        if not self.get_upload_s3_access_key():
+            msg = "Please provide the upload access key for the S3 bucket: "
+            self.upload_s3_access_key = input(_(msg))
+
+    def prompt_for_upload_s3_bucket(self):
+        """Should be overridden by policies to determine if a bucket needs to
+        be provided for upload or not
+        """
+        if not self.get_upload_s3_bucket():
+            msg = "Please provide the name of the S3 bucket to upload to: "
+            self.upload_s3_bucket = input(_(msg))
+
+    def prompt_for_upload_s3_endpoint(self):
+        """Should be overridden by policies to determine if an endpoint needs
+        to be provided for upload or not
+        """
+        if not self.get_upload_s3_endpoint():
+            msg = "Please provide the S3 endpoint to upload to: "
+            self.upload_s3_endpoint = input(_(msg))
+
+    def prompt_for_upload_s3_secret_key(self):
+        """Should be overridden by policies to determine if a secret key needs
+        to be provided for upload or not
+        """
+        if not self.get_upload_s3_secret_key():
+            msg = "Please provide the upload secret key for the S3 bucket: "
+            self.upload_s3_secret_key = getpass(msg)
+
     def prompt_for_upload_user(self):
         """Should be overridden by policies to determine if a user needs
         to be provided or not
@@ -438,7 +495,8 @@ def _determine_upload_type(self):
         prots = {
             'ftp': self.upload_ftp,
             'sftp': self.upload_sftp,
-            'https': self.upload_https
+            'https': self.upload_https,
+            's3': self.upload_s3
         }
         if self.commons['cmdlineopts'].upload_protocol in prots.keys():
             return prots[self.commons['cmdlineopts'].upload_protocol]
@@ -468,6 +526,64 @@ def get_upload_https_auth(self, user=None, password=None):
 
         return requests.auth.HTTPBasicAuth(user, password)
 
+    def get_upload_s3_access_key(self):
+        """Helper function to determine if we should use the policy default
+        upload access key or one provided by the user
+
+        :returns: The access_key to use for upload
+        :rtype: ``str``
+        """
+        return (os.getenv('SOSUPLOADS3ACCESSKEY', None) or
+                self.upload_s3_access_key or
+                self._upload_s3_access_key)
+
+    def get_upload_s3_endpoint(self):
+        """Helper function to determine if we should use the policy default
+        upload endpoint or one provided by the user
+
+        :returns: The S3 Endpoint to use for upload
+        :rtype: ``str``
+        """
+        return self.upload_s3_endpoint or self._upload_s3_endpoint
+
+    def get_upload_s3_region(self):
+        """Helper function to determine if we should use the policy default
+        upload region or one provided by the user
+
+        :returns: The S3 region to use for upload
+        :rtype: ``str``
+        """
+        return self.upload_s3_region or self._upload_s3_region
+
+    def get_upload_s3_bucket(self):
+        """Helper function to determine if we should use the policy default
+        upload bucket or one provided by the user
+
+        :returns: The S3 bucket to use for upload
+        :rtype: ``str``
+        """
+        return self.upload_s3_bucket or self._upload_s3_bucket
+
+    def get_upload_s3_object_prefix(self):
+        """Helper function to determine if we should use the policy default
+        upload object prefix or one provided by the user
+
+        :returns: The S3 object prefix to use for upload
+        :rtype: ``str``
+        """
+        return self.upload_s3_object_prefix or self._upload_s3_object_prefix
+
+    def get_upload_s3_secret_key(self):
+        """Helper function to determine if we should use the policy default
+        upload secret key or one provided by the user
+
+        :returns: The S3 secret key to use for upload
+        :rtype: ``str``
+        """
+        return (os.getenv('SOSUPLOADS3SECRETKEY', None) or
+                self.upload_s3_secret_key or
+                self._upload_s3_secret_key)
+
     def get_upload_url(self):
         """Helper function to determine if we should use the policy default
         upload url or one provided by the user
@@ -762,6 +878,66 @@ def upload_ftp(self, url=None, directory=None, user=None, password=None):
         except IOError:
             raise Exception("could not open archive file")
 
+    def upload_s3(self, endpoint=None, region=None, bucket=None, prefix=None,
+                  access_key=None, secret_key=None):
+        """Attempts to upload the archive to an S3 bucket.
+
+        :param endpoint: The S3 endpoint to upload to
+        :type endpoint: str
+
+        :param region: The S3 region to upload to
+        :type region: str
+
+        :param bucket: The name of the S3 bucket to upload to
+        :type bucket: str
+
+        :param prefix: The prefix for the S3 object/key
+        :type prefix: str
+
+        :param access_key: The access key for the S3 bucket
+        :type access_key: str
+
+        :param secret_key: The secret key for the S3 bucket
+        :type secret_key: str
+
+        :returns: True if upload is successful
+        :rtype: bool
+
+        :raises: Exception if upload is unsuccessful
+        """
+        if not BOTO3_LOADED:
+            raise Exception("Unable to upload due to missing python boto3 "
+                            "library")
+
+        if not endpoint:
+            endpoint = self.get_upload_s3_endpoint()
+        if not region:
+            region = self.get_upload_s3_region()
+
+        if not bucket:
+            bucket = self.get_upload_s3_bucket()
+
+        if not prefix:
+            prefix = self.get_upload_s3_object_prefix()
+
+        if not access_key:
+            access_key = self.get_upload_s3_access_key()
+
+        if not secret_key:
+            secret_key = self.get_upload_s3_secret_key()
+
+        s3_client = boto3.client('s3', endpoint_url=endpoint,
+                                 region_name=region,
+                                 aws_access_key_id=access_key,
+                                 aws_secret_access_key=secret_key)
+
+        try:
+            key = (prefix or '') + os.path.basename(self.upload_archive_name)
+            s3_client.upload_file(self.upload_archive_name, bucket, key)
+            return True
+        except Exception as e:
+            raise Exception("Failed to upload to S3: %s" % str(e))
+
     def set_sos_prefix(self):
         """If sosreport commands need to always be prefixed with something,
         for example running in a specific container image, then it should be
diff --git a/sos/report/__init__.py b/sos/report/__init__.py
index 591782eedd..2a9db8afe6 100644
--- a/sos/report/__init__.py
+++ b/sos/report/__init__.py
@@ -129,6 +129,12 @@ class SoSReport(SoSComponent):
         'upload_method': 'auto',
         'upload_no_ssl_verify': False,
         'upload_protocol': 'auto',
+        'upload_s3_endpoint': None,
+        'upload_s3_region': None,
+        'upload_s3_bucket': None,
+        'upload_s3_access_key': None,
+        'upload_s3_secret_key': None,
+        'upload_s3_object_prefix': None,
         'add_preset': '',
         'del_preset': ''
     }
@@ -312,8 +318,21 @@ def add_parser_options(cls, parser):
         report_grp.add_argument("--upload-no-ssl-verify", default=False,
                                 action='store_true',
                                 help="Disable SSL verification for upload url")
+        report_grp.add_argument("--upload-s3-endpoint", default=None,
+                                help="Endpoint to upload to for S3 bucket")
+        report_grp.add_argument("--upload-s3-region", default=None,
+                                help="Region to upload to for S3 bucket")
+        report_grp.add_argument("--upload-s3-bucket", default=None,
+                                help="Name of the S3 bucket to upload to")
+        report_grp.add_argument("--upload-s3-access-key", default=None,
+                                help="Access key for the S3 bucket")
+        report_grp.add_argument("--upload-s3-secret-key", default=None,
+                                help="Secret key for the S3 bucket")
+        report_grp.add_argument("--upload-s3-object-prefix", default=None,
+                                help="Prefix for the S3 object/key")
         report_grp.add_argument("--upload-protocol", default='auto',
-                                choices=['auto', 'https', 'ftp', 'sftp'],
+                                choices=['auto', 'https', 'ftp', 'sftp',
+                                        's3'],
                                 help="Manually specify the upload protocol")
 
         # Group to make add/del preset exclusive
@@ -1670,7 +1689,8 @@ def final_work(self):
             self.policy.display_results(archive, directory, checksum,
                                         map_file=map_file)
 
-        if self.opts.upload or self.opts.upload_url:
+        if (self.opts.upload or self.opts.upload_url
+                or self.opts.upload_s3_endpoint):
             if not self.opts.build:
                 try:
                     self.policy.upload_archive(archive)