diff --git a/cloud/amazon/s3.py b/cloud/amazon/s3.py
index fddf3698694..310bda3bfbc 100644
--- a/cloud/amazon/s3.py
+++ b/cloud/amazon/s3.py
@@ -83,6 +83,14 @@ options:
     required: false
     default: null
     version_added: "1.8"
+  retries:
+    description:
+      - On recoverable failure, how many times to retry before actually failing.
+    required: false
+    default: 0
+    version_added: "2.0"
+
+requirements: [ "boto" ]
 author: Lester Wade, Ralph Tice
 extends_documentation_fragment: aws
 '''
@@ -113,6 +121,7 @@ EXAMPLES = '''
 import os
 import urlparse
 import hashlib
+from ssl import SSLError
 
 try:
     import boto
@@ -221,14 +230,23 @@ def upload_s3file(module, s3, bucket, obj, src, expiry, metadata):
     except s3.provider.storage_copy_error, e:
         module.fail_json(msg= str(e))
 
-def download_s3file(module, s3, bucket, obj, dest):
-    try:
-        bucket = s3.lookup(bucket)
-        key = bucket.lookup(obj)
-        key.get_contents_to_filename(dest)
-        module.exit_json(msg="GET operation complete", changed=True)
-    except s3.provider.storage_copy_error, e:
-        module.fail_json(msg= str(e))
+def download_s3file(module, s3, bucket, obj, dest, retries):
+    # retries is the number of additional attempts after the first try
+    # fails, so the loop below runs retries + 1 times in total.
+    bucket = s3.lookup(bucket)
+    key = bucket.lookup(obj)
+    for x in range(0, retries + 1):
+        try:
+            key.get_contents_to_filename(dest)
+            module.exit_json(msg="GET operation complete", changed=True)
+        except s3.provider.storage_copy_error, e:
+            module.fail_json(msg= str(e))
+        except SSLError as e:
+            # actually fail on the last pass through the loop.
+            if x >= retries:
+                module.fail_json(msg="s3 download failed; %s" % e)
+            # otherwise, try again; this may be a transient timeout.
+            pass
 
 def download_s3str(module, s3, bucket, obj):
     try:
@@ -277,6 +295,7 @@ def main():
             s3_url = dict(aliases=['S3_URL']),
             overwrite = dict(aliases=['force'], default='always'),
             metadata = dict(type='dict'),
+            retries = dict(aliases=['retry'], type='int', default=0),
         ),
     )
     module = AnsibleModule(argument_spec=argument_spec)
@@ -294,6 +313,7 @@ def main():
     s3_url = module.params.get('s3_url')
     overwrite = module.params.get('overwrite')
     metadata = module.params.get('metadata')
+    retries = module.params.get('retries')
 
     if overwrite not in ['always', 'never', 'different']:
         if module.boolean(overwrite):
@@ -363,7 +383,7 @@ def main():
         # If the destination path doesn't exist, no need to md5um etag check, so just download.
         pathrtn = path_check(dest)
         if pathrtn is False:
-            download_s3file(module, s3, bucket, obj, dest)
+            download_s3file(module, s3, bucket, obj, dest, retries)
 
         # Compare the remote MD5 sum of the object with the local dest md5sum, if it already exists.
         if pathrtn is True:
@@ -372,13 +392,13 @@ def main():
             if md5_local == md5_remote:
                 sum_matches = True
                 if overwrite == 'always':
-                    download_s3file(module, s3, bucket, obj, dest)
+                    download_s3file(module, s3, bucket, obj, dest, retries)
                 else:
                     module.exit_json(msg="Local and remote object are identical, ignoring. Use overwrite=always parameter to force.", changed=False)
             else:
                 sum_matches = False
                 if overwrite in ('always', 'different'):
-                    download_s3file(module, s3, bucket, obj, dest)
+                    download_s3file(module, s3, bucket, obj, dest, retries)
                 else:
                     module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force download.")
 
@@ -389,6 +409,6 @@ def main():
         # At this point explicitly define the overwrite condition.
         if sum_matches is True and pathrtn is True and overwrite == 'always':
-            download_s3file(module, s3, bucket, obj, dest)
+            download_s3file(module, s3, bucket, obj, dest, retries)
 
         # If sum does not match but the destination exists, we
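
For readers skimming the patch, here is a minimal standalone sketch of the
retry pattern download_s3file now implements, assuming you already hold a
boto Key object. The helper name get_with_retries and its signature are
illustrative only and not part of the module; key.get_contents_to_filename
is the real boto call the module relies on:

    from ssl import SSLError

    def get_with_retries(key, dest, retries):
        # One initial attempt plus `retries` extra attempts, mirroring the
        # patch's range(0, retries + 1) loop.
        for attempt in range(retries + 1):
            try:
                key.get_contents_to_filename(dest)
                return
            except SSLError:
                # Give up only once the final attempt has failed; earlier
                # SSL errors are treated as transient and retried.
                if attempt >= retries:
                    raise

In a playbook this surfaces as the new option on GET operations, e.g.
retries: 3 (alias: retry); with the default of 0 the module behaves exactly
as it did before this change.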