Merge branch 'stansonhealth-s3_retry' into devel
commit c80c471876
1 changed file with 32 additions and 11 deletions
@@ -83,6 +83,14 @@ options:
     required: false
     default: null
     version_added: "1.8"
+  retries:
+    description:
+      - On recoverable failure, how many times to retry before actually failing.
+    required: false
+    default: 0
+    version_added: "2.0"
+
+requirements: [ "boto" ]
 author: Lester Wade, Ralph Tice
 extends_documentation_fragment: aws
 '''
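A note on the option documented above: retries counts additional attempts beyond the first, so the default of 0 preserves the previous single-attempt behavior. A minimal sketch of that arithmetic (the attempts helper is illustrative, not part of the module):

    def attempts(retries):
        # The download loop added below runs range(0, retries + 1) times:
        # the initial try plus one more per configured retry.
        return retries + 1

    assert attempts(0) == 1   # default: fail on the first transient error
    assert attempts(3) == 4   # three retries after the initial attempt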
@@ -113,6 +121,7 @@ EXAMPLES = '''
 import os
 import urlparse
 import hashlib
+from ssl import SSLError

 try:
     import boto
@@ -221,14 +230,23 @@ def upload_s3file(module, s3, bucket, obj, src, expiry, metadata):
     except s3.provider.storage_copy_error, e:
         module.fail_json(msg= str(e))

-def download_s3file(module, s3, bucket, obj, dest):
-    try:
-        bucket = s3.lookup(bucket)
-        key = bucket.lookup(obj)
-        key.get_contents_to_filename(dest)
-        module.exit_json(msg="GET operation complete", changed=True)
-    except s3.provider.storage_copy_error, e:
-        module.fail_json(msg= str(e))
+def download_s3file(module, s3, bucket, obj, dest, retries):
+    # retries is the number of loops; range/xrange needs to be one
+    # more to get that count of loops.
+    bucket = s3.lookup(bucket)
+    key = bucket.lookup(obj)
+    for x in range(0, retries + 1):
+        try:
+            key.get_contents_to_filename(dest)
+            module.exit_json(msg="GET operation complete", changed=True)
+        except s3.provider.storage_copy_error, e:
+            module.fail_json(msg= str(e))
+        except SSLError as e:
+            # actually fail on last pass through the loop.
+            if x >= retries:
+                module.fail_json(msg="s3 download failed; %s" % e)
+            # otherwise, try again, this may be a transient timeout.
+            pass

 def download_s3str(module, s3, bucket, obj):
     try:
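Note that the loop above retries only on ssl.SSLError while attempts remain; s3.provider.storage_copy_error still fails immediately on the first occurrence. The same pattern decoupled from boto, as a standalone sketch (get_with_retries is a hypothetical name, and IOError stands in for the transient error type):

    def get_with_retries(fetch, retries):
        # Run fetch() up to retries + 1 times; re-raise only once the
        # retry budget is exhausted, treating earlier failures as transient.
        for attempt in range(retries + 1):
            try:
                return fetch()
            except IOError:
                if attempt >= retries:
                    raise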
@@ -277,6 +295,7 @@ def main():
             s3_url = dict(aliases=['S3_URL']),
             overwrite = dict(aliases=['force'], default='always'),
             metadata = dict(type='dict'),
+            retries = dict(aliases=['retry'], type='int', default=0),
         ),
     )
     module = AnsibleModule(argument_spec=argument_spec)
@@ -294,6 +313,7 @@ def main():
     s3_url = module.params.get('s3_url')
     overwrite = module.params.get('overwrite')
     metadata = module.params.get('metadata')
+    retries = module.params.get('retries')

     if overwrite not in ['always', 'never', 'different']:
         if module.boolean(overwrite):
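With the spec above, a task may use either retries: or retry:, and type='int' should coerce a quoted YAML value to an integer before it reaches module.params; that is AnsibleModule's usual argument handling, assumed here rather than shown in the diff. A hypothetical stand-in for that resolution:

    def resolve_retries(task_args):
        # Mimic the aliases=['retry'] fallback and the type='int' coercion.
        raw = task_args.get('retries', task_args.get('retry', 0))
        return int(raw)

    assert resolve_retries({'retry': '2'}) == 2   # alias, string from YAML
    assert resolve_retries({}) == 0               # spec default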
@@ -363,7 +383,7 @@ def main():
         # If the destination path doesn't exist, no need to md5um etag check, so just download.
         pathrtn = path_check(dest)
         if pathrtn is False:
-            download_s3file(module, s3, bucket, obj, dest)
+            download_s3file(module, s3, bucket, obj, dest, retries)

         # Compare the remote MD5 sum of the object with the local dest md5sum, if it already exists.
         if pathrtn is True:
@@ -372,13 +392,13 @@ def main():
             if md5_local == md5_remote:
                 sum_matches = True
                 if overwrite == 'always':
-                    download_s3file(module, s3, bucket, obj, dest)
+                    download_s3file(module, s3, bucket, obj, dest, retries)
                 else:
                     module.exit_json(msg="Local and remote object are identical, ignoring. Use overwrite=always parameter to force.", changed=False)
             else:
                 sum_matches = False
                 if overwrite in ('always', 'different'):
-                    download_s3file(module, s3, bucket, obj, dest)
+                    download_s3file(module, s3, bucket, obj, dest, retries)
                 else:
                     module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force download.")

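The three call sites updated so far all hang off the same overwrite decision. Condensed into one hypothetical helper for readability (the module itself keeps these branches inline, as shown):

    def should_download(dest_exists, sums_match, overwrite):
        if not dest_exists:
            return True               # nothing local to compare against
        if overwrite == 'always':
            return True               # forced, even when checksums match
        if overwrite == 'different':
            return not sums_match     # fetch only when content changed
        return False                  # 'never': keep the existing file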
@@ -389,6 +409,7 @@ def main():
         # At this point explicitly define the overwrite condition.
         if sum_matches is True and pathrtn is True and overwrite == 'always':
             download_s3file(module, s3, bucket, obj, dest)
+            download_s3file(module, s3, bucket, obj, dest, retries)

         # If sum does not match but the destination exists, we