Merge pull request #1464 from qrtt1/devel

Prevent the s3 module from hitting a memory error when uploading or downloading a large file: compute MD5 checksums by reading the file in 1 MiB chunks instead of loading it into memory in a single call.
Brian Coca 2015-06-12 13:53:50 -04:00
commit acfa1b4c39

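The failure mode being fixed: the module computed checksums with hashlib.md5(open(path, 'rb').read()).hexdigest(), which reads the entire file into memory before hashing. A minimal standalone sketch of the constant-memory pattern the patch adopts (the name md5_streamed and the chunk_size parameter are illustrative additions; the patch itself hardcodes 1 MiB):

    import hashlib

    def md5_streamed(path, chunk_size=1024 ** 2):
        # Hash one chunk at a time; peak memory stays near chunk_size
        # bytes no matter how large the file is.
        md5 = hashlib.md5()
        with open(path, 'rb') as f:
            # iter() keeps calling f.read(chunk_size) until it returns b'' at EOF.
            for chunk in iter(lambda: f.read(chunk_size), b''):
                md5.update(chunk)
        return md5.hexdigest()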

@@ -299,6 +299,13 @@ def is_walrus(s3_url):
     else:
         return False
 
+def get_md5_digest(local_file):
+    md5 = hashlib.md5()
+    with open(local_file, 'rb') as f:
+        # Read 1 MiB at a time until EOF so the whole file is never
+        # held in memory at once.
+        for data in iter(lambda: f.read(1024 ** 2), b''):
+            md5.update(data)
+    return md5.hexdigest()
+
 def main():
     argument_spec = ec2_argument_spec()
@@ -413,7 +420,7 @@ def main():
         # Compare the remote MD5 sum of the object with the local dest md5sum, if it already exists.
         if pathrtn is True:
             md5_remote = keysum(module, s3, bucket, obj)
-            md5_local = hashlib.md5(open(dest, 'rb').read()).hexdigest()
+            md5_local = get_md5_digest(dest)
             if md5_local == md5_remote:
                 sum_matches = True
                 if overwrite == 'always':
@@ -457,7 +464,8 @@ def main():
         # Let's check key state. Does it exist and if it does, compute the etag md5sum.
         if bucketrtn is True and keyrtn is True:
             md5_remote = keysum(module, s3, bucket, obj)
-            md5_local = hashlib.md5(open(src, 'rb').read()).hexdigest()
+            md5_local = get_md5_digest(src)
             if md5_local == md5_remote:
                 sum_matches = True
                 if overwrite == 'always':
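
As a quick sanity check (assuming get_md5_digest from the patch is in scope and example.bin is a hypothetical test file), the streamed digest matches the one-shot digest, so the change affects memory use only:

    import hashlib

    path = 'example.bin'
    with open(path, 'rb') as f:
        one_shot = hashlib.md5(f.read()).hexdigest()
    # Both approaches hash the same bytes and must agree.
    assert get_md5_digest(path) == one_shot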