s3 module: fix urlparse invocation and netloc mixup on Python 3 (#20836)
This commit is contained in:
parent 65f561e496
commit 3f14061584

2 changed files with 83 additions and 31 deletions

lib/ansible/modules/cloud/amazon/s3.py
@@ -415,7 +415,7 @@ def get_download_url(module, s3, bucket, obj, expiry, changed=True):
 def is_fakes3(s3_url):
     """ Return True if s3_url has scheme fakes3:// """
     if s3_url is not None:
-        return urlparse.urlparse(s3_url).scheme in ('fakes3', 'fakes3s')
+        return urlparse(s3_url).scheme in ('fakes3', 'fakes3s')
     else:
         return False
 
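For context on the change above: on Python 2 the code could do "import urlparse" and then call urlparse.urlparse(url), but on Python 3 the function lives in urllib.parse, so the module now calls the imported urlparse() function directly (presumably brought in through the bundled six shim, the same import the new unit test uses). A minimal sketch with made-up URLs:

    # One spelling that works on both Python 2 and Python 3 via the six shim.
    from ansible.module_utils.six.moves.urllib.parse import urlparse

    print(urlparse("fakes3://localhost:4569").scheme)  # 'fakes3'
    print(urlparse("https://example.com/key").scheme)  # 'https'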
@@ -424,8 +424,8 @@ def is_walrus(s3_url):
 
     We assume anything other than *.amazonaws.com is Walrus"""
     if s3_url is not None:
-        o = urlparse.urlparse(s3_url)
-        return not o.hostname.endswith('amazonaws.com')
+        o = urlparse(s3_url)
+        return not o.netloc.endswith('amazonaws.com')
     else:
         return False
 
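The second fix swaps o.hostname for o.netloc. For reference, the two attributes differ once a port (or user info) is part of the URL; a minimal sketch with a made-up endpoint:

    from ansible.module_utils.six.moves.urllib.parse import urlparse

    o = urlparse("https://objects.example.com:8773/services/Walrus")
    print(o.netloc)    # 'objects.example.com:8773' (host plus port)
    print(o.hostname)  # 'objects.example.com'      (lower-cased host only)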
@@ -517,34 +517,7 @@ def main():
     # Look at s3_url and tweak connection settings
     # if connecting to RGW, Walrus or fakes3
     try:
-        if s3_url and rgw:
-            rgw = urlparse.urlparse(s3_url)
-            s3 = boto.connect_s3(
-                is_secure=rgw.scheme == 'https',
-                host=rgw.hostname,
-                port=rgw.port,
-                calling_format=OrdinaryCallingFormat(),
-                **aws_connect_kwargs
-            )
-        elif is_fakes3(s3_url):
-            fakes3 = urlparse.urlparse(s3_url)
-            s3 = S3Connection(
-                is_secure=fakes3.scheme == 'fakes3s',
-                host=fakes3.hostname,
-                port=fakes3.port,
-                calling_format=OrdinaryCallingFormat(),
-                **aws_connect_kwargs
-            )
-        elif is_walrus(s3_url):
-            walrus = urlparse.urlparse(s3_url).hostname
-            s3 = boto.connect_walrus(walrus, **aws_connect_kwargs)
-        else:
-            aws_connect_kwargs['is_secure'] = True
-            try:
-                s3 = connect_to_aws(boto.s3, location, **aws_connect_kwargs)
-            except AnsibleAWSError:
-                # use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases
-                s3 = boto.connect_s3(**aws_connect_kwargs)
+        s3 = get_s3_connection(aws_connect_kwargs, location, rgw, s3_url)
 
     except boto.exception.NoAuthHandlerFound as e:
         module.fail_json(msg='No Authentication Handler found: %s ' % str(e))
@@ -736,6 +709,39 @@ def main():
     module.exit_json(failed=False)
 
 
+def get_s3_connection(aws_connect_kwargs, location, rgw, s3_url):
+    if s3_url and rgw:
+        rgw = urlparse(s3_url)
+        s3 = boto.connect_s3(
+            is_secure=rgw.scheme == 'https',
+            host=rgw.hostname,
+            port=rgw.port,
+            calling_format=OrdinaryCallingFormat(),
+            **aws_connect_kwargs
+        )
+    elif is_fakes3(s3_url):
+        fakes3 = urlparse(s3_url)
+        s3 = S3Connection(
+            is_secure=fakes3.scheme == 'fakes3s',
+            host=fakes3.hostname,
+            port=fakes3.port,
+            calling_format=OrdinaryCallingFormat(),
+            **aws_connect_kwargs
+        )
+    elif is_walrus(s3_url):
+        walrus = urlparse(s3_url).hostname
+        s3 = boto.connect_walrus(walrus, **aws_connect_kwargs)
+    else:
+        aws_connect_kwargs['is_secure'] = True
+        try:
+            s3 = connect_to_aws(boto.s3, location, **aws_connect_kwargs)
+        except AnsibleAWSError:
+            # use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases
+            s3 = boto.connect_s3(**aws_connect_kwargs)
+    return s3
+
+
 # import module snippets
 from ansible.module_utils.basic import *
 from ansible.module_utils.ec2 import *
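A quick usage sketch for the extracted helper, mirroring how main() now calls it; the endpoint URL and credentials below are made up for illustration, and boto only records the endpoint when the connection object is built, so no request is sent:

    import ansible.modules.cloud.amazon.s3 as s3

    aws_connect_kwargs = dict(aws_access_key_id="access_key",
                              aws_secret_access_key="secret_key")
    # A fakes3:// URL takes the S3Connection branch above (rgw=False here).
    conn = s3.get_s3_connection(aws_connect_kwargs, None, False, "fakes3://localhost:4569")
    print(conn.host, conn.port)  # localhost 4569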
46  test/units/modules/cloud/amazon/test_s3.py  Normal file
@@ -0,0 +1,46 @@
+from nose.plugins.skip import SkipTest
+
+try:
+    import boto
+    HAS_BOTO = True
+except ImportError:
+    HAS_BOTO = False
+
+if not HAS_BOTO:
+    raise SkipTest("test_s3.py requires the python module 'boto3' and 'botocore'")
+
+import unittest
+import ansible.modules.cloud.amazon.s3 as s3
+from ansible.module_utils.six.moves.urllib.parse import urlparse
+
+
+class TestUrlparse(unittest.TestCase):
+
+    def test_urlparse(self):
+        actual = urlparse("http://test.com/here")
+        self.assertEqual("http", actual.scheme)
+        self.assertEqual("test.com", actual.netloc)
+        self.assertEqual("/here", actual.path)
+
+    def test_is_fakes3(self):
+        actual = s3.is_fakes3("fakes3://bla.blubb")
+        self.assertEqual(True, actual)
+
+    def test_is_walrus(self):
+        actual = s3.is_walrus("trulywalrus_but_invalid_url")
+        # I don't know if this makes sense, but this is the current behaviour...
+        self.assertEqual(True, actual)
+        actual = s3.is_walrus("http://notwalrus.amazonaws.com")
+        self.assertEqual(False, actual)
+
+    def test_get_s3_connection(self):
+        aws_connect_kwargs = dict(aws_access_key_id="access_key",
+                                  aws_secret_access_key="secret_key")
+        location=None
+        rgw=True
+        s3_url="http://bla.blubb"
+        actual = s3.get_s3_connection(aws_connect_kwargs, location, rgw, s3_url)
+        self.assertEqual("bla.blubb", actual.host)
+
+
+if __name__ == '__main__':
+    unittest.main()
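A note on why test_get_s3_connection can run without network access: with rgw=True the helper goes through boto.connect_s3, and boto builds the connection object lazily, only storing the endpoint until a real request is issued. A standalone sketch of that behaviour (the hostname is made up):

    import boto
    from boto.s3.connection import OrdinaryCallingFormat

    conn = boto.connect_s3(aws_access_key_id="access_key",
                           aws_secret_access_key="secret_key",
                           host="bla.blubb",
                           calling_format=OrdinaryCallingFormat())
    print(conn.host)  # 'bla.blubb' -- nothing has been contacted yet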