Update f5 module utils from downstream (#45819)
* various refactoring * lgtm fixes * bigiq support to different auth providers
This commit is contained in:
parent
1ed3bd9168
commit
35e0434042
5 changed files with 255 additions and 90 deletions
|
@ -7,6 +7,7 @@ from __future__ import absolute_import, division, print_function
|
|||
__metaclass__ = type
|
||||
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
try:
|
||||
|
@ -73,14 +74,21 @@ class F5RestClient(F5BaseClient):
|
|||
return self._client
|
||||
for x in range(0, 10):
|
||||
try:
|
||||
provider = self.provider['auth_provider'] or 'local'
|
||||
url = "https://{0}:{1}/mgmt/shared/authn/login".format(
|
||||
self.provider['server'], self.provider['server_port']
|
||||
)
|
||||
payload = {
|
||||
'username': self.provider['user'],
|
||||
'password': self.provider['password'],
|
||||
'loginProviderName': self.provider['auth_provider'] or 'local'
|
||||
}
|
||||
|
||||
# - local is a special provider that is baked into the system and
|
||||
# has no loginReference
|
||||
if provider != 'local':
|
||||
login_ref = self.get_login_ref(provider)
|
||||
payload.update(login_ref)
|
||||
|
||||
session = iControlRestSession()
|
||||
session.verify = self.provider['validate_certs']
|
||||
response = session.post(url, json=payload)
|
||||
|
@ -102,3 +110,68 @@ class F5RestClient(F5BaseClient):
|
|||
if exc is not None:
|
||||
error += ' The reported error was "{0}".'.format(str(exc))
|
||||
raise F5ModuleError(error)
|
||||
|
||||
def get_login_ref(self, provider):
|
||||
info = self.read_provider_info_from_device()
|
||||
uuids = [os.path.basename(os.path.dirname(x['link'])) for x in info['providers'] if '-' in x['link']]
|
||||
if provider in uuids:
|
||||
name = self.get_name_of_provider_id(info, provider)
|
||||
if not name:
|
||||
raise F5ModuleError(
|
||||
"No name found for the provider '{0}'".format(provider)
|
||||
)
|
||||
return dict(
|
||||
loginReference=dict(
|
||||
link="https://localhost/mgmt/cm/system/authn/providers/{0}/{1}/login".format(name, provider)
|
||||
)
|
||||
)
|
||||
names = [os.path.basename(os.path.dirname(x['link'])) for x in info['providers'] if '-' in x['link']]
|
||||
if names.count(provider) > 1:
|
||||
raise F5ModuleError(
|
||||
"Ambiguous auth_provider provided. Please specify a specific provider ID."
|
||||
)
|
||||
uuid = self.get_id_of_provider_name(info, provider)
|
||||
if not uuid:
|
||||
raise F5ModuleError(
|
||||
"No name found for the provider '{0}'".format(provider)
|
||||
)
|
||||
return dict(
|
||||
loginReference=dict(
|
||||
link="https://localhost/mgmt/cm/system/authn/providers/{0}/{1}/login".format(provider, uuid)
|
||||
)
|
||||
)
|
||||
|
||||
def get_name_of_provider_id(self, info, provider):
|
||||
# Add slashes to the provider name so that it specifically finds the provider
|
||||
# as part of the URL and not a part of another substring
|
||||
provider = '/' + provider + '/'
|
||||
for x in info['providers']:
|
||||
if x['link'].find(provider) > -1:
|
||||
return x['name']
|
||||
return None
|
||||
|
||||
def get_id_of_provider_name(self, info, provider):
|
||||
for x in info['providers']:
|
||||
if x['name'] == provider:
|
||||
return os.path.basename(os.path.dirname(x['link']))
|
||||
return None
|
||||
|
||||
def read_provider_info_from_device(self):
|
||||
uri = "https://{0}:{1}/info/system".format(
|
||||
self.provider['server'], self.provider['server_port']
|
||||
)
|
||||
session = iControlRestSession()
|
||||
session.verify = self.provider['validate_certs']
|
||||
|
||||
resp = session.get(uri)
|
||||
try:
|
||||
response = resp.json()
|
||||
except ValueError as ex:
|
||||
raise F5ModuleError(str(ex))
|
||||
|
||||
if 'code' in response and response['code'] == 400:
|
||||
if 'message' in response:
|
||||
raise F5ModuleError(response['message'])
|
||||
else:
|
||||
raise F5ModuleError(resp.content)
|
||||
return response
|
||||
|
|
|
@ -532,9 +532,18 @@ class F5BaseClient(object):
|
|||
|
||||
def merge_provider_params(self):
|
||||
result = dict()
|
||||
|
||||
provider = self.params.get('provider', {})
|
||||
|
||||
self.merge_provider_server_param(result, provider)
|
||||
self.merge_provider_server_port_param(result, provider)
|
||||
self.merge_provider_validate_certs_param(result, provider)
|
||||
self.merge_provider_auth_provider_param(result, provider)
|
||||
self.merge_provider_user_param(result, provider)
|
||||
self.merge_provider_password_param(result, provider)
|
||||
|
||||
return result
|
||||
|
||||
def merge_provider_server_param(self, result, provider):
|
||||
if self.validate_params('server', provider):
|
||||
result['server'] = provider['server']
|
||||
elif self.validate_params('server', self.params):
|
||||
|
@ -544,6 +553,7 @@ class F5BaseClient(object):
|
|||
else:
|
||||
raise F5ModuleError('Server parameter cannot be None or missing, please provide a valid value')
|
||||
|
||||
def merge_provider_server_port_param(self, result, provider):
|
||||
if self.validate_params('server_port', provider):
|
||||
result['server_port'] = provider['server_port']
|
||||
elif self.validate_params('server_port', self.params):
|
||||
|
@ -553,6 +563,7 @@ class F5BaseClient(object):
|
|||
else:
|
||||
result['server_port'] = 443
|
||||
|
||||
def merge_provider_validate_certs_param(self, result, provider):
|
||||
if self.validate_params('validate_certs', provider):
|
||||
result['validate_certs'] = provider['validate_certs']
|
||||
elif self.validate_params('validate_certs', self.params):
|
||||
|
@ -561,14 +572,37 @@ class F5BaseClient(object):
|
|||
result['validate_certs'] = os.environ['F5_VALIDATE_CERTS']
|
||||
else:
|
||||
result['validate_certs'] = True
|
||||
if result['validate_certs'] in BOOLEANS_TRUE:
|
||||
result['validate_certs'] = True
|
||||
else:
|
||||
result['validate_certs'] = False
|
||||
|
||||
def merge_provider_auth_provider_param(self, result, provider):
|
||||
if self.validate_params('auth_provider', provider):
|
||||
result['auth_provider'] = provider['auth_provider']
|
||||
elif self.validate_params('auth_provider', self.params):
|
||||
result['auth_provider'] = self.params['auth_provider']
|
||||
elif self.validate_params('F5_AUTH_PROVIDER', os.environ):
|
||||
result['auth_provider'] = os.environ['F5_AUTH_PROVIDER']
|
||||
else:
|
||||
result['auth_provider'] = None
|
||||
|
||||
# Handle a specific case of the user specifying ``|default(omit)``
|
||||
# as the value to the auth_provider.
|
||||
#
|
||||
# In this case, Ansible will inject the omit-placeholder value
|
||||
# and the module params incorrectly interpret this. This case
|
||||
# can occur when specifying ``|default(omit)`` for a variable
|
||||
# value defined in the ``environment`` section of a Play.
|
||||
#
|
||||
# An example of the omit placeholder is shown below.
|
||||
#
|
||||
# __omit_place_holder__11bd71a2840bff144594b9cc2149db814256f253
|
||||
#
|
||||
if result['auth_provider'] is not None and '__omit_place_holder__' in result['auth_provider']:
|
||||
result['auth_provider'] = None
|
||||
|
||||
def merge_provider_user_param(self, result, provider):
|
||||
if self.validate_params('user', provider):
|
||||
result['user'] = provider['user']
|
||||
elif self.validate_params('user', self.params):
|
||||
|
@ -580,6 +614,7 @@ class F5BaseClient(object):
|
|||
else:
|
||||
result['user'] = None
|
||||
|
||||
def merge_provider_password_param(self, result, provider):
|
||||
if self.validate_params('password', provider):
|
||||
result['password'] = provider['password']
|
||||
elif self.validate_params('password', self.params):
|
||||
|
@ -591,13 +626,6 @@ class F5BaseClient(object):
|
|||
else:
|
||||
result['password'] = None
|
||||
|
||||
if result['validate_certs'] in BOOLEANS_TRUE:
|
||||
result['validate_certs'] = True
|
||||
else:
|
||||
result['validate_certs'] = False
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class AnsibleF5Parameters(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
|
|
@ -10,9 +10,9 @@ __metaclass__ = type
|
|||
def cmp_simple_list(want, have):
|
||||
if want is None:
|
||||
return None
|
||||
if have is None and want == '':
|
||||
if have is None and want in ['', 'none']:
|
||||
return None
|
||||
if have is not None and want == '':
|
||||
if have is not None and want in ['', 'none']:
|
||||
return []
|
||||
if have is None:
|
||||
return want
|
||||
|
|
|
@ -8,14 +8,16 @@ __metaclass__ = type
|
|||
|
||||
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.urls import open_url, fetch_url
|
||||
from ansible.module_utils.parsing.convert_bool import BOOLEANS
|
||||
from ansible.module_utils.six import string_types
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.six import iteritems
|
||||
from ansible.module_utils.urls import urllib_error
|
||||
from ansible.module_utils.urls import urlparse
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.six import PY3
|
||||
|
||||
|
@ -139,7 +141,6 @@ class PreparedRequest(object):
|
|||
|
||||
def prepare_body(self, data, json=None):
|
||||
body = None
|
||||
content_type = None
|
||||
|
||||
if not data and json is not None:
|
||||
self.headers['Content-Type'] = 'application/json'
|
||||
|
@ -149,10 +150,6 @@ class PreparedRequest(object):
|
|||
|
||||
if data:
|
||||
body = data
|
||||
content_type = None
|
||||
|
||||
if content_type and 'content-type' not in self.headers:
|
||||
self.headers['Content-Type'] = content_type
|
||||
|
||||
self.body = body
|
||||
|
||||
|
@ -397,7 +394,7 @@ def download_file(client, url, dest):
|
|||
return True
|
||||
|
||||
|
||||
def upload_file(client, url, dest):
|
||||
def upload_file(client, url, src, dest=None):
|
||||
"""Upload a file to an arbitrary URL.
|
||||
|
||||
This method is responsible for correctly chunking an upload request to an
|
||||
|
@ -406,7 +403,8 @@ def upload_file(client, url, dest):
|
|||
Arguments:
|
||||
client (object): The F5RestClient connection object.
|
||||
url (string): The URL to upload a file to.
|
||||
dest (string): The file to be uploaded.
|
||||
src (string): The file to be uploaded.
|
||||
dest (string): The file name to create on the remote device.
|
||||
|
||||
Examples:
|
||||
The ``dest`` may be either an absolute or relative path. The basename
|
||||
|
@ -433,73 +431,139 @@ def upload_file(client, url, dest):
|
|||
Raises:
|
||||
F5ModuleError: Raised if ``retries`` limit is exceeded.
|
||||
"""
|
||||
with open(dest, 'rb') as fileobj:
|
||||
size = os.stat(dest).st_size
|
||||
if isinstance(src, StringIO):
|
||||
fileobj = src
|
||||
else:
|
||||
fileobj = open(src, 'rb')
|
||||
|
||||
# This appears to be the largest chunk size that iControlREST can handle.
|
||||
#
|
||||
# The trade-off you are making by choosing a chunk size is speed, over size of
|
||||
# transmission. A lower chunk size will be slower because a smaller amount of
|
||||
# data is read from disk and sent via HTTP. Lots of disk reads are slower and
|
||||
# There is overhead in sending the request to the BIG-IP.
|
||||
#
|
||||
# Larger chunk sizes are faster because more data is read from disk in one
|
||||
# go, and therefore more data is transmitted to the BIG-IP in one HTTP request.
|
||||
#
|
||||
# If you are transmitting over a slow link though, it may be more reliable to
|
||||
# transmit many small chunks that fewer large chunks. It will clearly take
|
||||
# longer, but it may be more robust.
|
||||
chunk_size = 1024 * 7168
|
||||
start = 0
|
||||
retries = 0
|
||||
basename = os.path.basename(dest)
|
||||
url = '{0}/{1}'.format(url.rstrip('/'), basename)
|
||||
try:
|
||||
size = os.stat(src).st_size
|
||||
is_file = True
|
||||
except TypeError:
|
||||
src.seek(0, os.SEEK_END)
|
||||
size = src.tell()
|
||||
src.seek(0)
|
||||
is_file = False
|
||||
|
||||
while True:
|
||||
if retries == 3:
|
||||
# Retries are used here to allow the REST API to recover if you kill
|
||||
# an upload mid-transfer.
|
||||
# This appears to be the largest chunk size that iControlREST can handle.
|
||||
#
|
||||
# The trade-off you are making by choosing a chunk size is speed, over size of
|
||||
# transmission. A lower chunk size will be slower because a smaller amount of
|
||||
# data is read from disk and sent via HTTP. Lots of disk reads are slower and
|
||||
# There is overhead in sending the request to the BIG-IP.
|
||||
#
|
||||
# Larger chunk sizes are faster because more data is read from disk in one
|
||||
# go, and therefore more data is transmitted to the BIG-IP in one HTTP request.
|
||||
#
|
||||
# If you are transmitting over a slow link though, it may be more reliable to
|
||||
# transmit many small chunks that fewer large chunks. It will clearly take
|
||||
# longer, but it may be more robust.
|
||||
chunk_size = 1024 * 7168
|
||||
start = 0
|
||||
retries = 0
|
||||
if dest is None and is_file:
|
||||
basename = os.path.basename(src)
|
||||
else:
|
||||
basename = dest
|
||||
url = '{0}/{1}'.format(url.rstrip('/'), basename)
|
||||
|
||||
while True:
|
||||
if retries == 3:
|
||||
# Retries are used here to allow the REST API to recover if you kill
|
||||
# an upload mid-transfer.
|
||||
#
|
||||
# There exists a case where retrying a new upload will result in the
|
||||
# API returning the POSTed payload (in bytes) with a non-200 response
|
||||
# code.
|
||||
#
|
||||
# Retrying (after seeking back to 0) seems to resolve this problem.
|
||||
raise F5ModuleError(
|
||||
"Failed to upload file too many times."
|
||||
)
|
||||
try:
|
||||
file_slice = fileobj.read(chunk_size)
|
||||
if not file_slice:
|
||||
break
|
||||
|
||||
current_bytes = len(file_slice)
|
||||
if current_bytes < chunk_size:
|
||||
end = size
|
||||
else:
|
||||
end = start + current_bytes
|
||||
headers = {
|
||||
'Content-Range': '%s-%s/%s' % (start, end - 1, size),
|
||||
'Content-Type': 'application/octet-stream'
|
||||
}
|
||||
|
||||
# Data should always be sent using the ``data`` keyword and not the
|
||||
# ``json`` keyword. This allows bytes to be sent (such as in the case
|
||||
# of uploading ISO files.
|
||||
response = client.api.post(url, headers=headers, data=file_slice)
|
||||
|
||||
if response.status != 200:
|
||||
# When this fails, the output is usually the body of whatever you
|
||||
# POSTed. This is almost always unreadable because it is a series
|
||||
# of bytes.
|
||||
#
|
||||
# There exists a case where retrying a new upload will result in the
|
||||
# API returning the POSTed payload (in bytes) with a non-200 response
|
||||
# code.
|
||||
#
|
||||
# Retrying (after seeking back to 0) seems to resolve this problem.
|
||||
raise F5ModuleError(
|
||||
"Failed to upload file too many times."
|
||||
)
|
||||
try:
|
||||
file_slice = fileobj.read(chunk_size)
|
||||
if not file_slice:
|
||||
break
|
||||
|
||||
current_bytes = len(file_slice)
|
||||
if current_bytes < chunk_size:
|
||||
end = size
|
||||
else:
|
||||
end = start + current_bytes
|
||||
headers = {
|
||||
'Content-Range': '%s-%s/%s' % (start, end - 1, size),
|
||||
'Content-Type': 'application/octet-stream'
|
||||
}
|
||||
|
||||
# Data should always be sent using the ``data`` keyword and not the
|
||||
# ``json`` keyword. This allows bytes to be sent (such as in the case
|
||||
# of uploading ISO files.
|
||||
response = client.api.post(url, headers=headers, data=file_slice)
|
||||
|
||||
if response.status != 200:
|
||||
# When this fails, the output is usually the body of whatever you
|
||||
# POSTed. This is almost always unreadable because it is a series
|
||||
# of bytes.
|
||||
#
|
||||
# Therefore, including an empty exception here.
|
||||
raise F5ModuleError()
|
||||
start += current_bytes
|
||||
except F5ModuleError:
|
||||
# You must seek back to the beginning of the file upon exception.
|
||||
#
|
||||
# If this is not done, then you risk uploading a partial file.
|
||||
fileobj.seek(0)
|
||||
retries += 1
|
||||
# Therefore, including an empty exception here.
|
||||
raise F5ModuleError()
|
||||
start += current_bytes
|
||||
except F5ModuleError:
|
||||
# You must seek back to the beginning of the file upon exception.
|
||||
#
|
||||
# If this is not done, then you risk uploading a partial file.
|
||||
fileobj.seek(0)
|
||||
retries += 1
|
||||
return True
|
||||
|
||||
|
||||
def tmos_version(client):
|
||||
uri = "https://{0}:{1}/mgmt/tm/sys/".format(
|
||||
client.provider['server'],
|
||||
client.provider['server_port'],
|
||||
)
|
||||
resp = client.api.get(uri)
|
||||
|
||||
try:
|
||||
response = resp.json()
|
||||
except ValueError as ex:
|
||||
raise F5ModuleError(str(ex))
|
||||
|
||||
if 'code' in response and response['code'] in [400, 403]:
|
||||
if 'message' in response:
|
||||
raise F5ModuleError(response['message'])
|
||||
else:
|
||||
raise F5ModuleError(resp.content)
|
||||
|
||||
to_parse = urlparse(response['selfLink'])
|
||||
query = to_parse.query
|
||||
version = query.split('=')[1]
|
||||
return version
|
||||
|
||||
|
||||
def module_provisioned(client, module_name):
|
||||
modules = dict(
|
||||
afm='provisioned.cpu.afm', avr='provisioned.cpu.avr', asm='provisioned.cpu.asm',
|
||||
apm='provisioned.cpu.apm', gtm='provisioned.cpu.gtm', ilx='provisioned.cpu.ilx',
|
||||
pem='provisioned.cpu.pem', vcmp='provisioned.cpu.vcmp'
|
||||
)
|
||||
uri = "https://{0}:{1}/mgmt/tm/sys/db/{2}".format(
|
||||
client.provider['server'],
|
||||
client.provider['server_port'],
|
||||
modules[module_name]
|
||||
)
|
||||
resp = client.api.get(uri)
|
||||
|
||||
try:
|
||||
response = resp.json()
|
||||
except ValueError as ex:
|
||||
raise F5ModuleError(str(ex))
|
||||
|
||||
if 'code' in response and response['code'] in [400, 403]:
|
||||
if 'message' in response:
|
||||
raise F5ModuleError(response['message'])
|
||||
else:
|
||||
raise F5ModuleError(resp.content)
|
||||
if int(response['value']) == 0:
|
||||
return False
|
||||
return True
|
||||
|
|
|
@ -73,7 +73,7 @@ def ipv6_netmask_to_cidr(mask):
|
|||
break
|
||||
count += bit_masks.index(int(w, 16))
|
||||
return count
|
||||
except:
|
||||
except Exception:
|
||||
return -1
|
||||
|
||||
|
||||
|
|
Loading…
Reference in a new issue