Resolves issue with vmware_host module for v2.0
When this module was written back in May 2015 we were using 1.9.x. Being lazy I added to param the objects that the other functions would need. What I have noticed is in 2.0 exit_json is trying to jsonify those complex objects and failing. This PR resolves that issue with the vmware_host module. @kamsz reported this issue in https://github.com/ansible/ansible-modules-extras/pull/1568 Playbook ``` - name: Add Host local_action: module: vmware_host hostname: "{{ mgmt_ip_address }}" username: "{{ vcsa_user }}" password: "{{ vcsa_pass }}" datacenter_name: "{{ mgmt_vdc }}" cluster_name: "{{ mgmt_cluster }}" esxi_hostname: "{{ hostvars[item].hostname }}" esxi_username: "{{ esxi_username }}" esxi_password: "{{ site_passwd }}" state: present with_items: groups['foundation_esxi'] ``` Module Testing ``` TASK [Add Host] **************************************************************** task path: /opt/autodeploy/projects/emmet/site_deploy.yml:214 ESTABLISH LOCAL CONNECTION FOR USER: root localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937 )" ) localhost PUT /tmp/tmppmr9i9 TO /root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/vmware_host localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/" > /dev/null 2>&1 localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834 )" ) localhost PUT /tmp/tmpVB81f2 TO /root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/vmware_host localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/vmware_host; rm -rf 
"/root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/" > /dev/null 2>&1 localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563 )" ) localhost PUT /tmp/tmpFB7VQB TO /root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/vmware_host localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/" > /dev/null 2>&1 changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-01) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp001", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-01", "result": "'vim.HostSystem:host-15'"} changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-02) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp002", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-02", "result": "'vim.HostSystem:host-20'"} changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-03) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp003", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": 
"VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-03", "result": "'vim.HostSystem:host-21'"} ```
This commit is contained in:
parent
fb3bb746b2
commit
cef9e42896
1 changed file with 98 additions and 101 deletions
|
@@ -87,102 +87,118 @@ except ImportError:
|
||||||
HAS_PYVMOMI = False
|
HAS_PYVMOMI = False
|
||||||
|
|
||||||
|
|
||||||
def find_host_by_cluster_datacenter(module):
|
class VMwareHost(object):
|
||||||
datacenter_name = module.params['datacenter_name']
|
def __init__(self, module):
|
||||||
cluster_name = module.params['cluster_name']
|
self.module = module
|
||||||
content = module.params['content']
|
self.datacenter_name = module.params['datacenter_name']
|
||||||
esxi_hostname = module.params['esxi_hostname']
|
self.cluster_name = module.params['cluster_name']
|
||||||
|
self.esxi_hostname = module.params['esxi_hostname']
|
||||||
|
self.esxi_username = module.params['esxi_username']
|
||||||
|
self.esxi_password = module.params['esxi_password']
|
||||||
|
self.state = module.params['state']
|
||||||
|
self.dc = None
|
||||||
|
self.cluster = None
|
||||||
|
self.host = None
|
||||||
|
self.content = connect_to_api(module)
|
||||||
|
|
||||||
dc = find_datacenter_by_name(content, datacenter_name)
|
def process_state(self):
|
||||||
cluster = find_cluster_by_name_datacenter(dc, cluster_name)
|
try:
|
||||||
|
# Currently state_update_dvs is not implemented.
|
||||||
|
host_states = {
|
||||||
|
'absent': {
|
||||||
|
'present': self.state_remove_host,
|
||||||
|
'absent': self.state_exit_unchanged,
|
||||||
|
},
|
||||||
|
'present': {
|
||||||
|
'present': self.state_exit_unchanged,
|
||||||
|
'absent': self.state_add_host,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for host in cluster.host:
|
host_states[self.state][self.check_host_state()]()
|
||||||
if host.name == esxi_hostname:
|
|
||||||
return host, cluster
|
|
||||||
|
|
||||||
return None, cluster
|
except vmodl.RuntimeFault as runtime_fault:
|
||||||
|
self.module.fail_json(msg=runtime_fault.msg)
|
||||||
|
except vmodl.MethodFault as method_fault:
|
||||||
|
self.module.fail_json(msg=method_fault.msg)
|
||||||
|
except Exception as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
def find_host_by_cluster_datacenter(self):
|
||||||
|
self.dc = find_datacenter_by_name(self.content, self.datacenter_name)
|
||||||
|
self.cluster = find_cluster_by_name_datacenter(self.dc, self.cluster_name)
|
||||||
|
|
||||||
def add_host_to_vcenter(module):
|
for host in self.cluster.host:
|
||||||
cluster = module.params['cluster']
|
if host.name == self.esxi_hostname:
|
||||||
|
return host, self.cluster
|
||||||
|
|
||||||
host_connect_spec = vim.host.ConnectSpec()
|
return None, self.cluster
|
||||||
host_connect_spec.hostName = module.params['esxi_hostname']
|
|
||||||
host_connect_spec.userName = module.params['esxi_username']
|
|
||||||
host_connect_spec.password = module.params['esxi_password']
|
|
||||||
host_connect_spec.force = True
|
|
||||||
host_connect_spec.sslThumbprint = ""
|
|
||||||
as_connected = True
|
|
||||||
esxi_license = None
|
|
||||||
resource_pool = None
|
|
||||||
|
|
||||||
try:
|
def add_host_to_vcenter(self):
|
||||||
task = cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
|
host_connect_spec = vim.host.ConnectSpec()
|
||||||
|
host_connect_spec.hostName = self.esxi_hostname
|
||||||
|
host_connect_spec.userName = self.esxi_username
|
||||||
|
host_connect_spec.password = self.esxi_password
|
||||||
|
host_connect_spec.force = True
|
||||||
|
host_connect_spec.sslThumbprint = ""
|
||||||
|
as_connected = True
|
||||||
|
esxi_license = None
|
||||||
|
resource_pool = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
task = self.cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
|
||||||
|
success, result = wait_for_task(task)
|
||||||
|
return success, result
|
||||||
|
except TaskError as add_task_error:
|
||||||
|
# This is almost certain to fail the first time.
|
||||||
|
# In order to get the sslThumbprint we first connect
|
||||||
|
# get the vim.fault.SSLVerifyFault then grab the sslThumbprint
|
||||||
|
# from that object.
|
||||||
|
#
|
||||||
|
# args is a tuple, selecting the first tuple
|
||||||
|
ssl_verify_fault = add_task_error.args[0]
|
||||||
|
host_connect_spec.sslThumbprint = ssl_verify_fault.thumbprint
|
||||||
|
|
||||||
|
task = self.cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
|
||||||
success, result = wait_for_task(task)
|
success, result = wait_for_task(task)
|
||||||
return success, result
|
return success, result
|
||||||
except TaskError as add_task_error:
|
|
||||||
# This is almost certain to fail the first time.
|
|
||||||
# In order to get the sslThumbprint we first connect
|
|
||||||
# get the vim.fault.SSLVerifyFault then grab the sslThumbprint
|
|
||||||
# from that object.
|
|
||||||
#
|
|
||||||
# args is a tuple, selecting the first tuple
|
|
||||||
ssl_verify_fault = add_task_error.args[0]
|
|
||||||
host_connect_spec.sslThumbprint = ssl_verify_fault.thumbprint
|
|
||||||
|
|
||||||
task = cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
|
def state_exit_unchanged(self):
|
||||||
success, result = wait_for_task(task)
|
self.module.exit_json(changed=False)
|
||||||
return success, result
|
|
||||||
|
|
||||||
|
def state_remove_host(self):
|
||||||
|
changed = True
|
||||||
|
result = None
|
||||||
|
if not self.module.check_mode:
|
||||||
|
if not self.host.runtime.inMaintenanceMode:
|
||||||
|
maintenance_mode_task = self.host.EnterMaintenanceMode_Task(300, True, None)
|
||||||
|
changed, result = wait_for_task(maintenance_mode_task)
|
||||||
|
|
||||||
def state_exit_unchanged(module):
|
if changed:
|
||||||
module.exit_json(changed=False)
|
task = self.host.Destroy_Task()
|
||||||
|
changed, result = wait_for_task(task)
|
||||||
|
else:
|
||||||
|
raise Exception(result)
|
||||||
|
self.module.exit_json(changed=changed, result=str(result))
|
||||||
|
|
||||||
|
def state_update_host(self):
|
||||||
|
self.module.exit_json(changed=False, msg="Currently not implemented.")
|
||||||
|
|
||||||
def state_remove_host(module):
|
def state_add_host(self):
|
||||||
host = module.params['host']
|
changed = True
|
||||||
changed = True
|
result = None
|
||||||
result = None
|
|
||||||
if not module.check_mode:
|
|
||||||
if not host.runtime.inMaintenanceMode:
|
|
||||||
maintenance_mode_task = host.EnterMaintenanceMode_Task(300, True, None)
|
|
||||||
changed, result = wait_for_task(maintenance_mode_task)
|
|
||||||
|
|
||||||
if changed:
|
if not self.module.check_mode:
|
||||||
task = host.Destroy_Task()
|
changed, result = self.add_host_to_vcenter()
|
||||||
changed, result = wait_for_task(task)
|
self.module.exit_json(changed=changed, result=str(result))
|
||||||
|
|
||||||
|
def check_host_state(self):
|
||||||
|
self.host, self.cluster = self.find_host_by_cluster_datacenter()
|
||||||
|
|
||||||
|
if self.host is None:
|
||||||
|
return 'absent'
|
||||||
else:
|
else:
|
||||||
raise Exception(result)
|
return 'present'
|
||||||
module.exit_json(changed=changed, result=str(result))
|
|
||||||
|
|
||||||
|
|
||||||
def state_update_host(module):
|
|
||||||
module.exit_json(changed=False, msg="Currently not implemented.")
|
|
||||||
|
|
||||||
|
|
||||||
def state_add_host(module):
|
|
||||||
|
|
||||||
changed = True
|
|
||||||
result = None
|
|
||||||
|
|
||||||
if not module.check_mode:
|
|
||||||
changed, result = add_host_to_vcenter(module)
|
|
||||||
module.exit_json(changed=changed, result=str(result))
|
|
||||||
|
|
||||||
|
|
||||||
def check_host_state(module):
|
|
||||||
|
|
||||||
content = connect_to_api(module)
|
|
||||||
module.params['content'] = content
|
|
||||||
|
|
||||||
host, cluster = find_host_by_cluster_datacenter(module)
|
|
||||||
|
|
||||||
module.params['cluster'] = cluster
|
|
||||||
if host is None:
|
|
||||||
return 'absent'
|
|
||||||
else:
|
|
||||||
module.params['host'] = host
|
|
||||||
return 'present'
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
@@ -199,27 +215,8 @@ def main():
|
||||||
if not HAS_PYVMOMI:
|
if not HAS_PYVMOMI:
|
||||||
module.fail_json(msg='pyvmomi is required for this module')
|
module.fail_json(msg='pyvmomi is required for this module')
|
||||||
|
|
||||||
try:
|
vmware_host = VMwareHost(module)
|
||||||
# Currently state_update_dvs is not implemented.
|
vmware_host.process_state()
|
||||||
host_states = {
|
|
||||||
'absent': {
|
|
||||||
'present': state_remove_host,
|
|
||||||
'absent': state_exit_unchanged,
|
|
||||||
},
|
|
||||||
'present': {
|
|
||||||
'present': state_exit_unchanged,
|
|
||||||
'absent': state_add_host,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
host_states[module.params['state']][check_host_state(module)](module)
|
|
||||||
|
|
||||||
except vmodl.RuntimeFault as runtime_fault:
|
|
||||||
module.fail_json(msg=runtime_fault.msg)
|
|
||||||
except vmodl.MethodFault as method_fault:
|
|
||||||
module.fail_json(msg=method_fault.msg)
|
|
||||||
except Exception as e:
|
|
||||||
module.fail_json(msg=str(e))
|
|
||||||
|
|
||||||
from ansible.module_utils.vmware import *
|
from ansible.module_utils.vmware import *
|
||||||
from ansible.module_utils.basic import *
|
from ansible.module_utils.basic import *
|
||||||
|
|
Loading…
Reference in a new issue