Change to fix issue found while cloning template with opaque network (#55375)

Fix formatting issues in the yml files; checks had failed.

Signed-off-by: pgbidkar <pbidkar@vmware.com>
Signed-off-by: Abhijeet Kasurde <akasurde@redhat.com>
Committed by Abhijeet Kasurde via GitHub on 2019-04-17 11:18:17 +05:30
parent 22f666c962
commit dcbfa60413
3 changed files with 106 additions and 15 deletions
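The substance of the change: when a cloned NIC is backed by an NSX opaque network, its device change has to be carried on the clone's vim.vm.RelocateSpec rather than on the vim.vm.ConfigSpec; otherwise a VM cloned from the template comes up with that NIC disconnected. Below is a minimal pyVmomi sketch of that pattern, not the module's code: template_vm, dest_folder, resource_pool, datastore and nic_spec are assumed placeholder objects.

    # Sketch only -- template_vm, dest_folder, resource_pool, datastore and
    # nic_spec are placeholders, not names taken from vmware_guest.
    from pyVmomi import vim

    relospec = vim.vm.RelocateSpec()
    relospec.pool = resource_pool        # required when cloning from a template
    relospec.datastore = datastore
    relospec.deviceChange = [nic_spec]   # opaque-network NIC change rides on the RelocateSpec

    configspec = vim.vm.ConfigSpec()     # other device changes stay on the ConfigSpec
    clonespec = vim.vm.CloneSpec(template=False, location=relospec, config=configspec)
    task = template_vm.CloneVM_Task(folder=dest_folder, name='cloned-vm', spec=clonespec)

Keeping the opaque-network NIC on the RelocateSpec is what the comment in the diff below describes as replacing the deprecated config parameter with the relocation spec.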

@@ -844,6 +844,7 @@ class PyVmomiHelper(PyVmomi):
         super(PyVmomiHelper, self).__init__(module)
         self.device_helper = PyVmomiDeviceHelper(self.module)
         self.configspec = None
+        self.relospec = None
         self.change_detected = False  # a change was detected and needs to be applied through reconfiguration
         self.change_applied = False  # a change was applied meaning at least one task succeeded
         self.customspec = None
@@ -1397,6 +1398,7 @@ class PyVmomiHelper(PyVmomi):
                 nic.device.backing.opaqueNetworkType = 'nsx.LogicalSwitch'
                 nic.device.backing.opaqueNetworkId = network_id
                 nic.device.deviceInfo.summary = 'nsx.LogicalSwitch: %s' % network_id
+                nic_change_detected = True
             else:
                 # vSwitch
                 if not isinstance(nic.device.backing, vim.vm.device.VirtualEthernetCard.NetworkBackingInfo):
@@ -1413,6 +1415,12 @@ class PyVmomiHelper(PyVmomi):
                     nic_change_detected = True
             if nic_change_detected:
-                self.configspec.deviceChange.append(nic)
+                # Change to fix the issue found while configuring opaque network
+                # VMs cloned from a template with opaque network will get disconnected
+                # Replacing deprecated config parameter with relocation Spec
+                if isinstance(self.cache.get_network(network_name), vim.OpaqueNetwork):
+                    self.relospec.deviceChange.append(nic)
+                else:
+                    self.configspec.deviceChange.append(nic)
                 self.change_detected = True
@@ -2143,6 +2151,9 @@ class PyVmomiHelper(PyVmomi):
         self.configspec = vim.vm.ConfigSpec()
         self.configspec.deviceChange = []
+        # create the relocation spec
+        self.relospec = vim.vm.RelocateSpec()
+        self.relospec.deviceChange = []
         self.configure_guestid(vm_obj=vm_obj, vm_creation=True)
         self.configure_cpu_and_memory(vm_obj=vm_obj, vm_creation=True)
         self.configure_hardware_params(vm_obj=vm_obj)
@@ -2167,13 +2178,10 @@ class PyVmomiHelper(PyVmomi):
         clone_method = None
         try:
             if self.params['template']:
-                # create the relocation spec
-                relospec = vim.vm.RelocateSpec()
-
                 # Only select specific host when ESXi hostname is provided
                 if self.params['esxi_hostname']:
-                    relospec.host = self.select_host()
-                relospec.datastore = datastore
+                    self.relospec.host = self.select_host()
+                self.relospec.datastore = datastore

                 # Convert disk present in template if is set
                 if self.params['convert']:
@@ -2189,22 +2197,22 @@ class PyVmomiHelper(PyVmomi):
                                 disk_locator.diskBackingInfo.diskMode = "persistent"
                             disk_locator.diskId = device.key
                             disk_locator.datastore = datastore
-                            relospec.disk.append(disk_locator)
+                            self.relospec.disk.append(disk_locator)

                 # https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
                 # > pool: For a clone operation from a template to a virtual machine, this argument is required.
-                relospec.pool = resource_pool
+                self.relospec.pool = resource_pool

                 linked_clone = self.params.get('linked_clone')
                 snapshot_src = self.params.get('snapshot_src', None)
                 if linked_clone:
                     if snapshot_src is not None:
-                        relospec.diskMoveType = vim.vm.RelocateSpec.DiskMoveOptions.createNewChildDiskBacking
+                        self.relospec.diskMoveType = vim.vm.RelocateSpec.DiskMoveOptions.createNewChildDiskBacking
                     else:
                         self.module.fail_json(msg="Parameter 'linked_src' and 'snapshot_src' are"
                                                   " required together for linked clone operation.")

-                clonespec = vim.vm.CloneSpec(template=self.params['is_template'], location=relospec)
+                clonespec = vim.vm.CloneSpec(template=self.params['is_template'], location=self.relospec)
                 if self.customspec:
                     clonespec.customization = self.customspec
@@ -2314,7 +2322,9 @@ class PyVmomiHelper(PyVmomi):
     def reconfigure_vm(self):
         self.configspec = vim.vm.ConfigSpec()
         self.configspec.deviceChange = []
+        # create the relocation spec
+        self.relospec = vim.vm.RelocateSpec()
+        self.relospec.deviceChange = []
         self.configure_guestid(vm_obj=self.current_vm_obj)
         self.configure_cpu_and_memory(vm_obj=self.current_vm_obj)
         self.configure_hardware_params(vm_obj=self.current_vm_obj)
@@ -2329,12 +2339,11 @@ class PyVmomiHelper(PyVmomi):
             self.configspec.annotation = str(self.params['annotation'])
             self.change_detected = True

-        relospec = vim.vm.RelocateSpec()
         if self.params['resource_pool']:
-            relospec.pool = self.get_resource_pool()
+            self.relospec.pool = self.get_resource_pool()

-            if relospec.pool != self.current_vm_obj.resourcePool:
-                task = self.current_vm_obj.RelocateVM_Task(spec=relospec)
+            if self.relospec.pool != self.current_vm_obj.resourcePool:
+                task = self.current_vm_obj.RelocateVM_Task(spec=self.relospec)
                 self.wait_for_task(task)
                 if task.info.state == 'error':
                     return {'changed': self.change_applied, 'failed': True, 'msg': task.info.error.msg, 'op': 'relocate'}
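The reconfigure path above reuses the same shared RelocateSpec: when a different resource pool is requested, the VM is moved with RelocateVM_Task and the module waits on the task. A minimal sketch of that generic pyVmomi pattern, assuming vm is a connected vim.VirtualMachine and new_pool a vim.ResourcePool (placeholder names; the module delegates the waiting to its own wait_for_task helper):

    # Sketch only -- vm and new_pool are placeholders, not module internals.
    import time
    from pyVmomi import vim

    relospec = vim.vm.RelocateSpec()
    relospec.pool = new_pool
    if relospec.pool != vm.resourcePool:   # relocate only if the pool actually changes
        task = vm.RelocateVM_Task(spec=relospec)
        while task.info.state in (vim.TaskInfo.State.running, vim.TaskInfo.State.queued):
            time.sleep(1)                  # poll until the task settles
        if task.info.state == vim.TaskInfo.State.error:
            raise RuntimeError(task.info.error.msg)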

@@ -0,0 +1,81 @@
# Test code for the vmware_guest module.
# Copyright: (c) 2019, Pavan Bidkar <pbidkar@vmware.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

- name: Wait for Flask controller to come up online
  wait_for:
    host: "{{ vcsim }}"
    port: 5000
    state: started

- name: kill vcsim
  uri:
    url: http://{{ vcsim }}:5000/killall

- name: start vcsim with no folders
  uri:
    url: http://{{ vcsim }}:5000/spawn?datacenter=1&cluster=1&folder=0
  register: vcsim_instance

- name: Wait for Flask controller to come up online
  wait_for:
    host: "{{ vcsim }}"
    port: 443
    state: started

- name: get a list of VMs from vcsim
  uri:
    url: http://{{ vcsim }}:5000/govc_find?filter=VM
  register: vmlist

- debug:
    var: vcsim_instance

- debug:
    var: vmlist

- name: clone vm from template and customize GOS
  vmware_guest:
    validate_certs: False
    hostname: "{{ vcsim }}"
    username: "{{ vcsim_instance['json']['username'] }}"
    password: "{{ vcsim_instance['json']['password'] }}"
    name: "{{ 'net_customize_' + item|basename }}"
    template: "{{ item|basename }}"
    datacenter: "{{ (item|basename).split('_')[0] }}"
    state: poweredoff
    folder: "{{ item|dirname }}"
    convert: thin
  with_items: "{{ vmlist['json'] }}"
  register: clone_customize

- debug:
    var: clone_customize

- name: assert that changes were made
  assert:
    that:
      - "clone_customize.results|map(attribute='changed')|unique|list == [true]"

- name: clone vm from template and customize GOS again
  vmware_guest:
    validate_certs: False
    hostname: "{{ vcsim }}"
    username: "{{ vcsim_instance['json']['username'] }}"
    password: "{{ vcsim_instance['json']['password'] }}"
    name: "{{ 'net_customize_' + item|basename }}"
    template: "{{ item|basename }}"
    datacenter: "{{ (item|basename).split('_')[0] }}"
    state: poweredoff
    folder: "{{ item|dirname }}"
    convert: thin
  with_items: "{{ vmlist['json'] }}"
  register: clone_customize_again

- debug:
    var: clone_customize_again

- name: assert that changes were not made
  assert:
    that:
      - "clone_customize_again.results|map(attribute='changed')|unique|list == [false]"

@@ -28,3 +28,4 @@
 - include: linked_clone_d1_c1_f0.yml
 - include: boot_firmware_d1_c1_f0.yml
 - include: clone_with_convert.yml
+- include: clone_customize_guest_test.yml