Merge pull request #11446 from cchurch/windows_azure_from_tower
Updates to Windows Azure inventory from Tower.
commit 86100da3a0
2 changed files with 85 additions and 22 deletions
@@ -13,8 +13,9 @@
 # API calls to Windows Azure may be slow. For this reason, we cache the results
 # of an API call. Set this to the path you want cache files to be written to.
-# One file will be written to this directory:
+# Two files will be written to this directory:
 # - ansible-azure.cache
+# - ansible-azure.index
 #
 cache_path = /tmp
 
 
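Note: the hunk above is the companion ini configuration for the inventory script;
the comment now matches the script's behavior of writing both a results cache and
a deployment index under cache_path. A minimal sketch of how such a setting is
read back (the windows_azure.ini filename is illustrative; the 'azure' section and
option names appear in the script hunks below):

    import os
    import ConfigParser  # Python 2, matching the script's print statements

    config = ConfigParser.SafeConfigParser()
    config.read('windows_azure.ini')  # illustrative config path
    if config.has_option('azure', 'cache_path'):
        cache_path = os.path.expandvars(os.path.expanduser(config.get('azure', 'cache_path')))
        cache_file = os.path.join(cache_path, 'ansible-azure.cache')  # cached API results
        index_file = os.path.join(cache_path, 'ansible-azure.index')  # deployment-name index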
@@ -65,6 +65,14 @@ class AzureInventory(object):
         self.inventory = {}
         # Index of deployment name -> host
         self.index = {}
+        self.host_metadata = {}
+
+        # Cache setting defaults.
+        # These can be overridden in settings (see `read_settings`).
+        cache_dir = os.path.expanduser('~')
+        self.cache_path_cache = os.path.join(cache_dir, '.ansible-azure.cache')
+        self.cache_path_index = os.path.join(cache_dir, '.ansible-azure.index')
+        self.cache_max_age = 0
 
         # Read settings and parse CLI arguments
         self.read_settings()
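Note: these constructor defaults make the script usable with no cache settings at
all; without an [azure] cache_path option the two cache files land as dotfiles in
the invoking user's home directory, and cache_max_age defaults to 0, which treats
any existing cache as stale (the validity check itself is outside this diff).
Illustration only; the path is whatever os.path.expanduser('~') resolves to:

    import os
    cache_dir = os.path.expanduser('~')                     # e.g. /home/alice
    print os.path.join(cache_dir, '.ansible-azure.cache')   # /home/alice/.ansible-azure.cache
    print os.path.join(cache_dir, '.ansible-azure.index')   # /home/alice/.ansible-azure.index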
@@ -82,15 +90,37 @@ class AzureInventory(object):
 
         if self.args.list_images:
             data_to_print = self.json_format_dict(self.get_images(), True)
-        elif self.args.list:
+        elif self.args.list or self.args.host:
             # Display list of nodes for inventory
             if len(self.inventory) == 0:
-                data_to_print = self.get_inventory_from_cache()
+                data = json.loads(self.get_inventory_from_cache())
             else:
-                data_to_print = self.json_format_dict(self.inventory, True)
+                data = self.inventory
 
+            if self.args.host:
+                data_to_print = self.get_host(self.args.host)
+            else:
+                # Add the `['_meta']['hostvars']` information.
+                hostvars = {}
+                if len(data) > 0:
+                    for host in set([h for hosts in data.values() for h in hosts if h]):
+                        hostvars[host] = self.get_host(host, jsonify=False)
+                data['_meta'] = {'hostvars': hostvars}
+
+                # JSONify the data.
+                data_to_print = self.json_format_dict(data, pretty=True)
         print data_to_print
 
+    def get_host(self, hostname, jsonify=True):
+        """Return information about the given hostname, based on what
+        the Windows Azure API provides.
+        """
+        if hostname not in self.host_metadata:
+            return "No host found: %s" % json.dumps(self.host_metadata)
+        if jsonify:
+            return json.dumps(self.host_metadata[hostname])
+        return self.host_metadata[hostname]
+
     def get_images(self):
         images = []
         for image in self.sms.list_os_images():
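Note: this hunk is the core of the change. Ansible's dynamic inventory protocol
allows a --list response to carry a top-level '_meta' key with per-host variables,
which saves Ansible from forking the script once per host with --host. With
hostvars built from self.host_metadata, the --list output takes roughly this shape
(group and host names are invented for illustration; the variable names come from
add_instance further down):

    {
        "azure": ["web01", "web02"],
        "my_service_name": ["web01", "web02"],
        "East_US": ["web01", "web02"],
        "_meta": {
            "hostvars": {
                "web01": {
                    "ansible_ssh_host": "myservice.cloudapp.net",
                    "ansible_ssh_port": 22,
                    "instance_status": "ReadyRole",
                    "private_id": "..."
                }
            }
        }
    }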
@@ -121,28 +151,36 @@ class AzureInventory(object):
 
         # Cache related
         if config.has_option('azure', 'cache_path'):
-            cache_path = config.get('azure', 'cache_path')
-            self.cache_path_cache = cache_path + "/ansible-azure.cache"
-            self.cache_path_index = cache_path + "/ansible-azure.index"
+            cache_path = os.path.expandvars(os.path.expanduser(config.get('azure', 'cache_path')))
+            self.cache_path_cache = os.path.join(cache_path, 'ansible-azure.cache')
+            self.cache_path_index = os.path.join(cache_path, 'ansible-azure.index')
         if config.has_option('azure', 'cache_max_age'):
             self.cache_max_age = config.getint('azure', 'cache_max_age')
 
     def read_environment(self):
         ''' Reads the settings from environment variables '''
         # Credentials
-        if os.getenv("AZURE_SUBSCRIPTION_ID"): self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")
-        if os.getenv("AZURE_CERT_PATH"): self.cert_path = os.getenv("AZURE_CERT_PATH")
+        if os.getenv("AZURE_SUBSCRIPTION_ID"):
+            self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")
+        if os.getenv("AZURE_CERT_PATH"):
+            self.cert_path = os.getenv("AZURE_CERT_PATH")
 
     def parse_cli_args(self):
         """Command line argument processing"""
-        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Azure')
+        parser = argparse.ArgumentParser(
+            description='Produce an Ansible Inventory file based on Azure',
+        )
         parser.add_argument('--list', action='store_true', default=True,
                             help='List nodes (default: True)')
         parser.add_argument('--list-images', action='store',
                             help='Get all available images.')
-        parser.add_argument('--refresh-cache', action='store_true', default=False,
-                            help='Force refresh of cache by making API requests to Azure (default: False - use cache files)')
+        parser.add_argument('--refresh-cache',
+                            action='store_true', default=False,
+                            help='Force refresh of the cache by making API requests to Azure '
+                                 '(default: False - use cache files)',
+                            )
+        parser.add_argument('--host', action='store',
+                            help='Get all information about an instance.')
         self.args = parser.parse_args()
 
     def do_api_calls_update_cache(self):
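Note: with the new --host option the script now implements both halves of the
dynamic inventory contract. Assumed invocations (script name and hostname are
illustrative):

    ./windows_azure.py --list             # inventory, including _meta hostvars
    ./windows_azure.py --host web01       # variables for a single instance
    ./windows_azure.py --refresh-cache    # bypass the cache files and query Azure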
@@ -163,11 +201,12 @@ class AzureInventory(object):
             sys.exit(1)
 
     def add_deployments(self, cloud_service):
-        """Makes an Azure API call to get the list of virtual machines associated with a cloud service"""
+        """Makes an Azure API call to get the list of virtual machines
+        associated with a cloud service.
+        """
         try:
             for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name,embed_detail=True).deployments.deployments:
-                if deployment.deployment_slot == "Production":
-                    self.add_deployment(cloud_service, deployment)
+                self.add_deployment(cloud_service, deployment)
         except WindowsAzureError as e:
             print "Looks like Azure's API is down:"
             print
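Note: dropping the deployment_slot check means deployments in the Staging slot are
now inventoried alongside Production deployments, rather than Production only.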
@@ -176,20 +215,43 @@ class AzureInventory(object):
 
     def add_deployment(self, cloud_service, deployment):
         """Adds a deployment to the inventory and index"""
+        for role in deployment.role_instance_list.role_instances:
+            try:
+                # Default port 22 unless port found with name 'SSH'
+                port = '22'
+                for ie in role.instance_endpoints.instance_endpoints:
+                    if ie.name == 'SSH':
+                        port = ie.public_port
+                        break
+            except AttributeError as e:
+                pass
+            finally:
+                self.add_instance(role.instance_name, deployment, port, cloud_service, role.instance_status)
+
+    def add_instance(self, hostname, deployment, ssh_port, cloud_service, status):
+        """Adds an instance to the inventory and index"""
+
         dest = urlparse(deployment.url).hostname
 
         # Add to index
-        self.index[dest] = deployment.name
+        self.index[hostname] = deployment.name
+
+        self.host_metadata[hostname] = dict(ansible_ssh_host=dest,
+                                            ansible_ssh_port=int(ssh_port),
+                                            instance_status=status,
+                                            private_id=deployment.private_id)
 
         # List of all azure deployments
-        self.push(self.inventory, "azure", dest)
+        self.push(self.inventory, "azure", hostname)
 
         # Inventory: Group by service name
-        self.push(self.inventory, self.to_safe(cloud_service.service_name), dest)
+        self.push(self.inventory, self.to_safe(cloud_service.service_name), hostname)
+
+        if int(ssh_port) == 22:
+            self.push(self.inventory, "Cloud_services", hostname)
 
         # Inventory: Group by region
-        self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), dest)
+        self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), hostname)
 
     def push(self, my_dict, key, element):
         """Pushed an element onto an array that may not have been defined in the dict."""
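Note: the try/finally in add_deployment guarantees add_instance runs for every
role instance, falling back to port '22' when a role exposes no instance_endpoints
attribute. The push() helper whose body is truncated here is, per its docstring,
an append-with-default; a sketch of the assumed behavior:

    def push(self, my_dict, key, element):
        """Push an element onto a list that may not yet exist in the dict."""
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]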