#!/usr/bin/env python

'''
Windows Azure external inventory script
=======================================

Generates inventory that Ansible can understand by making API requests to
Windows Azure using the azure python library.

NOTE: This script assumes Ansible is being executed where azure is already
installed.

    pip install azure

Adapted from the ansible Linode plugin by Dan Slimmon.
'''

# (c) 2013, John Whitbeck
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

######################################################################

# Standard imports
import re
import sys
import argparse
import os
from ansible.module_utils.six.moves.urllib.parse import urlparse
from time import time

try:
    import json
except ImportError:
    import simplejson as json

try:
    from azure.servicemanagement import ServiceManagementService
except ImportError as e:
    sys.exit("ImportError: {0}".format(str(e)))

# Imports for ansible
import ConfigParser


class AzureInventory(object):
    def __init__(self):
        """Main execution path."""
        # Inventory grouped by display group
        self.inventory = {}
        # Index of deployment name -> host
        self.index = {}
        self.host_metadata = {}

        # Cache setting defaults.
        # These can be overridden in settings (see `read_settings`).
        cache_dir = os.path.expanduser('~')
        self.cache_path_cache = os.path.join(cache_dir, '.ansible-azure.cache')
        self.cache_path_index = os.path.join(cache_dir, '.ansible-azure.index')
        self.cache_max_age = 0

        # Read settings and parse CLI arguments
        self.read_settings()
        self.read_environment()
        self.parse_cli_args()

        # Initialize Azure ServiceManagementService
        self.sms = ServiceManagementService(self.subscription_id, self.cert_path)

        # Cache
        if self.args.refresh_cache:
            self.do_api_calls_update_cache()
        elif not self.is_cache_valid():
            self.do_api_calls_update_cache()

        if self.args.list_images:
            data_to_print = self.json_format_dict(self.get_images(), True)
        elif self.args.list or self.args.host:
            # Display list of nodes for inventory
            if len(self.inventory) == 0:
                data = json.loads(self.get_inventory_from_cache())
            else:
                data = self.inventory

            if self.args.host:
                data_to_print = self.get_host(self.args.host)
            else:
                # Add the `['_meta']['hostvars']` information.
                hostvars = {}
                if len(data) > 0:
                    for host in set([h for hosts in data.values() for h in hosts if h]):
                        hostvars[host] = self.get_host(host, jsonify=False)
                data['_meta'] = {'hostvars': hostvars}

                # JSONify the data.
                data_to_print = self.json_format_dict(data, pretty=True)

        print(data_to_print)
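
    # The printed output is standard Ansible dynamic-inventory JSON: group
    # names mapped to lists of host names, plus the `_meta.hostvars` block
    # built above. An illustrative (made-up) result might look like:
    #
    #   {
    #     "azure": ["web01", "web02"],
    #     "my-cloud-service": ["web01", "web02"],
    #     "West_Europe": ["web01", "web02"],
    #     "_meta": {
    #       "hostvars": {
    #         "web01": {
    #           "ansible_ssh_host": "my-cloud-service.cloudapp.net",
    #           "ansible_ssh_port": 22,
    #           "instance_status": "ReadyRole",
    #           "private_id": "..."
    #         }
    #       }
    #     }
    #   }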

    def get_host(self, hostname, jsonify=True):
        """Return information about the given hostname, based on what
        the Windows Azure API provides.
        """
        if hostname not in self.host_metadata:
            return "No host found: %s" % json.dumps(self.host_metadata)
        if jsonify:
            return json.dumps(self.host_metadata[hostname])
        return self.host_metadata[hostname]

    def get_images(self):
        images = []
        for image in self.sms.list_os_images():
            if str(image.label).lower().find(self.args.list_images.lower()) >= 0:
                images.append(vars(image))
        return json.loads(json.dumps(images, default=lambda o: o.__dict__))

    def is_cache_valid(self):
        """Determines if the cache file has expired, or if it is still valid."""
        if os.path.isfile(self.cache_path_cache):
            mod_time = os.path.getmtime(self.cache_path_cache)
            current_time = time()
            if (mod_time + self.cache_max_age) > current_time:
                if os.path.isfile(self.cache_path_index):
                    return True
        return False

    def read_settings(self):
        """Reads the settings from the .ini file."""
        config = ConfigParser.SafeConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + '/windows_azure.ini')

        # Credentials related
        if config.has_option('azure', 'subscription_id'):
            self.subscription_id = config.get('azure', 'subscription_id')
        if config.has_option('azure', 'cert_path'):
            self.cert_path = config.get('azure', 'cert_path')

        # Cache related
        if config.has_option('azure', 'cache_path'):
            cache_path = os.path.expandvars(os.path.expanduser(config.get('azure', 'cache_path')))
            self.cache_path_cache = os.path.join(cache_path, 'ansible-azure.cache')
            self.cache_path_index = os.path.join(cache_path, 'ansible-azure.index')
        if config.has_option('azure', 'cache_max_age'):
            self.cache_max_age = config.getint('azure', 'cache_max_age')
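
    # For reference, a minimal windows_azure.ini next to this script might look
    # like the following (a sketch with placeholder values; only the options
    # checked above are read):
    #
    #   [azure]
    #   subscription_id = 00000000-0000-0000-0000-000000000000
    #   cert_path = /path/to/management_certificate.pem
    #   cache_path = ~/.ansible/tmp
    #   cache_max_age = 300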

    def read_environment(self):
        """Reads the settings from environment variables."""
        # Credentials
        if os.getenv("AZURE_SUBSCRIPTION_ID"):
            self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")
        if os.getenv("AZURE_CERT_PATH"):
            self.cert_path = os.getenv("AZURE_CERT_PATH")
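
    # The same two credentials can instead be supplied via the environment
    # (placeholder values shown); because read_environment() runs after
    # read_settings(), these override the .ini values:
    #
    #   export AZURE_SUBSCRIPTION_ID='00000000-0000-0000-0000-000000000000'
    #   export AZURE_CERT_PATH="$HOME/.azure/management_certificate.pem"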

    def parse_cli_args(self):
        """Command line argument processing"""
        parser = argparse.ArgumentParser(
            description='Produce an Ansible Inventory file based on Azure',
        )
        parser.add_argument('--list', action='store_true', default=True,
                            help='List nodes (default: True)')
        parser.add_argument('--list-images', action='store',
                            help='Get all available images.')
        parser.add_argument('--refresh-cache', action='store_true', default=False,
                            help='Force refresh of the cache by making API requests to Azure '
                                 '(default: False - use cache files)')
        parser.add_argument('--host', action='store',
                            help='Get all information about an instance.')
        self.args = parser.parse_args()
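
    # Typical invocations, assuming the script is saved as windows_azure.py and
    # marked executable (host and image names below are illustrative):
    #
    #   ./windows_azure.py --list
    #   ./windows_azure.py --host web01
    #   ./windows_azure.py --list-images ubuntu
    #   ./windows_azure.py --refresh-cache --list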

    def do_api_calls_update_cache(self):
        """Do API calls, and save data in cache files."""
        self.add_cloud_services()
        self.write_to_cache(self.inventory, self.cache_path_cache)
        self.write_to_cache(self.index, self.cache_path_index)

    def add_cloud_services(self):
        """Makes an Azure API call to get the list of cloud services."""
        try:
            for cloud_service in self.sms.list_hosted_services():
                self.add_deployments(cloud_service)
        except Exception as e:
            sys.exit("Error: Failed to access cloud services - {0}".format(str(e)))

    def add_deployments(self, cloud_service):
        """Makes an Azure API call to get the list of virtual machines
        associated with a cloud service.
        """
        try:
            for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name, embed_detail=True).deployments.deployments:
                self.add_deployment(cloud_service, deployment)
        except Exception as e:
            sys.exit("Error: Failed to access deployments - {0}".format(str(e)))

    def add_deployment(self, cloud_service, deployment):
        """Adds a deployment to the inventory and index"""
        for role in deployment.role_instance_list.role_instances:
            try:
                # Default port 22 unless port found with name 'SSH'
                port = '22'
                for ie in role.instance_endpoints.instance_endpoints:
                    if ie.name == 'SSH':
                        port = ie.public_port
                        break
            except AttributeError:
                pass
            finally:
                self.add_instance(role.instance_name, deployment, port, cloud_service, role.instance_status)

    def add_instance(self, hostname, deployment, ssh_port, cloud_service, status):
        """Adds an instance to the inventory and index"""

        dest = urlparse(deployment.url).hostname

        # Add to index
        self.index[hostname] = deployment.name

        self.host_metadata[hostname] = dict(ansible_ssh_host=dest,
                                            ansible_ssh_port=int(ssh_port),
                                            instance_status=status,
                                            private_id=deployment.private_id)

        # List of all azure deployments
        self.push(self.inventory, "azure", hostname)

        # Inventory: Group by service name
        self.push(self.inventory, self.to_safe(cloud_service.service_name), hostname)

        if int(ssh_port) == 22:
            self.push(self.inventory, "Cloud_services", hostname)

        # Inventory: Group by region
        self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), hostname)

    def push(self, my_dict, key, element):
        """Pushes an element onto a list that may not have been defined in the dict."""
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]

    def get_inventory_from_cache(self):
        """Reads the inventory from the cache file and returns it as a JSON string."""
        with open(self.cache_path_cache, 'r') as cache:
            json_inventory = cache.read()
        return json_inventory

    def load_index_from_cache(self):
        """Reads the index from the cache file and sets self.index."""
        with open(self.cache_path_index, 'r') as cache:
            json_index = cache.read()
        self.index = json.loads(json_index)

    def write_to_cache(self, data, filename):
        """Writes data in JSON format to a file."""
        json_data = self.json_format_dict(data, True)
        with open(filename, 'w') as cache:
            cache.write(json_data)

    def to_safe(self, word):
        """Escapes any characters that would be invalid in an ansible group name."""
        return re.sub(r"[^A-Za-z0-9\-]", "_", word)
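
    # For example, to_safe("West Europe") returns "West_Europe", which is how
    # the region-based groups produced in add_instance() end up named.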

    def json_format_dict(self, data, pretty=False):
        """Converts a dict to a JSON object and dumps it as a formatted string."""
        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)


AzureInventory()