#!/usr/bin/env python
# Copyright: (c) 2013, Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

'''
GCE external inventory script
=================================

Generates inventory that Ansible can understand by making API requests to
Google Compute Engine via the libcloud library. Full install/configuration
instructions for the gce* modules can be found in the comments of
ansible/test/gce_tests.py.

When run against a specific host, this script returns the following variables
based on the data obtained from the libcloud Node object:
 - gce_uuid
 - gce_id
 - gce_image
 - gce_machine_type
 - gce_private_ip
 - gce_public_ip
 - gce_name
 - gce_description
 - gce_status
 - gce_zone
 - gce_tags
 - gce_metadata
 - gce_network
 - gce_subnetwork

When run in --list mode, instances are grouped by the following categories:
 - zone:
   zone group name examples are us-central1-b, europe-west1-a, etc.
 - instance tags:
   An entry is created for each tag. For example, if you have two instances
   with a common tag called 'foo', they will both be grouped together under
   the 'tag_foo' name.
 - network name:
   the name of the network is appended to 'network_' (e.g. the 'default'
   network will result in a group named 'network_default')
 - machine type:
   types follow a pattern like n1-standard-4, g1-small, etc.
 - running status:
   group name prefixed with 'status_' (e.g. status_running, status_stopped,..)
 - image:
   when using an ephemeral/scratch disk, this will be set to the image name
   used when creating the instance (e.g. debian-7-wheezy-v20130816). When
   your instance was created with a root persistent disk it will be set to
   'persistent_disk' since there is no current way to determine the image.
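
For example (illustrative, derived from the grouping rules above), an instance
named 'web-1' in zone us-central1-a, tagged 'http', on the 'default' network
and running as an n1-standard-1 would appear in the groups us-central1-a,
tag_http, network_default, n1-standard-1 and status_running, among others.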

Examples:
  Execute uname on all instances in the us-central1-a zone
  $ ansible -i gce.py us-central1-a -m shell -a "/bin/uname -a"

  Use the GCE inventory script to print out instance specific information
  $ contrib/inventory/gce.py --host my_instance
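
  Force the inventory cache to be rebuilt and pretty-print the result
  (--refresh-cache and --pretty are defined in parse_cli_args below)
  $ contrib/inventory/gce.py --refresh-cache --pretty

  Only group instances from a selected zone while rebuilding the cache
  (the GCE_ZONE filter is parsed in parse_env_zones below)
  $ GCE_ZONE=us-central1-a contrib/inventory/gce.py --refresh-cache --list

  Inventory several projects at once via a comma-separated project list
  (split into one libcloud driver per project in get_gce_drivers below;
  the project IDs shown are illustrative)
  $ GCE_PROJECT=project-a,project-b contrib/inventory/gce.py --list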

Author: Eric Johnson <erjohnso@google.com>
Contributors: Matt Hite <mhite@hotmail.com>, Tom Melendez <supertom@google.com>,
              John Roach <johnroach1985@gmail.com>
Version: 0.0.4
'''

try:
    import pkg_resources
except ImportError:
    # Use pkg_resources to find the correct versions of libraries and set
    # sys.path appropriately when there are multiversion installs. We don't
    # fail here as there is code that better expresses the errors where the
    # library is used.
    pass

USER_AGENT_PRODUCT = "Ansible-gce_inventory_plugin"
USER_AGENT_VERSION = "v2"

import sys
import os
import argparse

from time import time

if sys.version_info >= (3, 0):
    import configparser
else:
    import ConfigParser as configparser

import logging
logging.getLogger('libcloud.common.google').addHandler(logging.NullHandler())

import json

try:
    from libcloud.compute.types import Provider
    from libcloud.compute.providers import get_driver
    _ = Provider.GCE
except Exception:
    sys.exit("GCE inventory script requires libcloud >= 0.13")


class CloudInventoryCache(object):
    def __init__(self, cache_name='ansible-cloud-cache', cache_path='/tmp',
                 cache_max_age=300):
        cache_dir = os.path.expanduser(cache_path)
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        self.cache_path_cache = os.path.join(cache_dir, cache_name)

        self.cache_max_age = cache_max_age

    def is_valid(self, max_age=None):
        ''' Determines if the cache file has expired or is still valid '''

        if max_age is None:
            max_age = self.cache_max_age

        if os.path.isfile(self.cache_path_cache):
            mod_time = os.path.getmtime(self.cache_path_cache)
            current_time = time()
            if (mod_time + max_age) > current_time:
                return True

        return False

    def get_all_data_from_cache(self, filename=''):
        ''' Reads the JSON inventory from the cache file. Returns a Python dictionary. '''

        data = ''
        if not filename:
            filename = self.cache_path_cache
        with open(filename, 'r') as cache:
            data = cache.read()
        return json.loads(data)

    def write_to_cache(self, data, filename=''):
        ''' Writes data to file as JSON. Returns True. '''
        if not filename:
            filename = self.cache_path_cache
        json_data = json.dumps(data)
        with open(filename, 'w') as cache:
            cache.write(json_data)
        return True

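# Illustrative usage of the cache helper (the values mirror the defaults wired
# up in GceInventory.get_config below; 'fresh_inventory_dict' is a placeholder
# for the dictionary produced by GceInventory.group_instances):
#
#     cache = CloudInventoryCache(cache_name='ansible-gce.cache',
#                                 cache_path='~/.ansible/tmp',
#                                 cache_max_age=300)
#     if not cache.is_valid():
#         cache.write_to_cache(fresh_inventory_dict)
#     inventory = cache.get_all_data_from_cache()
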
class GceInventory(object):
    def __init__(self):
        # Cache object
        self.cache = None
        # dictionary containing inventory read from disk
        self.inventory = {}

        # Read settings and parse CLI arguments
        self.parse_cli_args()
        self.config = self.get_config()
        self.drivers = self.get_gce_drivers()
        self.ip_type = self.get_inventory_options()
        if self.ip_type:
            self.ip_type = self.ip_type.lower()

        # Cache management
        start_inventory_time = time()
        cache_used = False
        if self.args.refresh_cache or not self.cache.is_valid():
            self.do_api_calls_update_cache()
        else:
            self.load_inventory_from_cache()
            cache_used = True
            self.inventory['_meta']['stats'] = {'use_cache': True}
        self.inventory['_meta']['stats'] = {
            'inventory_load_time': time() - start_inventory_time,
            'cache_used': cache_used
        }

        # Just display data for specific host
        if self.args.host:
            print(self.json_format_dict(
                self.inventory['_meta']['hostvars'][self.args.host],
                pretty=self.args.pretty))
        else:
            # Otherwise, assume user wants all instances grouped
            zones = self.parse_env_zones()
            print(self.json_format_dict(self.inventory,
                                        pretty=self.args.pretty))
        sys.exit(0)

    def get_config(self):
        """
        Reads the settings from the gce.ini file.

        Populates a SafeConfigParser object with defaults and
        attempts to read an .ini-style configuration from the filename
        specified in GCE_INI_PATH. If the environment variable is
        not present, the filename defaults to gce.ini in the current
        working directory.
        """
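        # Illustrative gce.ini layout (the sections and keys mirror the
        # defaults and lookups declared below; values are placeholders):
        #
        #     [gce]
        #     gce_service_account_email_address = sa@example.iam.gserviceaccount.com
        #     gce_service_account_pem_file_path = /path/to/project.pem
        #     gce_project_id = my-project
        #     gce_zone =
        #     libcloud_secrets =
        #     instance_tags =
        #     instance_states =
        #
        #     [inventory]
        #     inventory_ip_type =
        #
        #     [cache]
        #     cache_path = ~/.ansible/tmp
        #     cache_max_age = 300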
        gce_ini_default_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "gce.ini")
        gce_ini_path = os.environ.get('GCE_INI_PATH', gce_ini_default_path)

        # Create a ConfigParser.
        # This provides empty defaults to each key, so that environment
        # variable configuration (as opposed to INI configuration) is able
        # to work.
        config = configparser.SafeConfigParser(defaults={
            'gce_service_account_email_address': '',
            'gce_service_account_pem_file_path': '',
            'gce_project_id': '',
            'gce_zone': '',
            'libcloud_secrets': '',
            'instance_tags': '',
            'inventory_ip_type': '',
            'cache_path': '~/.ansible/tmp',
            'cache_max_age': '300'
        })
        if 'gce' not in config.sections():
            config.add_section('gce')
        if 'inventory' not in config.sections():
            config.add_section('inventory')
        if 'cache' not in config.sections():
            config.add_section('cache')

        config.read(gce_ini_path)

        #########
        # Section added for processing ini settings
        #########

        # Set the instance_states filter based on config file options
        self.instance_states = []
        if config.has_option('gce', 'instance_states'):
            states = config.get('gce', 'instance_states')
            # Ignore if instance_states is an empty string.
            if states:
                self.instance_states = states.split(',')

        # Set the instance_tags filter; the env var overrides the config file
        # and the cli param overrides all
        if self.args.instance_tags:
            self.instance_tags = self.args.instance_tags
        else:
            self.instance_tags = os.environ.get(
                'GCE_INSTANCE_TAGS', config.get('gce', 'instance_tags'))
        if self.instance_tags:
            self.instance_tags = self.instance_tags.split(',')

        # Caching
        cache_path = config.get('cache', 'cache_path')
        cache_max_age = config.getint('cache', 'cache_max_age')
        # TODO(supertom): support project-specific caches
        cache_name = 'ansible-gce.cache'
        self.cache = CloudInventoryCache(cache_path=cache_path,
                                         cache_max_age=cache_max_age,
                                         cache_name=cache_name)
        return config

    def get_inventory_options(self):
        """Determine inventory options. Environment variables always
        take precedence over configuration files."""
        ip_type = self.config.get('inventory', 'inventory_ip_type')
        # If the appropriate environment variables are set, they override
        # other configuration
        ip_type = os.environ.get('INVENTORY_IP_TYPE', ip_type)
        return ip_type

    def get_gce_drivers(self):
        """Determine the GCE authorization settings and return a list of
        libcloud drivers.
        """
        # Attempt to get GCE params from a configuration file, if one
        # exists.
        secrets_path = self.config.get('gce', 'libcloud_secrets')
        secrets_found = False

        try:
            import secrets
            args = list(secrets.GCE_PARAMS)
            kwargs = secrets.GCE_KEYWORD_PARAMS
            secrets_found = True
        except Exception:
            pass

        if not secrets_found and secrets_path:
            if not secrets_path.endswith('secrets.py'):
                err = "Must specify libcloud secrets file as "
                err += "/absolute/path/to/secrets.py"
                sys.exit(err)
            sys.path.append(os.path.dirname(secrets_path))
            try:
                import secrets
                args = list(getattr(secrets, 'GCE_PARAMS', []))
                kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
                secrets_found = True
            except Exception:
                pass

        if not secrets_found:
            args = [
                self.config.get('gce', 'gce_service_account_email_address'),
                self.config.get('gce', 'gce_service_account_pem_file_path')
            ]
            kwargs = {'project': self.config.get('gce', 'gce_project_id'),
                      'datacenter': self.config.get('gce', 'gce_zone')}

        # If the appropriate environment variables are set, they override
        # other configuration; process those into our args and kwargs.
        args[0] = os.environ.get('GCE_EMAIL', args[0])
        args[1] = os.environ.get('GCE_PEM_FILE_PATH', args[1])
        args[1] = os.environ.get('GCE_CREDENTIALS_FILE_PATH', args[1])

        kwargs['project'] = os.environ.get('GCE_PROJECT', kwargs['project'])
        kwargs['datacenter'] = os.environ.get('GCE_ZONE', kwargs['datacenter'])

        gce_drivers = []
        # A comma-separated project list (GCE_PROJECT or gce_project_id)
        # yields one driver per project so every project is inventoried.
        projects = kwargs['project'].split(',')
        for project in projects:
            kwargs['project'] = project
            gce = get_driver(Provider.GCE)(*args, **kwargs)
            gce.connection.user_agent_append(
                '%s/%s' % (USER_AGENT_PRODUCT, USER_AGENT_VERSION),
            )
            gce_drivers.append(gce)
        return gce_drivers

    def parse_env_zones(self):
        '''Returns a list of comma separated zones parsed from the GCE_ZONE environment variable.
        If provided, this will be used to filter the results of the group_instances call'''
        import csv
        reader = csv.reader([os.environ.get('GCE_ZONE', "")], skipinitialspace=True)
        zones = [r for r in reader]
        return [z for z in zones[0]]

    def parse_cli_args(self):
        ''' Command line argument processing '''

        parser = argparse.ArgumentParser(
            description='Produce an Ansible Inventory file based on GCE')
        parser.add_argument('--list', action='store_true', default=True,
                            help='List instances (default: True)')
        parser.add_argument('--host', action='store',
                            help='Get all information about an instance')
        parser.add_argument('--instance-tags', action='store',
                            help='Only include instances with these tags, separated by commas')
        parser.add_argument('--pretty', action='store_true', default=False,
                            help='Pretty format (default: False)')
        parser.add_argument(
            '--refresh-cache', action='store_true', default=False,
            help='Force refresh of cache by making API requests (default: False - use cache files)')
        self.args = parser.parse_args()

    def node_to_dict(self, inst):
        md = {}

        if inst is None:
            return {}

        if 'items' in inst.extra['metadata']:
            for entry in inst.extra['metadata']['items']:
                md[entry['key']] = entry['value']

        net = inst.extra['networkInterfaces'][0]['network'].split('/')[-1]
        subnet = None
        if 'subnetwork' in inst.extra['networkInterfaces'][0]:
            subnet = inst.extra['networkInterfaces'][0]['subnetwork'].split('/')[-1]
        # default to external IP unless user has specified they prefer internal
        if self.ip_type == 'internal':
            ssh_host = inst.private_ips[0]
        else:
            ssh_host = inst.public_ips[0] if len(inst.public_ips) >= 1 else inst.private_ips[0]

        return {
            'gce_uuid': inst.uuid,
            'gce_id': inst.id,
            'gce_image': inst.image,
            'gce_machine_type': inst.size,
            'gce_private_ip': inst.private_ips[0],
            'gce_public_ip': inst.public_ips[0] if len(inst.public_ips) >= 1 else None,
            'gce_name': inst.name,
            'gce_description': inst.extra['description'],
            'gce_status': inst.extra['status'],
            'gce_zone': inst.extra['zone'].name,
            'gce_tags': inst.extra['tags'],
            'gce_metadata': md,
            'gce_network': net,
            'gce_subnetwork': subnet,
            # Hosts don't have a public name, so we add an IP
            'ansible_ssh_host': ssh_host
        }

    def load_inventory_from_cache(self):
        ''' Loads inventory from JSON on disk. '''

        try:
            self.inventory = self.cache.get_all_data_from_cache()
            # Accessing the hostvars also validates the structure of the cached data.
            hosts = self.inventory['_meta']['hostvars']
        except Exception as e:
            print(
                "Invalid inventory file %s. Please rebuild with --refresh-cache option."
                % (self.cache.cache_path_cache))
            raise

    def do_api_calls_update_cache(self):
        ''' Do API calls and save data in cache. '''
        zones = self.parse_env_zones()
        data = self.group_instances(zones)
        self.cache.write_to_cache(data)
        self.inventory = data

    def list_nodes(self):
        all_nodes = []
        params, more_results = {'maxResults': 500}, True
        while more_results:
            for driver in self.drivers:
                # The shared request params are updated in place by libcloud;
                # a remaining 'pageToken' key means another page must be fetched.
                driver.connection.gce_params = params
                all_nodes.extend(driver.list_nodes())
                more_results = 'pageToken' in params
        return all_nodes

    def group_instances(self, zones=None):
        '''Group all instances'''
        groups = {}
        meta = {}
        meta["hostvars"] = {}

        for node in self.list_nodes():

            # This check filters on the desired instance states defined in the
            # config file with the instance_states config option.
            #
            # If the instance_states list is _empty_ then _ALL_ states are returned.
            #
            # If the instance_states list is _populated_ then check the current
            # state against the instance_states list
            if self.instance_states and not node.extra['status'] in self.instance_states:
                continue

            # This check filters on the desired instance tags defined in the
            # config file with the instance_tags config option, env var GCE_INSTANCE_TAGS,
            # or as the cli param --instance-tags.
            #
            # If the instance_tags list is _empty_ then _ALL_ instances are returned.
            #
            # If the instance_tags list is _populated_ then check the current
            # instance tags against the instance_tags list. If the instance has
            # at least one tag from the instance_tags list, it is returned.
            if self.instance_tags and not set(self.instance_tags) & set(node.extra['tags']):
                continue

            name = node.name

            meta["hostvars"][name] = self.node_to_dict(node)

            zone = node.extra['zone'].name

            # To avoid making multiple requests per zone
            # we list all nodes and then filter the results
            if zones and zone not in zones:
                continue

            if zone in groups:
                groups[zone].append(name)
            else:
                groups[zone] = [name]

            tags = node.extra['tags']
            for t in tags:
                if t.startswith('group-'):
                    tag = t[6:]
                else:
                    tag = 'tag_%s' % t
                if tag in groups:
                    groups[tag].append(name)
                else:
                    groups[tag] = [name]

            net = node.extra['networkInterfaces'][0]['network'].split('/')[-1]
            net = 'network_%s' % net
            if net in groups:
                groups[net].append(name)
            else:
                groups[net] = [name]

            machine_type = node.size
            if machine_type in groups:
                groups[machine_type].append(name)
            else:
                groups[machine_type] = [name]

            image = node.image or 'persistent_disk'
            if image in groups:
                groups[image].append(name)
            else:
                groups[image] = [name]

            status = node.extra['status']
            stat = 'status_%s' % status.lower()
            if stat in groups:
                groups[stat].append(name)
            else:
                groups[stat] = [name]

            for private_ip in node.private_ips:
                groups[private_ip] = [name]

            if len(node.public_ips) >= 1:
                for public_ip in node.public_ips:
                    groups[public_ip] = [name]

        groups["_meta"] = meta

        return groups

    def json_format_dict(self, data, pretty=False):
        ''' Converts a dict to a JSON object and dumps it as a formatted
        string '''

        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)


# Run the script
if __name__ == '__main__':
    GceInventory()