#!/usr/bin/env python

"""
DigitalOcean external inventory script
======================================

Generates Ansible inventory of DigitalOcean Droplets.

In addition to the --list and --host options used by Ansible, there are options
for generating JSON of other DigitalOcean data. This is useful when creating
droplets. For example, --regions will return all the DigitalOcean Regions.
This information can also be easily found in the cache file, whose default
location is ./ansible-digital_ocean.cache.

The --pretty (-p) option pretty-prints the output for better human readability.

----
Although the cache stores all the information received from DigitalOcean,
the cache is not used for current droplet information (in --list, --host,
--all, and --droplets). This is so that accurate droplet information is always
found. You can force this script to use the cache with --force-cache.
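
For example, to build the inventory strictly from the cache:

    digital_ocean.py --force-cache --list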

----
Configuration is read from `digital_ocean.ini`, then from environment variables,
and then from command-line arguments.

Most notably, the DigitalOcean API Token must be specified. It can be specified
in the INI file or with one of the following environment variables:

    export DO_API_TOKEN='abc123'
    export DO_API_KEY='abc123'

Alternatively, it can be passed on the command line with --api-token.

If you specify DigitalOcean credentials in the INI file, a handy way to
get them into your environment (e.g., to use the digital_ocean module)
is to use the output of the --env option with export:

    export $(digital_ocean.py --env)
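
For example, a minimal `digital_ocean.ini` might look like this (values are
illustrative; the option names are the ones this script reads):

    [digital_ocean]
    api_token = abc123
    cache_path = .
    cache_max_age = 300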

----
The following groups are generated from --list:
 - ID    (droplet ID)
 - NAME  (droplet NAME)
 - digital_ocean
 - image_ID
 - image_NAME
 - distro_NAME (distribution NAME from image)
 - region_NAME
 - size_NAME
 - status_STATUS
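
For example, with these groups you can target every droplet in a region
directly (the region slug shown is illustrative):

    ansible -i digital_ocean.py region_nyc1 -m ping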

For each host, the following variables are registered:
 - do_backup_ids
 - do_created_at
 - do_disk
 - do_features - list
 - do_id
 - do_image - object
 - do_ip_address
 - do_private_ip_address
 - do_kernel - object
 - do_locked
 - do_memory
 - do_name
 - do_networks - object
 - do_next_backup_window
 - do_region - object
 - do_size - object
 - do_size_slug
 - do_snapshot_ids - list
 - do_status
 - do_tags
 - do_vcpus
 - do_volume_ids
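
These can be used like any other host variables. For example (an illustrative
playbook sketch, not part of this script):

    - hosts: digital_ocean
      tasks:
        - debug:
            msg: "{{ do_name }} is a {{ do_size_slug }} droplet in {{ do_region.slug }}"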

----
```
usage: digital_ocean.py [-h] [--list] [--host HOST] [--all] [--droplets]
                        [--regions] [--images] [--sizes] [--ssh-keys]
                        [--domains] [--tags] [--pretty]
                        [--cache-path CACHE_PATH]
                        [--cache-max_age CACHE_MAX_AGE] [--force-cache]
                        [--refresh-cache] [--env] [--api-token API_TOKEN]

Produce an Ansible Inventory file based on DigitalOcean credentials

optional arguments:
  -h, --help            show this help message and exit
  --list                List all active Droplets as Ansible inventory
                        (default: True)
  --host HOST           Get all Ansible inventory variables about a specific
                        Droplet
  --all                 List all DigitalOcean information as JSON
  --droplets, -d        List Droplets as JSON
  --regions             List Regions as JSON
  --images              List Images as JSON
  --sizes               List Sizes as JSON
  --ssh-keys            List SSH keys as JSON
  --domains             List Domains as JSON
  --tags                List Tags as JSON
  --pretty, -p          Pretty-print results
  --cache-path CACHE_PATH
                        Path to the cache files (default: .)
  --cache-max_age CACHE_MAX_AGE
                        Maximum age of the cached items (default: 0)
  --force-cache         Only use data from the cache
  --refresh-cache, -r   Force refresh of cache by making API requests to
                        DigitalOcean (default: False - use cache files)
  --env, -e             Display DO_API_TOKEN
  --api-token API_TOKEN, -a API_TOKEN
                        DigitalOcean API Token
```
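
For example, `digital_ocean.py --pretty --regions` prints the region list as
JSON (output below is abbreviated and illustrative):

    {
      "regions": [
        {
          "name": "New York 1",
          "slug": "nyc1",
          "available": true
        }
      ]
    }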

"""

# (c) 2013, Evan Wies <evan@neomantra.net>
# (c) 2017, Ansible Project
# (c) 2017, Abhijeet Kasurde <akasurde@redhat.com>
#
# Inspired by the EC2 inventory plugin:
# https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.py
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

######################################################################

import argparse
import ast
import os
import re
import sys
from time import time

import requests

try:
    import ConfigParser
except ImportError:
    import configparser as ConfigParser

try:
    import json
except ImportError:
    import simplejson as json


class DoManager:
    """ Thin wrapper around the DigitalOcean API v2 """

    def __init__(self, api_token):
        self.api_token = api_token
        self.api_endpoint = 'https://api.digitalocean.com/v2'
        self.headers = {'Authorization': 'Bearer {0}'.format(self.api_token),
                        'Content-type': 'application/json'}
        self.timeout = 60

    def _url_builder(self, path):
        if path[0] == '/':
            path = path[1:]
        return '%s/%s' % (self.api_endpoint, path)

    def send(self, url, method='GET', data=None):
        """ Sends a GET request and follows pagination links, merging all pages into one response """
        url = self._url_builder(url)
        try:
            if method == 'GET':
                resp_data = {}
                incomplete = True
                while incomplete:
                    # Parameters of a GET request belong in the query string, not in the body
                    resp = requests.get(url, params=data, headers=self.headers, timeout=self.timeout)
                    json_resp = resp.json()

                    # Merge list values across pages; scalar values are simply overwritten
                    for key, value in json_resp.items():
                        if isinstance(value, list) and key in resp_data:
                            resp_data[key] += value
                        else:
                            resp_data[key] = value

                    try:
                        url = json_resp['links']['pages']['next']
                        # The 'next' link already carries the query string
                        data = None
                    except KeyError:
                        incomplete = False

        except ValueError as e:
            sys.exit("Unable to parse result from %s: %s" % (url, e))
        return resp_data

    def all_active_droplets(self):
        resp = self.send('droplets/')
        return resp['droplets']

    def all_regions(self):
        resp = self.send('regions/')
        return resp['regions']

    def all_images(self, filter_name='global'):
        params = {'filter': filter_name}
        resp = self.send('images/', data=params)
        return resp['images']

    def sizes(self):
        resp = self.send('sizes/')
        return resp['sizes']

    def all_ssh_keys(self):
        resp = self.send('account/keys')
        return resp['ssh_keys']

    def all_domains(self):
        resp = self.send('domains/')
        return resp['domains']

    def show_droplet(self, droplet_id):
        resp = self.send('droplets/%s' % droplet_id)
        return resp['droplet']

    def all_tags(self):
        resp = self.send('tags/')
        return resp['tags']
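

# A minimal standalone usage sketch for DoManager (illustrative; assumes
# DO_API_TOKEN is exported in the environment):
#
#     manager = DoManager(os.getenv('DO_API_TOKEN'))
#     for droplet in manager.all_active_droplets():
#         print(droplet['name'], droplet['status'])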


class DigitalOceanInventory(object):

    ###########################################################################
    # Main execution path
    ###########################################################################

    def __init__(self):
        """ Main execution path """

        # DigitalOceanInventory data
        self.data = {}       # All DigitalOcean data
        self.inventory = {}  # Ansible Inventory

        # Define defaults
        self.cache_path = '.'
        self.cache_max_age = 0
        self.use_private_network = False
        self.group_variables = {}

        # Read settings, environment variables, and CLI arguments
        self.read_settings()
        self.read_environment()
        self.read_cli_args()

        # Verify credentials were set
        if not hasattr(self, 'api_token'):
            msg = 'Could not find values for DigitalOcean api_token. They must be specified via either ini file, ' \
                  'command line argument (--api-token), or environment variables (DO_API_TOKEN)\n'
            sys.stderr.write(msg)
            sys.exit(-1)

        # env command, show DigitalOcean credentials
        if self.args.env:
            print("DO_API_TOKEN=%s" % self.api_token)
            sys.exit(0)

        # Manage cache
        self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
        self.cache_refreshed = False

        if self.is_cache_valid():
            self.load_from_cache()
            if len(self.data) == 0:
                if self.args.force_cache:
                    sys.stderr.write('Cache is empty and --force-cache was specified\n')
                    sys.exit(-1)

        self.manager = DoManager(self.api_token)

        # Pick the json_data to print based on the CLI command
        if self.args.droplets:
            self.load_from_digital_ocean('droplets')
            json_data = {'droplets': self.data['droplets']}
        elif self.args.regions:
            self.load_from_digital_ocean('regions')
            json_data = {'regions': self.data['regions']}
        elif self.args.images:
            self.load_from_digital_ocean('images')
            json_data = {'images': self.data['images']}
        elif self.args.sizes:
            self.load_from_digital_ocean('sizes')
            json_data = {'sizes': self.data['sizes']}
        elif self.args.ssh_keys:
            self.load_from_digital_ocean('ssh_keys')
            json_data = {'ssh_keys': self.data['ssh_keys']}
        elif self.args.domains:
            self.load_from_digital_ocean('domains')
            json_data = {'domains': self.data['domains']}
        elif self.args.tags:
            self.load_from_digital_ocean('tags')
            json_data = {'tags': self.data['tags']}
        elif self.args.all:
            self.load_from_digital_ocean()
            json_data = self.data
        elif self.args.host:
            json_data = self.load_droplet_variables_for_host()
        else:    # '--list' this is last to make it default
            self.load_from_digital_ocean('droplets')
            self.build_inventory()
            json_data = self.inventory

        if self.cache_refreshed:
            self.write_to_cache()

        if self.args.pretty:
            print(json.dumps(json_data, indent=2))
        else:
            print(json.dumps(json_data))

    ###########################################################################
    # Script configuration
    ###########################################################################

    def read_settings(self):
        """ Reads the settings from the digital_ocean.ini file """
        # SafeConfigParser is deprecated on Python 3; plain ConfigParser behaves the same here
        config = ConfigParser.ConfigParser()
        config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'digital_ocean.ini')
        config.read(config_path)

        # Credentials
        if config.has_option('digital_ocean', 'api_token'):
            self.api_token = config.get('digital_ocean', 'api_token')

        # Cache related
        if config.has_option('digital_ocean', 'cache_path'):
            self.cache_path = config.get('digital_ocean', 'cache_path')
        if config.has_option('digital_ocean', 'cache_max_age'):
            self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')

        # Private IP Address
        if config.has_option('digital_ocean', 'use_private_network'):
            self.use_private_network = config.getboolean('digital_ocean', 'use_private_network')

        # Group variables
        if config.has_option('digital_ocean', 'group_variables'):
            self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))
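            # For example, in digital_ocean.ini (illustrative):
            #     group_variables = {'ansible_user': 'root'}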

    def read_environment(self):
        """ Reads the settings from environment variables """
        # Setup credentials
        if os.getenv("DO_API_TOKEN"):
            self.api_token = os.getenv("DO_API_TOKEN")
        if os.getenv("DO_API_KEY"):
            self.api_token = os.getenv("DO_API_KEY")

    def read_cli_args(self):
        """ Command line argument processing """
        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')

        parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
        parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')

        parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
        parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON')
        parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
        parser.add_argument('--images', action='store_true', help='List Images as JSON')
        parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
        parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
        parser.add_argument('--domains', action='store_true', help='List Domains as JSON')
        parser.add_argument('--tags', action='store_true', help='List Tags as JSON')

        parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results')

        parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
        parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
        parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
        parser.add_argument('--refresh-cache', '-r', action='store_true', default=False,
                            help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')

        parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN')
        parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token')

        self.args = parser.parse_args()

        if self.args.api_token:
            self.api_token = self.args.api_token

        # Make --list default if none of the other commands are specified
        if (not self.args.droplets and not self.args.regions and
                not self.args.images and not self.args.sizes and
                not self.args.ssh_keys and not self.args.domains and
                not self.args.tags and
                not self.args.all and not self.args.host):
            self.args.list = True

    ###########################################################################
    # Data Management
    ###########################################################################

    def load_from_digital_ocean(self, resource=None):
        """ Get JSON from DigitalOcean API """
        if self.args.force_cache and os.path.isfile(self.cache_filename):
            return
        # We always get fresh droplets
        if self.is_cache_valid() and not (resource == 'droplets' or resource is None):
            return
        if self.args.refresh_cache:
            resource = None

        if resource == 'droplets' or resource is None:
            self.data['droplets'] = self.manager.all_active_droplets()
            self.cache_refreshed = True
        if resource == 'regions' or resource is None:
            self.data['regions'] = self.manager.all_regions()
            self.cache_refreshed = True
        if resource == 'images' or resource is None:
            self.data['images'] = self.manager.all_images()
            self.cache_refreshed = True
        if resource == 'sizes' or resource is None:
            self.data['sizes'] = self.manager.sizes()
            self.cache_refreshed = True
        if resource == 'ssh_keys' or resource is None:
            self.data['ssh_keys'] = self.manager.all_ssh_keys()
            self.cache_refreshed = True
        if resource == 'domains' or resource is None:
            self.data['domains'] = self.manager.all_domains()
            self.cache_refreshed = True
        if resource == 'tags' or resource is None:
            self.data['tags'] = self.manager.all_tags()
            self.cache_refreshed = True

    def build_inventory(self):
        """ Build Ansible inventory of droplets """
        self.inventory = {
            'all': {
                'hosts': [],
                'vars': self.group_variables
            },
            '_meta': {'hostvars': {}}
        }

        # add all droplets by id and name
        for droplet in self.data['droplets']:
            # Pick the address Ansible should use, honoring the use_private_network
            # setting; skip droplets with no matching IPv4 address instead of
            # silently reusing the previous droplet's address
            dest = None
            net_type = 'private' if self.use_private_network else 'public'
            for net in droplet['networks']['v4']:
                if net['type'] == net_type:
                    dest = net['ip_address']
                    break
            if dest is None:
                continue

            self.inventory['all']['hosts'].append(dest)

            self.inventory[droplet['id']] = [dest]
            self.inventory[droplet['name']] = [dest]

            # groups that are always present
            for group in ('digital_ocean',
                          'region_' + droplet['region']['slug'],
                          'image_' + str(droplet['image']['id']),
                          'size_' + droplet['size']['slug'],
                          'distro_' + DigitalOceanInventory.to_safe(droplet['image']['distribution']),
                          'status_' + droplet['status']):
                if group not in self.inventory:
                    self.inventory[group] = {'hosts': [], 'vars': {}}
                self.inventory[group]['hosts'].append(dest)

            # groups that are not always present
            for group in (droplet['image']['slug'],
                          droplet['image']['name']):
                if group:
                    image = 'image_' + DigitalOceanInventory.to_safe(group)
                    if image not in self.inventory:
                        self.inventory[image] = {'hosts': [], 'vars': {}}
                    self.inventory[image]['hosts'].append(dest)

            if droplet['tags']:
                for tag in droplet['tags']:
                    if tag not in self.inventory:
                        self.inventory[tag] = {'hosts': [], 'vars': {}}
                    self.inventory[tag]['hosts'].append(dest)

            # hostvars
            info = self.do_namespace(droplet)
            self.inventory['_meta']['hostvars'][dest] = info
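
    # The inventory built above looks roughly like this (illustrative):
    #
    #     {
    #         "all": {"hosts": ["203.0.113.10"], "vars": {}},
    #         "digital_ocean": {"hosts": ["203.0.113.10"], "vars": {}},
    #         "region_nyc1": {"hosts": ["203.0.113.10"], "vars": {}},
    #         "_meta": {"hostvars": {"203.0.113.10": {"do_id": 1234, "do_name": "web1"}}}
    #     }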

    def load_droplet_variables_for_host(self):
        """ Generate a JSON response to a --host call """
        host = int(self.args.host)
        droplet = self.manager.show_droplet(host)
        info = self.do_namespace(droplet)
        return {'droplet': info}

    ###########################################################################
    # Cache Management
    ###########################################################################

    def is_cache_valid(self):
        """ Determines whether the cache file has expired or is still valid """
        if os.path.isfile(self.cache_filename):
            mod_time = os.path.getmtime(self.cache_filename)
            current_time = time()
            if (mod_time + self.cache_max_age) > current_time:
                return True
        return False

    def load_from_cache(self):
        """ Reads the data from the cache file and assigns it to member variables as Python Objects """
        try:
            with open(self.cache_filename, 'r') as cache:
                json_data = cache.read()
            data = json.loads(json_data)
        except IOError:
            data = {'data': {}, 'inventory': {}}

        self.data = data['data']
        self.inventory = data['inventory']

    def write_to_cache(self):
        """ Writes data in JSON format to a file """
        data = {'data': self.data, 'inventory': self.inventory}
        json_data = json.dumps(data, indent=2)

        with open(self.cache_filename, 'w') as cache:
            cache.write(json_data)

    ###########################################################################
    # Utilities
    ###########################################################################

    @staticmethod
    def to_safe(word):
        """ Converts 'bad' characters in a string to underscores so they can be used as Ansible groups """
        return re.sub(r"[^A-Za-z0-9\-.]", "_", word)

    @staticmethod
    def do_namespace(data):
        """ Returns a copy of the dictionary with all the keys put in a 'do_' namespace """
        info = {}
        for k, v in data.items():
            info['do_' + k] = v
        return info


###########################################################################
# Run the script
if __name__ == '__main__':
    DigitalOceanInventory()