[cloud] new module aws_batch_job_queue: support for AWS Batch Job Queues (#26701)

1. Removed redundant default values in argument_spec
2. Wrapped non-standard Python library imports in a try/except ImportError block
3. Added region as a mandatory element in argument_spec
4. Fixed the typo in api_params when accessing the job queue: changed job_queue to jobQueue
5. Updated the set_api_params() method to use snake_dict_to_camel_dict() (see the conversion sketch after this list)
6. Updated the ANSIBLE_METADATA version from 1.0 to 1.1
7. Updated the version_added in the DOCUMENTATION block from 2.4 to 2.5
8. Initialized the response value to None
9. Updated the RETURN documentation
10. Updated the return from the manage_state() method so that batch_job_queue_action is no longer embedded in ansible_facts
11. Updated module.exit_json() in main() to convert keys back to snake_case with camel_dict_to_snake_dict() for consistency throughout the module
12. Renamed the file from batch_job_queue.py to aws_batch_job_queue.py
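Items 5 and 11 both hinge on converting dictionary keys between the snake_case used by Ansible parameters and the camelCase used by the AWS Batch API. A minimal, standalone sketch of that conversion (an illustrative reimplementation, not the module_utils helpers themselves):

    import re

    def snake_to_camel(key):
        # 'job_queue_name' -> 'jobQueueName'
        parts = key.split('_')
        return parts[0] + ''.join(p.capitalize() for p in parts[1:])

    def camel_to_snake(key):
        # 'jobQueueArn' -> 'job_queue_arn'
        return re.sub(r'(?<!^)(?=[A-Z])', '_', key).lower()

    request = {'job_queue_name': 'my-queue', 'priority': 1}
    print({snake_to_camel(k): v for k, v in request.items()})
    # {'jobQueueName': 'my-queue', 'priority': 1}   (shape sent to boto3)

    response = {'jobQueueArn': 'arn:aws:batch:....', 'status': 'VALID'}
    print({camel_to_snake(k): v for k, v in response.items()})
    # {'job_queue_arn': 'arn:aws:batch:....', 'status': 'VALID'}   (shape returned to the playbook)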
Jonathan Meran 2018-01-30 17:22:06 -05:00 committed by Ryan Brown
parent 287f3a609e
commit 8578b3c1c5
2 changed files with 336 additions and 1 deletion

lib/ansible/module_utils/aws/batch.py

@@ -29,7 +29,7 @@
 This module adds shared support for Batch modules.
 """
-from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn
+from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn, snake_dict_to_camel_dict

 try:
     from botocore.exceptions import ClientError

@@ -84,3 +84,14 @@ def cc(key):
     """
     components = key.split('_')
     return components[0] + "".join([token.capitalize() for token in components[1:]])
+
+
+def set_api_params(module, module_params):
+    """
+    Sets module parameters to those expected by the boto3 API.
+    :param module:
+    :param module_params:
+    :return:
+    """
+    api_params = dict((k, v) for k, v in dict(module.params).items() if k in module_params and v is not None)
+    return snake_dict_to_camel_dict(api_params)
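For reference, a sketch of what the new set_api_params() helper produces. The stand-in params dict below is illustrative only and replaces a real AnsibleModule: the helper keeps whitelisted, non-None parameters and then camelizes the keys.

    # Illustrative stand-in for module.params; not a real AnsibleModule.
    params = {
        'job_queue_name': 'my-queue',       # whitelisted and not None -> kept
        'priority': 1,                      # whitelisted and not None -> kept
        'job_queue_state': None,            # value is None -> dropped
        'compute_environment_order': [],    # not in the whitelist below -> dropped
    }
    whitelist = ('job_queue_name', 'priority')

    # Same filter that set_api_params() applies before camelizing the keys.
    api_params = dict((k, v) for k, v in params.items()
                      if k in whitelist and v is not None)
    print(api_params)
    # -> {'job_queue_name': 'my-queue', 'priority': 1}
    # snake_dict_to_camel_dict() then yields {'jobQueueName': 'my-queue', 'priority': 1},
    # the casing boto3's create_job_queue() expects.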

lib/ansible/modules/cloud/amazon/aws_batch_job_queue.py

@@ -0,0 +1,324 @@
#!/usr/bin/python
# Copyright (c) 2017 Jon Meran <jonathan.meran@sonos.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: aws_batch_job_queue
short_description: Manage AWS Batch Job Queues
description:
    - This module allows the management of AWS Batch Job Queues.
      It is idempotent and supports "Check" mode. Use module M(batch_compute_environment) to manage the compute
      environment, M(batch_job_queue) to manage job queues, M(batch_job_definition) to manage job definitions.
version_added: "2.5"
author: Jon Meran (@jonmer85)
options:
  job_queue_name:
    description:
      - The name for the job queue
    required: true

  state:
    description:
      - Describes the desired state.
    required: true
    default: "present"
    choices: ["present", "absent"]

  job_queue_state:
    description:
      - The state of the job queue. If the job queue state is ENABLED, it is able to accept jobs.
    default: "ENABLED"
    choices: ["ENABLED", "DISABLED"]

  priority:
    description:
      - The priority of the job queue. Job queues with a higher priority (or a lower integer value for the priority
        parameter) are evaluated first when associated with the same compute environment. Priority is determined in
        ascending order, for example, a job queue with a priority value of 1 is given scheduling preference over a job
        queue with a priority value of 10.
    required: true

  compute_environment_order:
    description:
      - The set of compute environments mapped to a job queue and their order relative to each other. The job
        scheduler uses this parameter to determine which compute environment should execute a given job. Compute
        environments must be in the VALID state before you can associate them with a job queue. You can associate up
        to 3 compute environments with a job queue.
    required: true

requirements:
    - boto3

extends_documentation_fragment:
    - aws
    - ec2
'''
EXAMPLES = '''
---
- hosts: localhost
  gather_facts: no
  vars:
    state: present
  tasks:
  - name: My Batch Job Queue
    aws_batch_job_queue:
      job_queue_name: jobQueueName
      state: present
      region: us-east-1
      job_queue_state: ENABLED
      priority: 1
      compute_environment_order:
      - order: 1
        compute_environment: my_compute_env1
      - order: 2
        compute_environment: my_compute_env2

  - name: show results
    debug: var=batch_job_queue_action
'''
RETURN = '''
---
output:
  description: "returns what action was taken, whether something was changed, invocation and response"
  returned: always
  sample:
    batch_job_queue_action: updated
    changed: false
    response:
      job_queue_arn: "arn:aws:batch:...."
      job_queue_name: <name>
      priority: 1
      state: DISABLED
      status: UPDATING
      status_reason: "JobQueue Healthy"
  type: dict
'''
from ansible.module_utils._text import to_native
from ansible.module_utils.aws.batch import AWSConnection, cc, set_api_params
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info, boto3_conn, HAS_BOTO3
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
import re
import traceback

try:
    from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
except ImportError:
    pass  # Handled by HAS_BOTO3


# ---------------------------------------------------------------------------------------------------
#
# Helper Functions & classes
#
# ---------------------------------------------------------------------------------------------------

def validate_params(module, aws):
    """
    Performs basic parameter validation.

    :param module:
    :param aws:
    :return:
    """
    return
# ---------------------------------------------------------------------------------------------------
#
# Batch Job Queue functions
#
# ---------------------------------------------------------------------------------------------------

def get_current_job_queue(module, connection):
    try:
        environments = connection.client().describe_job_queues(
            jobQueues=[module.params['job_queue_name']]
        )
        return environments['jobQueues'][0] if len(environments['jobQueues']) > 0 else None
    except ClientError:
        return None
def create_job_queue(module, aws):
    """
    Adds a Batch job queue

    :param module:
    :param aws:
    :return:
    """

    client = aws.client('batch')
    changed = False

    # set API parameters
    params = ('job_queue_name', 'priority')
    api_params = set_api_params(module, params)

    if module.params['job_queue_state'] is not None:
        api_params['state'] = module.params['job_queue_state']

    api_params['computeEnvironmentOrder'] = get_compute_environment_order_list(module)

    try:
        if not module.check_mode:
            client.create_job_queue(**api_params)
        changed = True
    except (ClientError, ParamValidationError, MissingParametersError) as e:
        module.fail_json(msg='Error creating job queue: {0}'.format(to_native(e)),
                         exception=traceback.format_exc())

    return changed


def get_compute_environment_order_list(module):
    compute_environment_order_list = []
    for ceo in module.params['compute_environment_order']:
        compute_environment_order_list.append(dict(order=ceo['order'], computeEnvironment=ceo['compute_environment']))
    return compute_environment_order_list
def remove_job_queue(module, aws):
    """
    Remove a Batch job queue

    :param module:
    :param aws:
    :return:
    """

    client = aws.client('batch')
    changed = False

    # set API parameters
    api_params = {'jobQueue': module.params['job_queue_name']}

    try:
        if not module.check_mode:
            client.delete_job_queue(**api_params)
        changed = True
    except (ClientError, ParamValidationError, MissingParametersError) as e:
        module.fail_json(msg='Error removing job queue: {0}'.format(to_native(e)),
                         exception=traceback.format_exc())
    return changed
def manage_state(module, aws):
    changed = False
    current_state = 'absent'
    state = module.params['state']
    job_queue_state = module.params['job_queue_state']
    job_queue_name = module.params['job_queue_name']
    priority = module.params['priority']
    action_taken = 'none'
    response = None

    check_mode = module.check_mode

    # check if the job queue exists
    current_job_queue = get_current_job_queue(module, aws)
    if current_job_queue:
        current_state = 'present'

    if state == 'present':
        if current_state == 'present':
            updates = False
            # Update Batch Job Queue configuration
            job_kwargs = {'jobQueue': job_queue_name}

            # Update configuration if needed
            if job_queue_state and current_job_queue['state'] != job_queue_state:
                job_kwargs.update({'state': job_queue_state})
                updates = True
            if priority is not None and current_job_queue['priority'] != priority:
                job_kwargs.update({'priority': priority})
                updates = True

            new_compute_environment_order_list = get_compute_environment_order_list(module)
            if new_compute_environment_order_list != current_job_queue['computeEnvironmentOrder']:
                job_kwargs['computeEnvironmentOrder'] = new_compute_environment_order_list
                updates = True

            if updates:
                try:
                    if not check_mode:
                        aws.client().update_job_queue(**job_kwargs)
                    changed = True
                    action_taken = "updated"
                except (ParamValidationError, ClientError) as e:
                    module.fail_json(msg="Unable to update job queue: {0}".format(to_native(e)),
                                     exception=traceback.format_exc())
        else:
            # Create Job Queue
            changed = create_job_queue(module, aws)
            action_taken = 'added'

        # Describe job queue
        response = get_current_job_queue(module, aws)
        if not response:
            module.fail_json(msg='Unable to get job queue information after creating/updating')
    else:
        if current_state == 'present':
            # remove the Job Queue
            changed = remove_job_queue(module, aws)
            action_taken = 'deleted'

    return dict(changed=changed, batch_job_queue_action=action_taken, response=response)
# ---------------------------------------------------------------------------------------------------
#
# MAIN
#
# ---------------------------------------------------------------------------------------------------

def main():
    """
    Main entry point.

    :return dict: changed, batch_job_queue_action, response
    """

    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            state=dict(required=False, default='present', choices=['present', 'absent']),
            job_queue_name=dict(required=True),
            job_queue_state=dict(required=False, default='ENABLED', choices=['ENABLED', 'DISABLED']),
            priority=dict(type='int', required=True),
            compute_environment_order=dict(type='list', required=True),
            region=dict(aliases=['aws_region', 'ec2_region'])
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    # validate dependencies
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required for this module.')

    aws = AWSConnection(module, ['batch'])

    validate_params(module, aws)

    results = manage_state(module, aws)

    module.exit_json(**camel_dict_to_snake_dict(results))


if __name__ == '__main__':
    main()
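To see the camelCase shape that main() converts with camel_dict_to_snake_dict(), a small boto3 sketch independent of the module (the region and queue name below are placeholders, and boto3 plus AWS credentials are assumed):

    # Illustrative only: inspect a job queue directly with boto3, bypassing the module.
    import boto3

    batch = boto3.client('batch', region_name='us-east-1')  # placeholder region
    reply = batch.describe_job_queues(jobQueues=['jobQueueName'])  # placeholder queue name
    for queue in reply['jobQueues']:
        # The raw API uses camelCase keys (jobQueueName, jobQueueArn, statusReason, ...);
        # the module returns the same data to the playbook with snake_case keys,
        # matching the RETURN documentation above.
        print(queue['jobQueueName'], queue['state'], queue['priority'])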