Migrate a batch of AWS modules to AnsibleAWSModule (1) (#67191)
* Move various Amazon modules over to AnsibleAWSModule
* Remove HAS_BOTO3 logic, it's handled by AnsibleAWSModule
* Simplify argument_spec handling: ec2_argument_spec is automatically added by AnsibleAWSModule
* Cleanup boto3 error handling using fail_json_aws
* Migrate simple cases from boto3_conn to module.client()
* Migrate batch modules over to module.client()
* Migrate EFS module over to module.client()
* Comment cleanup
* Use deprecated_aliases in argument_spec rather than custom code
* Cleanup unused imports
* Make use of module.region
* Deprecate batch.AWSConnection, it's now dead code
parent 7b792e0a53
commit f520238d60
13 changed files with 268 additions and 489 deletions
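For orientation, the shape the migrated modules converge on looks roughly like the sketch below. It is pieced together from the aws_az_info/aws_region_info hunks later in this diff; the service name, the filters option and the error message are illustrative, not a verbatim file from this commit.

#!/usr/bin/python
# Minimal sketch of the post-migration pattern (illustrative, not a file from this commit).

from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import AWSRetry, camel_dict_to_snake_dict

try:
    from botocore.exceptions import BotoCoreError, ClientError
except ImportError:
    pass  # Handled by AnsibleAWSModule


def main():
    # ec2_argument_spec() is no longer needed; AnsibleAWSModule adds the
    # common AWS arguments (region, credentials, endpoint) automatically.
    argument_spec = dict(
        filters=dict(default={}, type='dict'),
    )
    module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)

    # module.client() replaces the get_aws_connection_info()/boto3_conn() pair.
    client = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())

    try:
        response = client.describe_regions()
    except (BotoCoreError, ClientError) as e:
        # fail_json_aws() attaches the traceback and the boto3 error details itself.
        module.fail_json_aws(e, msg="Unable to describe regions")

    module.exit_json(regions=[camel_dict_to_snake_dict(r) for r in response['Regions']])


if __name__ == '__main__':
    main()
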
@@ -47,6 +47,9 @@ class AWSConnection(object):

     def __init__(self, ansible_obj, resources, boto3=True):

+        ansible_obj.deprecate("The 'ansible.module_utils.aws.batch.AWSConnection' class is deprecated please use 'AnsibleAWSModule.client()'",
+                              version='2.14')
+
         self.region, self.endpoint, aws_connect_kwargs = get_aws_connection_info(ansible_obj, boto3=boto3)

         self.resource_client = dict()

@@ -70,43 +70,25 @@ availability_zones:
     ]"
 '''

-import traceback
-from ansible.module_utils._text import to_native
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import get_aws_connection_info, ec2_argument_spec, boto3_conn
-from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict, HAS_BOTO3
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import AWSRetry, ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict

 try:
     from botocore.exceptions import ClientError, BotoCoreError
 except ImportError:
-    pass  # will be detected by imported HAS_BOTO3
+    pass  # Handled by AnsibleAWSModule


 def main():
-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            filters=dict(default={}, type='dict')
-        )
+    argument_spec = dict(
+        filters=dict(default={}, type='dict')
     )

-    module = AnsibleModule(argument_spec=argument_spec)
+    module = AnsibleAWSModule(argument_spec=argument_spec)
     if module._name == 'aws_az_facts':
         module.deprecate("The 'aws_az_facts' module has been renamed to 'aws_az_info'", version='2.14')

-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 required for this module')
-
-    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
-    connection = boto3_conn(
-        module,
-        conn_type='client',
-        resource='ec2',
-        region=region,
-        endpoint=ec2_url,
-        **aws_connect_params
-    )
+    connection = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())

     # Replace filter key underscores with dashes, for compatibility
     sanitized_filters = dict((k.replace('_', '-'), v) for k, v in module.params.get('filters').items())
@@ -115,12 +97,8 @@ def main():
         availability_zones = connection.describe_availability_zones(
             Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
         )
-    except ClientError as e:
-        module.fail_json(msg="Unable to describe availability zones: {0}".format(to_native(e)),
-                         exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
-    except BotoCoreError as e:
-        module.fail_json(msg="Unable to describe availability zones: {0}".format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg="Unable to describe availability zones.")

     # Turn the boto3 result into ansible_friendly_snaked_names
     snaked_availability_zones = [camel_dict_to_snake_dict(az) for az in availability_zones['AvailabilityZones']]

@@ -231,18 +231,14 @@ output:
     type: dict
 '''

-from ansible.module_utils._text import to_native
-from ansible.module_utils.aws.batch import AWSConnection
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import ec2_argument_spec, HAS_BOTO3
+from ansible.module_utils.aws.core import AnsibleAWSModule
 from ansible.module_utils.ec2 import snake_dict_to_camel_dict, camel_dict_to_snake_dict
 import re
-import traceback

 try:
-    from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
+    from botocore.exceptions import ClientError, BotoCoreError
 except ImportError:
-    pass  # Handled by HAS_BOTO3
+    pass  # Handled by AnsibleAWSModule


 # ---------------------------------------------------------------------------------------------------
@@ -263,12 +259,11 @@ def set_api_params(module, module_params):
     return snake_dict_to_camel_dict(api_params)


-def validate_params(module, aws):
+def validate_params(module):
     """
     Performs basic parameter validation.

     :param module:
-    :param aws:
     :return:
     """

@@ -294,9 +289,9 @@ def validate_params(module, aws):
 #
 # ---------------------------------------------------------------------------------------------------

-def get_current_compute_environment(module, connection):
+def get_current_compute_environment(module, client):
     try:
-        environments = connection.client().describe_compute_environments(
+        environments = client.describe_compute_environments(
             computeEnvironments=[module.params['compute_environment_name']]
         )
         if len(environments['computeEnvironments']) > 0:
@@ -307,16 +302,15 @@ def get_current_compute_environment(module, connection):
         return None


-def create_compute_environment(module, aws):
+def create_compute_environment(module, client):
     """
     Adds a Batch compute environment

     :param module:
-    :param aws:
+    :param client:
     :return:
     """

-    client = aws.client('batch')
     changed = False

     # set API parameters
@@ -344,23 +338,21 @@ def create_compute_environment(module, aws):
         if not module.check_mode:
             client.create_compute_environment(**api_params)
             changed = True
-    except (ClientError, ParamValidationError, MissingParametersError) as e:
-        module.fail_json(msg='Error creating compute environment: {0}'.format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (ClientError, BotoCoreError) as e:
+        module.fail_json_aws(e, msg='Error creating compute environment')

     return changed


-def remove_compute_environment(module, aws):
+def remove_compute_environment(module, client):
     """
     Remove a Batch compute environment

     :param module:
-    :param aws:
+    :param client:
     :return:
     """

-    client = aws.client('batch')
     changed = False

     # set API parameters
@@ -370,13 +362,12 @@ def remove_compute_environment(module, aws):
         if not module.check_mode:
             client.delete_compute_environment(**api_params)
             changed = True
-    except (ClientError, ParamValidationError, MissingParametersError) as e:
-        module.fail_json(msg='Error removing compute environment: {0}'.format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (ClientError, BotoCoreError) as e:
+        module.fail_json_aws(e, msg='Error removing compute environment')
     return changed


-def manage_state(module, aws):
+def manage_state(module, client):
     changed = False
     current_state = 'absent'
     state = module.params['state']
@@ -392,7 +383,7 @@ def manage_state(module, aws):
     check_mode = module.check_mode

     # check if the compute environment exists
-    current_compute_environment = get_current_compute_environment(module, aws)
+    current_compute_environment = get_current_compute_environment(module, client)
     response = current_compute_environment
     if current_compute_environment:
         current_state = 'present'
@@ -423,27 +414,26 @@ def manage_state(module, aws):
             if updates:
                 try:
                     if not check_mode:
-                        update_env_response = aws.client().update_compute_environment(**compute_kwargs)
+                        update_env_response = client.update_compute_environment(**compute_kwargs)
                         if not update_env_response:
                             module.fail_json(msg='Unable to get compute environment information after creating')
                     changed = True
                     action_taken = "updated"
-                except (ParamValidationError, ClientError) as e:
-                    module.fail_json(msg="Unable to update environment: {0}".format(to_native(e)),
-                                     exception=traceback.format_exc())
+                except (BotoCoreError, ClientError) as e:
+                    module.fail_json_aws(e, msg="Unable to update environment.")

         else:
             # Create Batch Compute Environment
-            changed = create_compute_environment(module, aws)
+            changed = create_compute_environment(module, client)
             # Describe compute environment
             action_taken = 'added'
-            response = get_current_compute_environment(module, aws)
+            response = get_current_compute_environment(module, client)
             if not response:
                 module.fail_json(msg='Unable to get compute environment information after creating')
     else:
         if current_state == 'present':
             # remove the compute environment
-            changed = remove_compute_environment(module, aws)
+            changed = remove_compute_environment(module, client)
             action_taken = 'deleted'
     return dict(changed=changed, batch_compute_environment_action=action_taken, response=response)

@@ -461,45 +451,37 @@ def main():
     :return dict: changed, batch_compute_environment_action, response
     """

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            state=dict(default='present', choices=['present', 'absent']),
-            compute_environment_name=dict(required=True),
-            type=dict(required=True, choices=['MANAGED', 'UNMANAGED']),
-            compute_environment_state=dict(required=False, default='ENABLED', choices=['ENABLED', 'DISABLED']),
-            service_role=dict(required=True),
-            compute_resource_type=dict(required=True, choices=['EC2', 'SPOT']),
-            minv_cpus=dict(type='int', required=True),
-            maxv_cpus=dict(type='int', required=True),
-            desiredv_cpus=dict(type='int'),
-            instance_types=dict(type='list', required=True),
-            image_id=dict(),
-            subnets=dict(type='list', required=True),
-            security_group_ids=dict(type='list', required=True),
-            ec2_key_pair=dict(),
-            instance_role=dict(required=True),
-            tags=dict(type='dict'),
-            bid_percentage=dict(type='int'),
-            spot_iam_fleet_role=dict(),
-            region=dict(aliases=['aws_region', 'ec2_region'])
-        )
+    argument_spec = dict(
+        state=dict(default='present', choices=['present', 'absent']),
+        compute_environment_name=dict(required=True),
+        type=dict(required=True, choices=['MANAGED', 'UNMANAGED']),
+        compute_environment_state=dict(required=False, default='ENABLED', choices=['ENABLED', 'DISABLED']),
+        service_role=dict(required=True),
+        compute_resource_type=dict(required=True, choices=['EC2', 'SPOT']),
+        minv_cpus=dict(type='int', required=True),
+        maxv_cpus=dict(type='int', required=True),
+        desiredv_cpus=dict(type='int'),
+        instance_types=dict(type='list', required=True),
+        image_id=dict(),
+        subnets=dict(type='list', required=True),
+        security_group_ids=dict(type='list', required=True),
+        ec2_key_pair=dict(),
+        instance_role=dict(required=True),
+        tags=dict(type='dict'),
+        bid_percentage=dict(type='int'),
+        spot_iam_fleet_role=dict(),
     )

-    module = AnsibleModule(
+    module = AnsibleAWSModule(
         argument_spec=argument_spec,
         supports_check_mode=True
     )

-    # validate dependencies
-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 is required for this module.')
+    client = module.client('batch')

-    aws = AWSConnection(module, ['batch'])
+    validate_params(module)

-    validate_params(module, aws)
-
-    results = manage_state(module, aws)
+    results = manage_state(module, client)

     module.exit_json(**camel_dict_to_snake_dict(results, ignore_list=['Tags']))

@@ -226,18 +226,14 @@ output:
     type: dict
 '''

-from ansible.module_utils._text import to_native
-from ansible.module_utils.aws.batch import AWSConnection, cc, set_api_params
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import ec2_argument_spec, HAS_BOTO3
+from ansible.module_utils.aws.batch import cc, set_api_params
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import camel_dict_to_snake_dict

-import traceback

 try:
-    from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
+    from botocore.exceptions import ClientError, BotoCoreError
 except ImportError:
-    pass  # Handled by HAS_BOTO3
+    pass  # Handled by AnsibleAWSModule


 # ---------------------------------------------------------------------------------------------------
@@ -251,12 +247,12 @@ except ImportError:
 # logger.setLevel(logging.DEBUG)


-def validate_params(module, aws):
+def validate_params(module, batch_client):
     """
     Performs basic parameter validation.

     :param module:
-    :param aws:
+    :param batch_client:
     :return:
     """
     return
@@ -268,9 +264,9 @@ def validate_params(module, aws):
 #
 # ---------------------------------------------------------------------------------------------------

-def get_current_job_definition(module, connection):
+def get_current_job_definition(module, batch_client):
     try:
-        environments = connection.client().describe_job_definitions(
+        environments = batch_client.describe_job_definitions(
             jobDefinitionName=module.params['job_definition_name']
         )
         if len(environments['jobDefinitions']) > 0:
@@ -283,16 +279,15 @@ def get_current_job_definition(module, connection):
         return None


-def create_job_definition(module, aws):
+def create_job_definition(module, batch_client):
     """
     Adds a Batch job definition

     :param module:
-    :param aws:
+    :param batch_client:
     :return:
     """

-    client = aws.client('batch')
     changed = False

     # set API parameters
@@ -305,11 +300,10 @@ def create_job_definition(module, aws):

     try:
         if not module.check_mode:
-            client.register_job_definition(**api_params)
+            batch_client.register_job_definition(**api_params)
             changed = True
-    except (ClientError, ParamValidationError, MissingParametersError) as e:
-        module.fail_json(msg='Error registering job definition: {0}'.format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Error registering job definition')

     return changed

@@ -334,25 +328,23 @@ def get_compute_environment_order_list(module):
     return compute_environment_order_list


-def remove_job_definition(module, aws):
+def remove_job_definition(module, batch_client):
     """
     Remove a Batch job definition

     :param module:
-    :param aws:
+    :param batch_client:
     :return:
     """

-    client = aws.client('batch')
     changed = False

     try:
         if not module.check_mode:
-            client.deregister_job_definition(jobDefinition=module.params['job_definition_arn'])
+            batch_client.deregister_job_definition(jobDefinition=module.params['job_definition_arn'])
             changed = True
-    except (ClientError, ParamValidationError, MissingParametersError) as e:
-        module.fail_json(msg='Error removing job definition: {0}'.format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Error removing job definition')
     return changed


@@ -377,7 +369,7 @@ def job_definition_equal(module, current_definition):
     return equal


-def manage_state(module, aws):
+def manage_state(module, batch_client):
     changed = False
     current_state = 'absent'
     state = module.params['state']
@@ -388,7 +380,7 @@ def manage_state(module, aws):
     check_mode = module.check_mode

     # check if the job definition exists
-    current_job_definition = get_current_job_definition(module, aws)
+    current_job_definition = get_current_job_definition(module, batch_client)
     if current_job_definition:
         current_state = 'present'

@@ -396,21 +388,21 @@ def manage_state(module, aws):
         if current_state == 'present':
             # check if definition has changed and register a new version if necessary
             if not job_definition_equal(module, current_job_definition):
-                create_job_definition(module, aws)
+                create_job_definition(module, batch_client)
                 action_taken = 'updated with new version'
                 changed = True
         else:
             # Create Job definition
-            changed = create_job_definition(module, aws)
+            changed = create_job_definition(module, batch_client)
             action_taken = 'added'

-        response = get_current_job_definition(module, aws)
+        response = get_current_job_definition(module, batch_client)
         if not response:
             module.fail_json(msg='Unable to get job definition information after creating/updating')
     else:
         if current_state == 'present':
             # remove the Job definition
-            changed = remove_job_definition(module, aws)
+            changed = remove_job_definition(module, batch_client)
             action_taken = 'deregistered'
     return dict(changed=changed, batch_job_definition_action=action_taken, response=response)

@@ -428,45 +420,37 @@ def main():
     :return dict: ansible facts
     """

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            state=dict(required=False, default='present', choices=['present', 'absent']),
-            job_definition_name=dict(required=True),
-            job_definition_arn=dict(),
-            type=dict(required=True),
-            parameters=dict(type='dict'),
-            image=dict(required=True),
-            vcpus=dict(type='int', required=True),
-            memory=dict(type='int', required=True),
-            command=dict(type='list', default=[]),
-            job_role_arn=dict(),
-            volumes=dict(type='list', default=[]),
-            environment=dict(type='list', default=[]),
-            mount_points=dict(type='list', default=[]),
-            readonly_root_filesystem=dict(),
-            privileged=dict(),
-            ulimits=dict(type='list', default=[]),
-            user=dict(),
-            attempts=dict(type='int'),
-            region=dict(aliases=['aws_region', 'ec2_region'])
-        )
+    argument_spec = dict(
+        state=dict(required=False, default='present', choices=['present', 'absent']),
+        job_definition_name=dict(required=True),
+        job_definition_arn=dict(),
+        type=dict(required=True),
+        parameters=dict(type='dict'),
+        image=dict(required=True),
+        vcpus=dict(type='int', required=True),
+        memory=dict(type='int', required=True),
+        command=dict(type='list', default=[]),
+        job_role_arn=dict(),
+        volumes=dict(type='list', default=[]),
+        environment=dict(type='list', default=[]),
+        mount_points=dict(type='list', default=[]),
+        readonly_root_filesystem=dict(),
+        privileged=dict(),
+        ulimits=dict(type='list', default=[]),
+        user=dict(),
+        attempts=dict(type='int')
     )

-    module = AnsibleModule(
+    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=True
     )

-    # validate dependencies
-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 is required for this module.')
+    batch_client = module.client('batch')

-    aws = AWSConnection(module, ['batch'])
+    validate_params(module, batch_client)

-    validate_params(module, aws)
-
-    results = manage_state(module, aws)
+    results = manage_state(module, batch_client)

     module.exit_json(**camel_dict_to_snake_dict(results))

@@ -114,19 +114,14 @@ output:
     type: dict
 '''

-from ansible.module_utils._text import to_native
-from ansible.module_utils.aws.batch import AWSConnection, cc, set_api_params
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info, boto3_conn, HAS_BOTO3
+from ansible.module_utils.aws.batch import set_api_params
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import camel_dict_to_snake_dict

 import re
-import traceback

 try:
-    from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
+    from botocore.exceptions import BotoCoreError, ClientError
 except ImportError:
-    pass  # Handled by HAS_BOTO3
+    pass  # Handled by AnsibleAWSModule

 # ---------------------------------------------------------------------------------------------------
 #
@@ -135,13 +130,11 @@ except ImportError:
 # ---------------------------------------------------------------------------------------------------


-def validate_params(module, aws):
+def validate_params(module):
     """
     Performs basic parameter validation.

     :param module:
-    :param aws:
     :return:
     """
     return
@@ -152,9 +145,9 @@ def validate_params(module, aws):
 #
 # ---------------------------------------------------------------------------------------------------

-def get_current_job_queue(module, connection):
+def get_current_job_queue(module, client):
     try:
-        environments = connection.client().describe_job_queues(
+        environments = client.describe_job_queues(
             jobQueues=[module.params['job_queue_name']]
         )
         return environments['jobQueues'][0] if len(environments['jobQueues']) > 0 else None
@@ -162,16 +155,15 @@ def get_current_job_queue(module, connection):
         return None


-def create_job_queue(module, aws):
+def create_job_queue(module, client):
     """
     Adds a Batch job queue

     :param module:
-    :param aws:
+    :param client:
     :return:
     """

-    client = aws.client('batch')
     changed = False

     # set API parameters
@@ -187,9 +179,8 @@ def create_job_queue(module, aws):
         if not module.check_mode:
             client.create_job_queue(**api_params)
             changed = True
-    except (ClientError, ParamValidationError, MissingParametersError) as e:
-        module.fail_json(msg='Error creating compute environment: {0}'.format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Error creating compute environment')

     return changed

@@ -201,16 +192,15 @@ def get_compute_environment_order_list(module):
     return compute_environment_order_list


-def remove_job_queue(module, aws):
+def remove_job_queue(module, client):
     """
     Remove a Batch job queue

     :param module:
-    :param aws:
+    :param client:
     :return:
     """

-    client = aws.client('batch')
     changed = False

     # set API parameters
@@ -220,13 +210,12 @@ def remove_job_queue(module, aws):
         if not module.check_mode:
             client.delete_job_queue(**api_params)
             changed = True
-    except (ClientError, ParamValidationError, MissingParametersError) as e:
-        module.fail_json(msg='Error removing job queue: {0}'.format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Error removing job queue')
     return changed


-def manage_state(module, aws):
+def manage_state(module, client):
     changed = False
     current_state = 'absent'
     state = module.params['state']
@@ -239,7 +228,7 @@ def manage_state(module, aws):
     check_mode = module.check_mode

     # check if the job queue exists
-    current_job_queue = get_current_job_queue(module, aws)
+    current_job_queue = get_current_job_queue(module, client)
     if current_job_queue:
         current_state = 'present'

@@ -265,26 +254,25 @@ def manage_state(module, aws):
             if updates:
                 try:
                     if not check_mode:
-                        aws.client().update_job_queue(**job_kwargs)
+                        client.update_job_queue(**job_kwargs)
                         changed = True
                         action_taken = "updated"
-                except (ParamValidationError, ClientError) as e:
-                    module.fail_json(msg="Unable to update job queue: {0}".format(to_native(e)),
-                                     exception=traceback.format_exc())
+                except (BotoCoreError, ClientError) as e:
+                    module.fail_json_aws(e, msg="Unable to update job queue")

         else:
             # Create Job Queue
-            changed = create_job_queue(module, aws)
+            changed = create_job_queue(module, client)
             action_taken = 'added'

         # Describe job queue
-        response = get_current_job_queue(module, aws)
+        response = get_current_job_queue(module, client)
         if not response:
             module.fail_json(msg='Unable to get job queue information after creating/updating')
     else:
         if current_state == 'present':
             # remove the Job Queue
-            changed = remove_job_queue(module, aws)
+            changed = remove_job_queue(module, client)
             action_taken = 'deleted'
     return dict(changed=changed, batch_job_queue_action=action_taken, response=response)

@@ -302,32 +290,24 @@ def main():
     :return dict: changed, batch_job_queue_action, response
     """

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            state=dict(required=False, default='present', choices=['present', 'absent']),
-            job_queue_name=dict(required=True),
-            job_queue_state=dict(required=False, default='ENABLED', choices=['ENABLED', 'DISABLED']),
-            priority=dict(type='int', required=True),
-            compute_environment_order=dict(type='list', required=True),
-            region=dict(aliases=['aws_region', 'ec2_region'])
-        )
+    argument_spec = dict(
+        state=dict(required=False, default='present', choices=['present', 'absent']),
+        job_queue_name=dict(required=True),
+        job_queue_state=dict(required=False, default='ENABLED', choices=['ENABLED', 'DISABLED']),
+        priority=dict(type='int', required=True),
+        compute_environment_order=dict(type='list', required=True),
     )

-    module = AnsibleModule(
+    module = AnsibleAWSModule(
         argument_spec=argument_spec,
         supports_check_mode=True
     )

-    # validate dependencies
-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 is required for this module.')
+    client = module.client('batch')

-    aws = AWSConnection(module, ['batch'])
+    validate_params(module)

-    validate_params(module, aws)
-
-    results = manage_state(module, aws)
+    results = manage_state(module, client)

     module.exit_json(**camel_dict_to_snake_dict(results))

@@ -59,43 +59,25 @@ regions:
     }]"
 '''

-import traceback
-from ansible.module_utils._text import to_native
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import get_aws_connection_info, ec2_argument_spec, boto3_conn
-from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict, HAS_BOTO3
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import AWSRetry, ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict

 try:
     from botocore.exceptions import ClientError, BotoCoreError
 except ImportError:
-    pass  # will be detected by imported HAS_BOTO3
+    pass  # Handled by AnsibleAWSModule


 def main():
-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            filters=dict(default={}, type='dict')
-        )
+    argument_spec = dict(
+        filters=dict(default={}, type='dict')
     )

-    module = AnsibleModule(argument_spec=argument_spec)
+    module = AnsibleAWSModule(argument_spec=argument_spec)
     if module._name == 'aws_region_facts':
         module.deprecate("The 'aws_region_facts' module has been renamed to 'aws_region_info'", version='2.13')

-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 required for this module')
-
-    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
-    connection = boto3_conn(
-        module,
-        conn_type='client',
-        resource='ec2',
-        region=region,
-        endpoint=ec2_url,
-        **aws_connect_params
-    )
+    connection = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())

     # Replace filter key underscores with dashes, for compatibility
     sanitized_filters = dict((k.replace('_', '-'), v) for k, v in module.params.get('filters').items())
@@ -104,13 +86,8 @@ def main():
         regions = connection.describe_regions(
             Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
         )
-    except ClientError as e:
-        module.fail_json(msg="Unable to describe regions: {0}".format(to_native(e)),
-                         exception=traceback.format_exc(),
-                         **camel_dict_to_snake_dict(e.response))
-    except BotoCoreError as e:
-        module.fail_json(msg="Unable to describe regions: {0}".format(to_native(e)),
-                         exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg="Unable to describe regions.")

     module.exit_json(regions=[camel_dict_to_snake_dict(r) for r in regions['Regions']])

@@ -99,15 +99,10 @@ rules:
 try:
     from botocore.exceptions import ClientError, BotoCoreError
 except Exception:
-    # handled by HAS_BOTO3 check in main
-    pass
+    pass  # Handled by AnsibleAWSModule

-import traceback
-
-from ansible.module_utils._text import to_native
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import (HAS_BOTO3, boto3_conn, ec2_argument_spec, get_aws_connection_info,
-                                      camel_dict_to_snake_dict, snake_dict_to_camel_dict, compare_policies)
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import snake_dict_to_camel_dict, compare_policies


 def create_or_update_bucket_cors(connection, module):
@@ -129,17 +124,8 @@ def create_or_update_bucket_cors(connection, module):
     if changed:
         try:
             cors = connection.put_bucket_cors(Bucket=name, CORSConfiguration={'CORSRules': new_camel_rules})
-        except ClientError as e:
-            module.fail_json(
-                msg="Unable to update CORS for bucket {0}: {1}".format(name, to_native(e)),
-                exception=traceback.format_exc(),
-                **camel_dict_to_snake_dict(e.response)
-            )
-        except BotoCoreError as e:
-            module.fail_json(
-                msg=to_native(e),
-                exception=traceback.format_exc()
-            )
+        except (BotoCoreError, ClientError) as e:
+            module.fail_json_aws(e, msg="Unable to update CORS for bucket {0}".format(name))

     module.exit_json(changed=changed, name=name, rules=rules)

@@ -152,40 +138,23 @@ def destroy_bucket_cors(connection, module):
     try:
         cors = connection.delete_bucket_cors(Bucket=name)
         changed = True
-    except ClientError as e:
-        module.fail_json(
-            msg="Unable to delete CORS for bucket {0}: {1}".format(name, to_native(e)),
-            exception=traceback.format_exc(),
-            **camel_dict_to_snake_dict(e.response)
-        )
-    except BotoCoreError as e:
-        module.fail_json(
-            msg=to_native(e),
-            exception=traceback.format_exc()
-        )
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg="Unable to delete CORS for bucket {0}".format(name))

     module.exit_json(changed=changed)


 def main():

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            name=dict(required=True, type='str'),
-            rules=dict(type='list'),
-            state=dict(type='str', choices=['present', 'absent'], required=True)
-        )
+    argument_spec = dict(
+        name=dict(required=True, type='str'),
+        rules=dict(type='list'),
+        state=dict(type='str', choices=['present', 'absent'], required=True)
     )

-    module = AnsibleModule(argument_spec=argument_spec)
+    module = AnsibleAWSModule(argument_spec=argument_spec)

-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 is required.')
-
-    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
-    client = boto3_conn(module, conn_type='client', resource='s3',
-                        region=region, endpoint=ec2_url, **aws_connect_kwargs)
+    client = module.client('s3')

     state = module.params.get("state")

@@ -262,33 +262,29 @@ trail:
       sample: {'environment': 'dev', 'Name': 'default'}
 '''

-import traceback
-
 try:
-    from botocore.exceptions import ClientError
+    from botocore.exceptions import ClientError, BotoCoreError
 except ImportError:
-    # Handled in main() by imported HAS_BOTO3
-    pass
+    pass  # Handled by AnsibleAWSModule

-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import (boto3_conn, ec2_argument_spec, get_aws_connection_info,
-                                      HAS_BOTO3, ansible_dict_to_boto3_tag_list,
-                                      boto3_tag_list_to_ansible_dict, camel_dict_to_snake_dict)
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import (camel_dict_to_snake_dict,
+                                      ansible_dict_to_boto3_tag_list, boto3_tag_list_to_ansible_dict)


 def create_trail(module, client, ct_params):
     """
     Creates a CloudTrail

-    module : AnsibleModule object
+    module : AnsibleAWSModule object
     client : boto3 client connection object
     ct_params : The parameters for the Trail to create
     """
     resp = {}
     try:
         resp = client.create_trail(**ct_params)
-    except ClientError as err:
-        module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+    except (BotoCoreError, ClientError) as err:
+        module.fail_json_aws(err, msg="Failed to create Trail")

     return resp

@@ -297,7 +293,7 @@ def tag_trail(module, client, tags, trail_arn, curr_tags=None, dry_run=False):
     """
     Creates, updates, removes tags on a CloudTrail resource

-    module : AnsibleModule object
+    module : AnsibleAWSModule object
     client : boto3 client connection object
     tags : Dict of tags converted from ansible_dict to boto3 list of dicts
     trail_arn : The ARN of the CloudTrail to operate on
@@ -331,16 +327,16 @@ def tag_trail(module, client, tags, trail_arn, curr_tags=None, dry_run=False):
         if not dry_run:
             try:
                 client.remove_tags(ResourceId=trail_arn, TagsList=removes + updates)
-            except ClientError as err:
-                module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+            except (BotoCoreError, ClientError) as err:
+                module.fail_json_aws(err, msg="Failed to remove tags from Trail")

     if updates or adds:
         changed = True
         if not dry_run:
             try:
                 client.add_tags(ResourceId=trail_arn, TagsList=updates + adds)
-            except ClientError as err:
-                module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+            except (BotoCoreError, ClientError) as err:
+                module.fail_json_aws(err, msg="Failed to add tags to Trail")

     return changed

@@ -362,7 +358,7 @@ def set_logging(module, client, name, action):
     """
     Starts or stops logging based on given state

-    module : AnsibleModule object
+    module : AnsibleAWSModule object
     client : boto3 client connection object
     name : The name or ARN of the CloudTrail to operate on
     action : start or stop
@@ -371,14 +367,14 @@ def set_logging(module, client, name, action):
         try:
             client.start_logging(Name=name)
             return client.get_trail_status(Name=name)
-        except ClientError as err:
-            module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+        except (BotoCoreError, ClientError) as err:
+            module.fail_json_aws(err, msg="Failed to start logging")
     elif action == 'stop':
         try:
             client.stop_logging(Name=name)
             return client.get_trail_status(Name=name)
-        except ClientError as err:
-            module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+        except (BotoCoreError, ClientError) as err:
+            module.fail_json_aws(err, msg="Failed to stop logging")
     else:
         module.fail_json(msg="Unsupported logging action")

@@ -387,15 +383,15 @@ def get_trail_facts(module, client, name):
     """
     Describes existing trail in an account

-    module : AnsibleModule object
+    module : AnsibleAWSModule object
     client : boto3 client connection object
     name : Name of the trail
     """
     # get Trail info
     try:
         trail_resp = client.describe_trails(trailNameList=[name])
-    except ClientError as err:
-        module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+    except (BotoCoreError, ClientError) as err:
+        module.fail_json_aws(err, msg="Failed to describe Trail")

     # Now check to see if our trail exists and get status and tags
     if len(trail_resp['trailList']):
@@ -403,8 +399,8 @@ def get_trail_facts(module, client, name):
         try:
             status_resp = client.get_trail_status(Name=trail['Name'])
             tags_list = client.list_tags(ResourceIdList=[trail['TrailARN']])
-        except ClientError as err:
-            module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+        except (BotoCoreError, ClientError) as err:
+            module.fail_json_aws(err, msg="Failed to describe Trail")

         trail['IsLogging'] = status_resp['IsLogging']
         trail['tags'] = boto3_tag_list_to_ansible_dict(tags_list['ResourceTagList'][0]['TagsList'])
@@ -423,33 +419,32 @@ def delete_trail(module, client, trail_arn):
     """
     Delete a CloudTrail

-    module : AnsibleModule object
+    module : AnsibleAWSModule object
     client : boto3 client connection object
     trail_arn : Full CloudTrail ARN
     """
     try:
         client.delete_trail(Name=trail_arn)
-    except ClientError as err:
-        module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+    except (BotoCoreError, ClientError) as err:
+        module.fail_json_aws(err, msg="Failed to delete Trail")


 def update_trail(module, client, ct_params):
     """
     Delete a CloudTrail

-    module : AnsibleModule object
+    module : AnsibleAWSModule object
     client : boto3 client connection object
     ct_params : The parameters for the Trail to update
     """
     try:
         client.update_trail(**ct_params)
-    except ClientError as err:
-        module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+    except (BotoCoreError, ClientError) as err:
+        module.fail_json_aws(err, msg="Failed to update Trail")


 def main():
-    argument_spec = ec2_argument_spec()
-    argument_spec.update(dict(
+    argument_spec = dict(
         state=dict(default='present', choices=['present', 'absent', 'enabled', 'disabled']),
         name=dict(default='default'),
         enable_logging=dict(default=True, type='bool'),
@@ -463,15 +458,12 @@ def main():
         cloudwatch_logs_log_group_arn=dict(),
         kms_key_id=dict(),
         tags=dict(default={}, type='dict'),
-    ))
+    )

     required_if = [('state', 'present', ['s3_bucket_name']), ('state', 'enabled', ['s3_bucket_name'])]
     required_together = [('cloudwatch_logs_role_arn', 'cloudwatch_logs_log_group_arn')]

-    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_together=required_together, required_if=required_if)
-
-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 is required for this module')
+    module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True, required_together=required_together, required_if=required_if)

     # collect parameters
     if module.params['state'] in ('present', 'enabled'):
@@ -505,11 +497,8 @@ def main():
     if module.params['kms_key_id']:
         ct_params['KmsKeyId'] = module.params['kms_key_id']

-    try:
-        region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
-        client = boto3_conn(module, conn_type='client', resource='cloudtrail', region=region, endpoint=ec2_url, **aws_connect_params)
-    except ClientError as err:
-        module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+    client = module.client('cloudtrail')
+    region = module.region

     results = dict(
         changed=False,
@@ -589,8 +578,8 @@ def main():
         # Get the trail status
         try:
             status_resp = client.get_trail_status(Name=created_trail['Name'])
-        except ClientError as err:
-            module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
+        except (BotoCoreError, ClientError) as err:
+            module.fail_json_aws(err, msg="Failed to fetch Trail statuc")
         # Set the logging state for the trail to desired value
         if enable_logging and not status_resp['IsLogging']:
             set_logging(module, client, name=ct_params['Name'], action='start')
@@ -603,9 +592,9 @@ def main():
         if module.check_mode:
             acct_id = '123456789012'
             try:
-                sts_client = boto3_conn(module, conn_type='client', resource='sts', region=region, endpoint=ec2_url, **aws_connect_params)
+                sts_client = module.client('sts')
                 acct_id = sts_client.get_caller_identity()['Account']
-            except ClientError:
+            except (BotoCoreError, ClientError):
                 pass
             trail = dict()
             trail.update(ct_params)

@@ -223,13 +223,12 @@ termination_policies:
 import re

 try:
-    from botocore.exceptions import ClientError
+    from botocore.exceptions import BotoCoreError, ClientError
 except ImportError:
-    pass  # caught by imported HAS_BOTO3
+    pass  # caught by AnsibleAWSModule

-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import (get_aws_connection_info, boto3_conn, ec2_argument_spec,
-                                      camel_dict_to_snake_dict, HAS_BOTO3)
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import camel_dict_to_snake_dict


 def match_asg_tags(tags_to_match, asg):
@@ -335,14 +334,14 @@ def find_asgs(conn, module, name=None, tags=None):
     try:
         asgs_paginator = conn.get_paginator('describe_auto_scaling_groups')
         asgs = asgs_paginator.paginate().build_full_result()
-    except ClientError as e:
-        module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Failed to describe AutoScalingGroups')

     if not asgs:
         return asgs

     try:
-        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
-        elbv2 = boto3_conn(module, conn_type='client', resource='elbv2', region=region, endpoint=ec2_url, **aws_connect_kwargs)
+        elbv2 = module.client('elbv2')
     except ClientError as e:
         # This is nice to have, not essential
         elbv2 = None
@@ -381,6 +380,10 @@ def find_asgs(conn, module, name=None, tags=None):
             except ClientError as e:
                 if e.response['Error']['Code'] == 'TargetGroupNotFound':
                     asg['target_group_names'] = []
+                else:
+                    module.fail_json_aws(e, msg="Failed to describe Target Groups")
+            except BotoCoreError as e:
+                module.fail_json_aws(e, msg="Failed to describe Target Groups")
         else:
             asg['target_group_names'] = []
         matched_asgs.append(asg)
@@ -390,28 +393,18 @@ def find_asgs(conn, module, name=None, tags=None):

 def main():

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            name=dict(type='str'),
-            tags=dict(type='dict'),
-        )
+    argument_spec = dict(
+        name=dict(type='str'),
+        tags=dict(type='dict'),
     )
-    module = AnsibleModule(argument_spec=argument_spec)
+    module = AnsibleAWSModule(argument_spec=argument_spec)
     if module._name == 'ec2_asg_facts':
         module.deprecate("The 'ec2_asg_facts' module has been renamed to 'ec2_asg_info'", version='2.13')

-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 required for this module')
-
     asg_name = module.params.get('name')
     asg_tags = module.params.get('tags')

-    try:
-        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
-        autoscaling = boto3_conn(module, conn_type='client', resource='autoscaling', region=region, endpoint=ec2_url, **aws_connect_kwargs)
-    except ClientError as e:
-        module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
+    autoscaling = module.client('autoscaling')

     results = find_asgs(autoscaling, module, name=asg_name, tags=asg_tags)
     module.exit_json(results=results)

@@ -95,48 +95,25 @@ security_groups:
       sample:
 '''

-import traceback

 try:
-    from botocore.exceptions import ClientError
+    from botocore.exceptions import BotoCoreError, ClientError
 except ImportError:
-    pass  # caught by imported HAS_BOTO3
+    pass  # caught by AnsibleAWSModule

-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import (ec2_argument_spec, boto3_conn, HAS_BOTO3, get_aws_connection_info,
-                                      boto3_tag_list_to_ansible_dict, ansible_dict_to_boto3_filter_list,
-                                      camel_dict_to_snake_dict)
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import (boto3_tag_list_to_ansible_dict, ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict)


 def main():
-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            filters=dict(default={}, type='dict')
-        )
+    argument_spec = dict(
+        filters=dict(default={}, type='dict')
     )

-    module = AnsibleModule(argument_spec=argument_spec,
-                           supports_check_mode=True)
+    module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)
     if module._name == 'ec2_group_facts':
         module.deprecate("The 'ec2_group_facts' module has been renamed to 'ec2_group_info'", version='2.13')

-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 required for this module')
-
-    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
-
-    if region:
-        connection = boto3_conn(
-            module,
-            conn_type='client',
-            resource='ec2',
-            region=region,
-            endpoint=ec2_url,
-            **aws_connect_params
-        )
-    else:
-        module.fail_json(msg="region must be specified")
+    connection = module.client('ec2')

     # Replace filter key underscores with dashes, for compatibility, except if we're dealing with tags
     sanitized_filters = module.params.get("filters")
@@ -148,8 +125,8 @@ def main():
         security_groups = connection.describe_security_groups(
             Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
         )
-    except ClientError as e:
-        module.fail_json(msg=e.message, exception=traceback.format_exc())
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Failed to describe security groups')

     snaked_security_groups = []
     for security_group in security_groups['SecurityGroups']:

@@ -181,12 +181,10 @@ import traceback
 try:
     from botocore.exceptions import ClientError
 except ImportError:
-    pass  # Taken care of by ec2.HAS_BOTO3
+    pass  # Handled by AnsibleAWSModule

-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import (HAS_BOTO3, boto3_conn, boto_exception, ec2_argument_spec,
-                                      get_aws_connection_info, compare_policies,
-                                      sort_json_policy_dict)
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import boto_exception, compare_policies, sort_json_policy_dict
 from ansible.module_utils.six import string_types


@@ -205,15 +203,8 @@ def build_kwargs(registry_id):

 class EcsEcr:
     def __init__(self, module):
-        region, ec2_url, aws_connect_kwargs = \
-            get_aws_connection_info(module, boto3=True)
-
-        self.ecr = boto3_conn(module, conn_type='client',
-                              resource='ecr', region=region,
-                              endpoint=ec2_url, **aws_connect_kwargs)
-        self.sts = boto3_conn(module, conn_type='client',
-                              resource='sts', region=region,
-                              endpoint=ec2_url, **aws_connect_kwargs)
+        self.ecr = module.client('ecr')
+        self.sts = module.client('sts')
         self.check_mode = module.check_mode
         self.changed = False
         self.skipped = False
@@ -507,8 +498,7 @@ def run(ecr, params):


 def main():
-    argument_spec = ec2_argument_spec()
-    argument_spec.update(dict(
+    argument_spec = dict(
         name=dict(required=True),
         registry_id=dict(required=False),
         state=dict(required=False, choices=['present', 'absent'],
@@ -517,23 +507,16 @@ def main():
         policy=dict(required=False, type='json'),
         image_tag_mutability=dict(required=False, choices=['mutable', 'immutable'],
                                   default='mutable'),
-        purge_policy=dict(required=False, type='bool', aliases=['delete_policy']),
+        purge_policy=dict(required=False, type='bool', aliases=['delete_policy'],
+                          deprecated_aliases=[dict(name='delete_policy', version='2.14')]),
         lifecycle_policy=dict(required=False, type='json'),
-        purge_lifecycle_policy=dict(required=False, type='bool')))
+        purge_lifecycle_policy=dict(required=False, type='bool')
+    )
+    mutually_exclusive = [
+        ['policy', 'purge_policy'],
+        ['lifecycle_policy', 'purge_lifecycle_policy']]

-    module = AnsibleModule(argument_spec=argument_spec,
-                           supports_check_mode=True,
-                           mutually_exclusive=[
-                               ['policy', 'purge_policy'],
-                               ['lifecycle_policy', 'purge_lifecycle_policy']])
-    if module.params.get('delete_policy'):
-        module.deprecate(
-            'The alias "delete_policy" has been deprecated and will be removed, '
-            'use "purge_policy" instead',
-            version='2.14')
-
-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 required for this module')
+    module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True, mutually_exclusive=mutually_exclusive)

     ecr = EcsEcr(module)
     passed, result = run(ecr, module.params)

|
@ -237,18 +237,15 @@ tags:
|
|||
|
||||
from time import sleep
|
||||
from time import time as timestamp
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from botocore.exceptions import ClientError, BotoCoreError
|
||||
except ImportError as e:
|
||||
pass # Taken care of by ec2.HAS_BOTO3
|
||||
pass # Handled by AnsibleAWSModule
|
||||
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.ec2 import (HAS_BOTO3, boto3_conn, camel_dict_to_snake_dict,
|
||||
ec2_argument_spec, get_aws_connection_info, ansible_dict_to_boto3_tag_list,
|
||||
compare_aws_tags, boto3_tag_list_to_ansible_dict)
|
||||
from ansible.module_utils.aws.core import AnsibleAWSModule
|
||||
from ansible.module_utils.ec2 import (compare_aws_tags, camel_dict_to_snake_dict,
|
||||
ansible_dict_to_boto3_tag_list, boto3_tag_list_to_ansible_dict)
|
||||
|
||||
|
||||
def _index_by_key(key, items):
|
||||
|
@ -264,10 +261,9 @@ class EFSConnection(object):
|
|||
STATE_DELETING = 'deleting'
|
||||
STATE_DELETED = 'deleted'
|
||||
|
||||
def __init__(self, module, region, **aws_connect_params):
|
||||
self.connection = boto3_conn(module, conn_type='client',
|
||||
resource='efs', region=region,
|
||||
**aws_connect_params)
|
||||
def __init__(self, module):
|
||||
self.connection = module.client('efs')
|
||||
region = module.region
|
||||
|
||||
self.module = module
|
||||
self.region = region
|
||||
|
@ -441,12 +437,8 @@ class EFSConnection(object):
|
|||
try:
|
||||
self.connection.create_file_system(**params)
|
||||
changed = True
|
||||
except ClientError as e:
|
||||
self.module.fail_json(msg="Unable to create file system: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
|
||||
except BotoCoreError as e:
|
||||
self.module.fail_json(msg="Unable to create file system: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc())
|
||||
except (ClientError, BotoCoreError) as e:
|
||||
self.module.fail_json_aws(e, msg="Unable to create file system.")
|
||||
|
||||
# we always wait for the state to be available when creating.
|
||||
# if we try to take any actions on the file system before it's available
|
||||
|
@ -483,12 +475,8 @@ class EFSConnection(object):
|
|||
try:
|
||||
self.connection.update_file_system(FileSystemId=fs_id, **params)
|
||||
changed = True
|
||||
except ClientError as e:
|
||||
self.module.fail_json(msg="Unable to update file system: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
|
||||
except BotoCoreError as e:
|
||||
self.module.fail_json(msg="Unable to update file system: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc())
|
||||
except (ClientError, BotoCoreError) as e:
|
||||
self.module.fail_json_aws(e, msg="Unable to update file system.")
|
||||
return changed
|
||||
|
||||
def converge_file_system(self, name, tags, purge_tags, targets, throughput_mode, provisioned_throughput_in_mibps):
|
||||
|
@ -507,12 +495,8 @@ class EFSConnection(object):
|
|||
FileSystemId=fs_id,
|
||||
TagKeys=tags_to_delete
|
||||
)
|
||||
except ClientError as e:
|
||||
self.module.fail_json(msg="Unable to delete tags: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
|
||||
except BotoCoreError as e:
|
||||
self.module.fail_json(msg="Unable to delete tags: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc())
|
||||
except (ClientError, BotoCoreError) as e:
|
||||
self.module.fail_json_aws(e, msg="Unable to delete tags.")
|
||||
|
||||
result = True
|
||||
|
||||
|
@ -522,12 +506,8 @@ class EFSConnection(object):
|
|||
FileSystemId=fs_id,
|
||||
Tags=ansible_dict_to_boto3_tag_list(tags_need_modify)
|
||||
)
|
||||
except ClientError as e:
|
||||
self.module.fail_json(msg="Unable to create tags: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
|
||||
except BotoCoreError as e:
|
||||
self.module.fail_json(msg="Unable to create tags: {0}".format(to_native(e)),
|
||||
exception=traceback.format_exc())
|
||||
except (ClientError, BotoCoreError) as e:
|
||||
self.module.fail_json_aws(e, msg="Unable to create tags.")
|
||||
|
||||
result = True
|
||||
|
||||
|
@ -710,8 +690,7 @@ def main():
|
|||
"""
|
||||
Module action handler
|
||||
"""
|
||||
argument_spec = ec2_argument_spec()
|
||||
argument_spec.update(dict(
|
||||
argument_spec = dict(
|
||||
encrypt=dict(required=False, type="bool", default=False),
|
||||
state=dict(required=False, type='str', choices=["present", "absent"], default="present"),
|
||||
kms_key_id=dict(required=False, type='str', default=None),
|
||||
|
@ -725,14 +704,11 @@ def main():
|
|||
provisioned_throughput_in_mibps=dict(required=False, type='float'),
|
||||
wait=dict(required=False, type="bool", default=False),
|
||||
wait_timeout=dict(required=False, type="int", default=0)
|
||||
))
|
||||
)
|
||||
|
||||
module = AnsibleModule(argument_spec=argument_spec)
|
||||
if not HAS_BOTO3:
|
||||
module.fail_json(msg='boto3 required for this module')
|
||||
module = AnsibleAWSModule(argument_spec=argument_spec)
|
||||
|
||||
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
|
||||
connection = EFSConnection(module, region, **aws_connect_params)
|
||||
connection = EFSConnection(module)
|
||||
|
||||
name = module.params.get('name')
|
||||
fs_id = module.params.get('id')
|
||||
|
|
|
@@ -280,16 +280,14 @@ iam_roles:
 '''

 import re
-import traceback

 try:
-    from botocore.exception import ClientError
+    from botocore.exception import BotoCoreError, ClientError
 except ImportError:
-    pass  # will be picked up from imported HAS_BOTO3
+    pass  # caught by AnsibleAWSModule

-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn, get_aws_connection_info
-from ansible.module_utils.ec2 import HAS_BOTO3, camel_dict_to_snake_dict
+from ansible.module_utils.aws.core import AnsibleAWSModule
+from ansible.module_utils.ec2 import camel_dict_to_snake_dict


 def match_tags(tags_to_match, cluster):
@@ -306,8 +304,8 @@ def find_clusters(conn, module, identifier=None, tags=None):
     try:
         cluster_paginator = conn.get_paginator('describe_clusters')
         clusters = cluster_paginator.paginate().build_full_result()
-    except ClientError as e:
-        module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
+    except (BotoCoreError, ClientError) as e:
+        module.fail_json_aws(e, msg='Failed to fetch clusters.')

     matched_clusters = []

@@ -332,14 +330,11 @@ def find_clusters(conn, module, identifier=None, tags=None):

 def main():

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            cluster_identifier=dict(type='str', aliases=['identifier', 'name']),
-            tags=dict(type='dict')
-        )
+    argument_spec = dict(
+        cluster_identifier=dict(type='str', aliases=['identifier', 'name']),
+        tags=dict(type='dict')
     )
-    module = AnsibleModule(
+    module = AnsibleAWSModule(
         argument_spec=argument_spec,
         supports_check_mode=True
     )
@@ -349,14 +344,7 @@ def main():
     cluster_identifier = module.params.get('cluster_identifier')
     cluster_tags = module.params.get('tags')

-    if not HAS_BOTO3:
-        module.fail_json(msg='boto3 required for this module')
-
-    try:
-        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
-        redshift = boto3_conn(module, conn_type='client', resource='redshift', region=region, endpoint=ec2_url, **aws_connect_kwargs)
-    except ClientError as e:
-        module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
+    redshift = module.client('redshift')

     results = find_clusters(redshift, module, identifier=cluster_identifier, tags=cluster_tags)
     module.exit_json(results=results)