Compare commits: devel...release1.5 (54 commits)

Commits (SHA1):
f1f63829ef
12b12bba92
998793fd0a
157b783521
fc02883017
2cf0dadb47
13692311e8
824d42081c
c96c3c483c
48a1c156ee
f641cb1665
3157dc5620
f0a89d5bdf
763235a83d
b489406507
4c3781899e
1728734d69
ac34b67bf4
053a5fb46f
64d69a88f7
c78a5681fc
b5ce0526bc
e6b05da55b
6738737518
1be68b4821
d2036cedcc
4392246b47
626f9629e2
80df6e4197
a401f9579d
fbf36040f4
5c568bb00c
baeabd9180
a28ff5f830
cb40013d15
f4a20feb4d
02b2e532db
eef4096e8f
453bedcf87
dbfcf0325e
72c7841c57
42ca3d8076
b0771f3cd2
521e135cb3
d808604a51
e509191b76
de753fe99c
a9762357f9
8588ac5f60
3bb3ce4467
9d4589d574
1a470b1776
4ec3f3c1d5
7ad9598fe8
64 changed files with 597 additions and 254 deletions

Changed paths:
- CHANGELOG.md
- RELEASES.txt
- VERSION
- docs/man/man1
- docsite/rst
- lib/ansible
- library/cloud
- library/database
- library/monitoring
- library/net_infrastructure
- library/network
- library/notification
- library/packaging (apt, apt_key, apt_repository, gem, macports, opkg, pacman, pkgin, pkgutil, portinstall, redhat_subscription, rpm_key, swdepot, urpmi)
- library/source_control
- library/system
- packaging
- test/integration

CHANGELOG.md (14 changes)
@@ -1,6 +1,20 @@

Ansible Changes By Release
==========================

## 1.5.4 "Love Walks In" - April 1, 2014

- Security fix for safe_eval, which further hardens the checking of the evaluation function.
- Changing order of variable precedence for system facts, to ensure that inventory variables take precedence over any facts that may be set on a host.

## 1.5.3 "Love Walks In" - March 13, 2014

- Fix validate_certs and run_command errors from previous release
- Fixes to the git module related to host key checking

## 1.5.2 "Love Walks In" - March 11, 2014

- Fix module errors in airbrake and apt from previous release

## 1.5.1 "Love Walks In" - March 10, 2014

- Force command action to not be executed by the shell unless specifically enabled.
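The safe_eval hardening mentioned above (the change itself appears in the lib/ansible utils diff later in this compare) moves from string pattern checks to an AST whitelist: the expression is parsed, every node type is checked against an allow-list, and only then compiled and evaluated. A condensed, hedged sketch of that approach (Python 3 syntax, deliberately stricter than Ansible's version, which also whitelists a filtered set of calls):

```python
import ast

# Allow only literal containers, names, and simple arithmetic/comparison.
SAFE_NODES = (ast.Expression, ast.Constant, ast.List, ast.Tuple, ast.Dict,
              ast.Load, ast.Name, ast.BinOp, ast.UnaryOp, ast.Compare,
              ast.Add, ast.Sub, ast.Mult, ast.Div)

def safe_eval(expr, locals=None):
    """Evaluate a simple literal/arithmetic expression, rejecting anything else."""
    tree = ast.parse(expr, mode="eval")
    for node in ast.walk(tree):
        if not isinstance(node, SAFE_NODES):
            raise ValueError("invalid expression (%s)" % expr)
    return eval(compile(tree, "<expr>", "eval"), {}, locals or {})

print(safe_eval("[1, 2, 3] + [4]"))  # fine: list literals and addition
# safe_eval("__import__('os').system('id')")  # rejected: Call/Attribute are not whitelisted
```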
RELEASES.txt

@@ -1,7 +1,10 @@

Ansible Releases at a Glance
============================

1.6   "The Cradle Will Rock" - NEXT
1.5.4 "Love Walks In" -------- 04-01-2014
1.5.3 "Love Walks In" -------- 03-13-2014
1.5.2 "Love Walks In" -------- 03-11-2014
1.5.1 "Love Walks In" -------- 03-10-2014
1.5   "Love Walks In" -------- 02-28-2014
1.4.5 "Could This Be Magic?" - 02-12-2014
VERSION (2 changes)

@@ -1 +1 @@
-1.5.1
+1.5.4
|
@ -1,13 +1,22 @@
|
|||
'\" t
|
||||
.\" Title: ansible-doc
|
||||
.\" Author: [see the "AUTHOR" section]
|
||||
.\" Generator: DocBook XSL Stylesheets v1.75.2 <http://docbook.sf.net/>
|
||||
.\" Date: 11/27/2013
|
||||
.\" Author: :doctype:manpage
|
||||
.\" Generator: DocBook XSL Stylesheets v1.76.1 <http://docbook.sf.net/>
|
||||
.\" Date: 03/11/2014
|
||||
.\" Manual: System administration commands
|
||||
.\" Source: Ansible 1.4.1
|
||||
.\" Source: Ansible 1.5.2
|
||||
.\" Language: English
|
||||
.\"
|
||||
.TH "ANSIBLE\-DOC" "1" "11/27/2013" "Ansible 1\&.4\&.1" "System administration commands"
|
||||
.TH "ANSIBLE\-DOC" "1" "03/11/2014" "Ansible 1\&.5\&.2" "System administration commands"
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * Define some portability stuff
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
.\" http://bugs.debian.org/507673
|
||||
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
|
||||
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * set default formatting
|
||||
.\" -----------------------------------------------------------------
|
||||
|
@ -54,4 +63,10 @@ Ansible is released under the terms of the GPLv3 License\&.
|
|||
.sp
|
||||
\fBansible\-playbook\fR(1), \fBansible\fR(1), \fBansible\-pull\fR(1)
|
||||
.sp
|
||||
Extensive documentation as well as IRC and mailing list info is available on the ansible home page: https://ansible\&.github\&.com/
|
||||
Extensive documentation is available in the documentation site: http://docs\&.ansible\&.com\&. IRC and mailing list info can be found in file CONTRIBUTING\&.md, available in: https://github\&.com/ansible/ansible
|
||||
.SH "AUTHOR"
|
||||
.PP
|
||||
\fB:doctype:manpage\fR
|
||||
.RS 4
|
||||
Author.
|
||||
.RE
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
'\" t
|
||||
.\" Title: ansible-playbook
|
||||
.\" Author: :doctype:manpage
|
||||
.\" Generator: DocBook XSL Stylesheets v1.78.1 <http://docbook.sf.net/>
|
||||
.\" Date: 02/12/2014
|
||||
.\" Generator: DocBook XSL Stylesheets v1.76.1 <http://docbook.sf.net/>
|
||||
.\" Date: 03/11/2014
|
||||
.\" Manual: System administration commands
|
||||
.\" Source: Ansible 1.5
|
||||
.\" Source: Ansible 1.5.2
|
||||
.\" Language: English
|
||||
.\"
|
||||
.TH "ANSIBLE\-PLAYBOOK" "1" "02/12/2014" "Ansible 1\&.5" "System administration commands"
|
||||
.TH "ANSIBLE\-PLAYBOOK" "1" "03/11/2014" "Ansible 1\&.5\&.2" "System administration commands"
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * Define some portability stuff
|
||||
.\" -----------------------------------------------------------------
|
||||
|
|
|
@ -2,12 +2,21 @@
|
|||
.\" Title: ansible
|
||||
.\" Author: :doctype:manpage
|
||||
.\" Generator: DocBook XSL Stylesheets v1.76.1 <http://docbook.sf.net/>
|
||||
.\" Date: 01/02/2014
|
||||
.\" Date: 03/11/2014
|
||||
.\" Manual: System administration commands
|
||||
.\" Source: Ansible 1.5
|
||||
.\" Source: Ansible 1.5.2
|
||||
.\" Language: English
|
||||
.\"
|
||||
.TH "ANSIBLE" "1" "01/03/2014" "Ansible 1\&.5" "System administration commands"
|
||||
.TH "ANSIBLE" "1" "03/11/2014" "Ansible 1\&.5\&.2" "System administration commands"
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * Define some portability stuff
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
.\" http://bugs.debian.org/507673
|
||||
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
|
||||
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * set default formatting
|
||||
.\" -----------------------------------------------------------------
|
||||
|
@ -94,7 +103,7 @@ Ansible is released under the terms of the GPLv3 License\&.
|
|||
.sp
|
||||
\fBansible\fR(1), \fBansible\-playbook\fR(1), \fBansible\-doc\fR(1)
|
||||
.sp
|
||||
Extensive documentation as well as IRC and mailing list info is available on the ansible home page: https://ansible\&.github\&.com/
|
||||
Extensive documentation is available in the documentation site: http://docs\&.ansible\&.com\&. IRC and mailing list info can be found in file CONTRIBUTING\&.md, available in: https://github\&.com/ansible/ansible
|
||||
.SH "AUTHOR"
|
||||
.PP
|
||||
\fB:doctype:manpage\fR
|
||||
|
|
|
@ -1,13 +1,22 @@
|
|||
'\" t
|
||||
.\" Title: ansible
|
||||
.\" Author: [see the "AUTHOR" section]
|
||||
.\" Generator: DocBook XSL Stylesheets v1.75.2 <http://docbook.sf.net/>
|
||||
.\" Date: 11/27/2013
|
||||
.\" Author: :doctype:manpage
|
||||
.\" Generator: DocBook XSL Stylesheets v1.76.1 <http://docbook.sf.net/>
|
||||
.\" Date: 03/11/2014
|
||||
.\" Manual: System administration commands
|
||||
.\" Source: Ansible 1.4.1
|
||||
.\" Source: Ansible 1.5.2
|
||||
.\" Language: English
|
||||
.\"
|
||||
.TH "ANSIBLE" "1" "11/27/2013" "Ansible 1\&.4\&.1" "System administration commands"
|
||||
.TH "ANSIBLE" "1" "03/11/2014" "Ansible 1\&.5\&.2" "System administration commands"
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * Define some portability stuff
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
.\" http://bugs.debian.org/507673
|
||||
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
|
||||
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.\" -----------------------------------------------------------------
|
||||
.\" * set default formatting
|
||||
.\" -----------------------------------------------------------------
|
||||
|
@ -25,7 +34,7 @@ ansible \- run a command somewhere else
|
|||
ansible <host\-pattern> [\-f forks] [\-m module_name] [\-a args]
|
||||
.SH "DESCRIPTION"
|
||||
.sp
|
||||
\fBAnsible\fR is an extra\-simple tool/framework/API for doing \'remote things\' over SSH\&.
|
||||
\fBAnsible\fR is an extra\-simple tool/framework/API for doing \*(Aqremote things\*(Aq over SSH\&.
|
||||
.SH "ARGUMENTS"
|
||||
.PP
|
||||
\fBhost\-pattern\fR
|
||||
|
@ -73,7 +82,7 @@ search path to load modules from\&. The default is
|
|||
\fI/usr/share/ansible\fR\&. This can also be set with the ANSIBLE_LIBRARY environment variable\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-a\fR \'\fIARGUMENTS\fR\', \fB\-\-args=\fR\'\fIARGUMENTS\fR\'
|
||||
\fB\-a\fR \*(Aq\fIARGUMENTS\fR\*(Aq, \fB\-\-args=\fR\*(Aq\fIARGUMENTS\fR\*(Aq
|
||||
.RS 4
|
||||
The
|
||||
\fIARGUMENTS\fR
|
||||
|
@ -165,7 +174,7 @@ Further limits hosts with a regex pattern\&.
|
|||
.sp
|
||||
Ansible stores the hosts it can potentially operate on in an inventory file\&. The syntax is one host per line\&. Groups headers are allowed and are included on their own line, enclosed in square brackets that start the line\&.
|
||||
.sp
|
||||
Ranges of hosts are also supported\&. For more information and additional options, see the documentation on http://ansible\&.github\&.com/\&.
|
||||
Ranges of hosts are also supported\&. For more information and additional options, see the documentation on http://docs\&.ansible\&.com/\&.
|
||||
.SH "FILES"
|
||||
.sp
|
||||
/etc/ansible/hosts \(em Default inventory file
|
||||
|
@ -196,4 +205,10 @@ Ansible is released under the terms of the GPLv3 License\&.
|
|||
.sp
|
||||
\fBansible\-playbook\fR(1), \fBansible\-pull\fR(1), \fBansible\-doc\fR(1)
|
||||
.sp
|
||||
Extensive documentation as well as IRC and mailing list info is available on the ansible home page: https://ansible\&.github\&.com/
|
||||
Extensive documentation is available in the documentation site: http://docs\&.ansible\&.com\&. IRC and mailing list info can be found in file CONTRIBUTING\&.md, available in: https://github\&.com/ansible/ansible
|
||||
.SH "AUTHOR"
|
||||
.PP
|
||||
\fB:doctype:manpage\fR
|
||||
.RS 4
|
||||
Author.
|
||||
.RE
|
||||
|
|
|
docsite/rst: @@ -837,8 +837,11 @@ If multiple variables of the same name are defined in different places, they win

* -e variables always win
* then comes "most everything else"
* then comes variables defined in inventory
* then comes facts discovered about a system
* then "role defaults", which are the most "defaulty" and lose in priority to everything.

.. note:: In versions prior to 1.5.4, facts discovered about a system were in the "most everything else" category above.

That seems a little theoretical. Let's show some examples and where you would choose to put what based on the kind of control you might want over values.
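That ordering can be pictured as successive dictionary merges where later layers override earlier ones. A minimal sketch in plain Python (variable names are illustrative, and the "most everything else" layer is omitted for brevity), showing inventory variables now overriding discovered facts:

```python
# Lowest to highest precedence, per the 1.5.4 ordering described above.
role_defaults = {"http_port": 80}
facts = {"http_port": 8080, "ansible_os_family": "Debian"}
inventory_vars = {"http_port": 8081}
extra_vars = {}  # passed with -e on the command line

merged = {}
for layer in (role_defaults, facts, inventory_vars, extra_vars):
    merged.update(layer)  # later layers win on key collisions

print(merged["http_port"])  # 8081: inventory overrides the discovered fact
```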
|
|
@@ -14,5 +14,5 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-__version__ = '1.5.1'
+__version__ = '1.5.4'
 __author__ = 'Michael DeHaan'
|
|
@ -1017,6 +1017,10 @@ class AnsibleModule(object):
|
|||
msg = "Argument 'args' to run_command must be list or string"
|
||||
self.fail_json(rc=257, cmd=args, msg=msg)
|
||||
|
||||
# expand things like $HOME and ~
|
||||
if not shell:
|
||||
args = [ os.path.expandvars(os.path.expanduser(x)) for x in args ]
|
||||
|
||||
rc = 0
|
||||
msg = None
|
||||
st_in = None
|
||||
|
@ -1040,16 +1044,20 @@ class AnsibleModule(object):
|
|||
|
||||
if path_prefix:
|
||||
kwargs['env'] = env
|
||||
if cwd:
|
||||
if cwd and os.path.isdir(cwd):
|
||||
kwargs['cwd'] = cwd
|
||||
|
||||
|
||||
try:
|
||||
# make sure we're in the right working directory
|
||||
if cwd and os.path.isdir(cwd):
|
||||
os.chdir(cwd)
|
||||
|
||||
cmd = subprocess.Popen(args, **kwargs)
|
||||
|
||||
if data:
|
||||
if not binary_data:
|
||||
data += '\\n'
|
||||
data += '\n'
|
||||
out, err = cmd.communicate(input=data)
|
||||
rc = cmd.returncode
|
||||
except (OSError, IOError), e:
|
||||
|
@ -1061,6 +1069,12 @@ class AnsibleModule(object):
|
|||
self.fail_json(cmd=args, rc=rc, stdout=out, stderr=err, msg=msg)
|
||||
return (rc, out, err)
|
||||
|
||||
def append_to_file(self, filename, str):
|
||||
filename = os.path.expandvars(os.path.expanduser(filename))
|
||||
fh = open(filename, 'a')
|
||||
fh.write(str)
|
||||
fh.close()
|
||||
|
||||
def pretty_bytes(self,size):
|
||||
ranges = (
|
||||
(1<<70L, 'ZB'),
|
||||
|
|
|
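The run_command hunk above adds an expansion step: when the command is not going through a shell, each argument has environment variables and ~ expanded in Python instead. A small standalone sketch of that step (function name is illustrative, Python 3):

```python
import os

def expand_args(args, shell=False):
    # A shell performs its own expansion, so only expand the arguments
    # ourselves in the no-shell (argument list) case, as the diff above does.
    if not shell:
        args = [os.path.expandvars(os.path.expanduser(a)) for a in args]
    return args

print(expand_args(["ls", "-l", "~/project", "$HOME/bin"]))
```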
@ -26,6 +26,9 @@
|
|||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import hmac
|
||||
HASHED_KEY_MAGIC = "|1|"
|
||||
|
||||
def add_git_host_key(module, url, accept_hostkey=True):
|
||||
|
||||
""" idempotently add a git url hostkey """
|
||||
|
@ -58,28 +61,56 @@ def get_fqdn(repo_url):
|
|||
|
||||
return result
|
||||
|
||||
|
||||
def check_hostkey(module, fqdn):
|
||||
return not not_in_host_file(module, fqdn)
|
||||
|
||||
""" use ssh-keygen to check if key is known """
|
||||
# this is a variant of code found in connection_plugins/paramiko.py and we should modify
|
||||
# the paramiko code to import and use this.
|
||||
|
||||
result = False
|
||||
keygen_cmd = module.get_bin_path('ssh-keygen', True)
|
||||
this_cmd = keygen_cmd + " -H -F " + fqdn
|
||||
rc, out, err = module.run_command(this_cmd)
|
||||
def not_in_host_file(self, host):
|
||||
|
||||
if rc == 0 and out != "":
|
||||
result = True
|
||||
|
||||
if 'USER' in os.environ:
|
||||
user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
|
||||
else:
|
||||
# Check the main system location
|
||||
this_cmd = keygen_cmd + " -H -f /etc/ssh/ssh_known_hosts -F " + fqdn
|
||||
rc, out, err = module.run_command(this_cmd)
|
||||
user_host_file = "~/.ssh/known_hosts"
|
||||
user_host_file = os.path.expanduser(user_host_file)
|
||||
|
||||
if rc == 0:
|
||||
if out != "":
|
||||
result = True
|
||||
host_file_list = []
|
||||
host_file_list.append(user_host_file)
|
||||
host_file_list.append("/etc/ssh/ssh_known_hosts")
|
||||
host_file_list.append("/etc/ssh/ssh_known_hosts2")
|
||||
|
||||
hfiles_not_found = 0
|
||||
for hf in host_file_list:
|
||||
if not os.path.exists(hf):
|
||||
hfiles_not_found += 1
|
||||
continue
|
||||
host_fh = open(hf)
|
||||
data = host_fh.read()
|
||||
host_fh.close()
|
||||
for line in data.split("\n"):
|
||||
if line is None or line.find(" ") == -1:
|
||||
continue
|
||||
tokens = line.split()
|
||||
if tokens[0].find(HASHED_KEY_MAGIC) == 0:
|
||||
# this is a hashed known host entry
|
||||
try:
|
||||
(kn_salt,kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2)
|
||||
hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
|
||||
hash.update(host)
|
||||
if hash.digest() == kn_host.decode('base64'):
|
||||
return False
|
||||
except:
|
||||
# invalid hashed host key, skip it
|
||||
continue
|
||||
else:
|
||||
# standard host file entry
|
||||
if host in tokens[0]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
return result
|
||||
|
||||
def add_host_key(module, fqdn, key_type="rsa"):
|
||||
|
||||
|
@ -91,8 +122,10 @@ def add_host_key(module, fqdn, key_type="rsa"):
|
|||
if not os.path.exists(os.path.expanduser("~/.ssh/")):
|
||||
module.fail_json(msg="%s does not exist" % os.path.expanduser("~/.ssh/"))
|
||||
|
||||
this_cmd = "%s -t %s %s >> ~/.ssh/known_hosts" % (keyscan_cmd, key_type, fqdn)
|
||||
this_cmd = "%s -t %s %s" % (keyscan_cmd, key_type, fqdn)
|
||||
|
||||
rc, out, err = module.run_command(this_cmd)
|
||||
module.append_to_file("~/.ssh/known_hosts", out)
|
||||
|
||||
return rc, out, err
|
||||
|
||||
|
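The not_in_host_file routine above understands hashed known_hosts entries (lines whose first field starts with "|1|"): the stored salt is used to HMAC-SHA1 the hostname, and the result is compared with the stored hash. A standalone, hedged sketch of that comparison (Python 3; the original module is Python 2 and uses .decode('base64')):

```python
import base64
import hashlib
import hmac

HASHED_KEY_MAGIC = "|1|"

def token_matches_host(token, host):
    """True if the first field of a known_hosts line refers to `host`."""
    if not token.startswith(HASHED_KEY_MAGIC):
        return host in token  # plain, unhashed entry
    salt_b64, host_hash_b64 = token[len(HASHED_KEY_MAGIC):].split("|", 1)
    digest = hmac.new(base64.b64decode(salt_b64), host.encode(), hashlib.sha1).digest()
    return digest == base64.b64decode(host_hash_b64)

# Round-trip example with a made-up salt, just to exercise the function.
salt = base64.b64encode(b"0123456789abcdefghij").decode()
mac = base64.b64encode(hmac.new(b"0123456789abcdefghij", b"github.com", hashlib.sha1).digest()).decode()
print(token_matches_host("|1|%s|%s" % (salt, mac), "github.com"))  # True
```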
|
|
@ -50,6 +50,7 @@ try:
|
|||
except:
|
||||
HAS_SSL=False
|
||||
|
||||
import tempfile
|
||||
|
||||
class RequestWithMethod(urllib2.Request):
|
||||
'''
|
||||
|
@ -77,59 +78,74 @@ class SSLValidationHandler(urllib2.BaseHandler):
|
|||
http://techknack.net/python-urllib2-handlers/
|
||||
'''
|
||||
|
||||
def __init__(self, module, hostname, port, ca_cert=None):
|
||||
def __init__(self, module, hostname, port):
|
||||
self.module = module
|
||||
self.hostname = hostname
|
||||
self.port = port
|
||||
self.ca_cert = ca_cert
|
||||
|
||||
def get_ca_cert(self):
|
||||
def get_ca_certs(self):
|
||||
# tries to find a valid CA cert in one of the
|
||||
# standard locations for the current distribution
|
||||
|
||||
if self.ca_cert and os.path.exists(self.ca_cert):
|
||||
# the user provided a custom CA cert (ie. one they
|
||||
# uploaded themselves), so use it
|
||||
return self.ca_cert
|
||||
|
||||
ca_cert = None
|
||||
ca_certs = []
|
||||
paths_checked = []
|
||||
platform = get_platform()
|
||||
distribution = get_distribution()
|
||||
if platform == 'Linux':
|
||||
if distribution in ('Fedora',):
|
||||
ca_cert = '/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem'
|
||||
elif distribution in ('RHEL','CentOS','ScientificLinux'):
|
||||
ca_cert = '/etc/pki/tls/certs/ca-bundle.crt'
|
||||
elif distribution in ('Ubuntu','Debian'):
|
||||
ca_cert = '/usr/share/ca-certificates/cacert.org/cacert.org.crt'
|
||||
elif platform == 'FreeBSD':
|
||||
ca_cert = '/usr/local/share/certs/ca-root.crt'
|
||||
elif platform == 'OpenBSD':
|
||||
ca_cert = '/etc/ssl/cert.pem'
|
||||
elif platform == 'NetBSD':
|
||||
ca_cert = '/etc/openssl/certs/ca-cert.pem'
|
||||
elif platform == 'SunOS':
|
||||
# FIXME?
|
||||
pass
|
||||
elif platform == 'AIX':
|
||||
# FIXME?
|
||||
pass
|
||||
|
||||
if ca_cert and os.path.exists(ca_cert):
|
||||
return ca_cert
|
||||
elif os.path.exists('/etc/ansible/ca-cert.pem'):
|
||||
# fall back to a user-deployed cert in a standard
|
||||
# location if the OS platform one is not available
|
||||
return '/etc/ansible/ca-cert.pem'
|
||||
else:
|
||||
# CA cert isn't available, no validation
|
||||
return None
|
||||
# build a list of paths to check for .crt/.pem files
|
||||
# based on the platform type
|
||||
paths_checked.append('/etc/ssl/certs')
|
||||
if platform == 'Linux':
|
||||
paths_checked.append('/etc/pki/ca-trust/extracted/pem')
|
||||
paths_checked.append('/etc/pki/tls/certs')
|
||||
paths_checked.append('/usr/share/ca-certificates/cacert.org')
|
||||
elif platform == 'FreeBSD':
|
||||
paths_checked.append('/usr/local/share/certs')
|
||||
elif platform == 'OpenBSD':
|
||||
paths_checked.append('/etc/ssl')
|
||||
elif platform == 'NetBSD':
|
||||
ca_certs.append('/etc/openssl/certs')
|
||||
|
||||
# fall back to a user-deployed cert in a standard
|
||||
# location if the OS platform one is not available
|
||||
paths_checked.append('/etc/ansible')
|
||||
|
||||
tmp_fd, tmp_path = tempfile.mkstemp()
|
||||
|
||||
# for all of the paths, find any .crt or .pem files
|
||||
# and compile them into single temp file for use
|
||||
# in the ssl check to speed up the test
|
||||
for path in paths_checked:
|
||||
if os.path.exists(path) and os.path.isdir(path):
|
||||
dir_contents = os.listdir(path)
|
||||
for f in dir_contents:
|
||||
full_path = os.path.join(path, f)
|
||||
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
|
||||
try:
|
||||
cert_file = open(full_path, 'r')
|
||||
os.write(tmp_fd, cert_file.read())
|
||||
cert_file.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
return (tmp_path, paths_checked)
|
||||
|
||||
def http_request(self, req):
|
||||
tmp_ca_cert_path, paths_checked = self.get_ca_certs()
|
||||
try:
|
||||
server_cert = ssl.get_server_certificate((self.hostname, self.port), ca_certs=self.get_ca_cert())
|
||||
server_cert = ssl.get_server_certificate((self.hostname, self.port), ca_certs=tmp_ca_cert_path)
|
||||
except ssl.SSLError:
|
||||
self.module.fail_json(msg='failed to validate the SSL certificate for %s:%s. You can use validate_certs=no, however this is unsafe and not recommended' % (self.hostname, self.port))
|
||||
# fail if we tried all of the certs but none worked
|
||||
self.module.fail_json(msg='Failed to validate the SSL certificate for %s:%s. ' % (self.hostname, self.port) + \
|
||||
'Use validate_certs=no or make sure your managed systems have a valid CA certificate installed. ' + \
|
||||
'Paths checked for this platform: %s' % ", ".join(paths_checked))
|
||||
try:
|
||||
# cleanup the temp file created, don't worry
|
||||
# if it fails for some reason
|
||||
os.remove(tmp_ca_cert_path)
|
||||
except:
|
||||
pass
|
||||
|
||||
return req
|
||||
|
||||
https_request = http_request
|
||||
|
@ -150,7 +166,7 @@ def url_argument_spec():
|
|||
|
||||
|
||||
def fetch_url(module, url, data=None, headers=None, method=None,
|
||||
use_proxy=False, validate_certs=True, force=False, last_mod_time=None, timeout=10):
|
||||
use_proxy=False, force=False, last_mod_time=None, timeout=10):
|
||||
'''
|
||||
Fetches a file from an HTTP/FTP server using urllib2
|
||||
'''
|
||||
|
@ -166,6 +182,9 @@ def fetch_url(module, url, data=None, headers=None, method=None,
|
|||
handlers = []
|
||||
info = dict(url=url)
|
||||
|
||||
# Get validate_certs from the module params
|
||||
validate_certs = module.params.get('validate_certs', True)
|
||||
|
||||
parsed = urlparse.urlparse(url)
|
||||
if parsed[0] == 'https':
|
||||
if not HAS_SSL and validate_certs:
|
||||
|
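The get_ca_certs change above replaces a single per-distribution CA path with a scan of several well-known directories, concatenating every .crt/.pem file into a temporary bundle that is then handed to the SSL check. A simplified, hedged sketch of that idea (path list shortened, Python 3):

```python
import os
import tempfile

CANDIDATE_DIRS = ["/etc/ssl/certs", "/etc/pki/tls/certs", "/etc/ansible"]  # illustrative subset

def build_ca_bundle(dirs=CANDIDATE_DIRS):
    """Concatenate all .crt/.pem files found in `dirs` into one temp file
    and return its path, roughly as the diff above does."""
    fd, bundle_path = tempfile.mkstemp()
    with os.fdopen(fd, "wb") as bundle:
        for d in dirs:
            if not os.path.isdir(d):
                continue
            for name in os.listdir(d):
                full = os.path.join(d, name)
                if os.path.isfile(full) and os.path.splitext(name)[1] in (".crt", ".pem"):
                    try:
                        with open(full, "rb") as f:
                            bundle.write(f.read())
                    except OSError:
                        pass  # unreadable cert files are skipped, as in the original
    return bundle_path
```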
|
|
@ -29,7 +29,11 @@ from play import Play
|
|||
import StringIO
|
||||
import pipes
|
||||
|
||||
# the setup cache stores all variables about a host
|
||||
# gathered during the setup step, while the vars cache
|
||||
# holds all other variables about a host
|
||||
SETUP_CACHE = collections.defaultdict(dict)
|
||||
VARS_CACHE = collections.defaultdict(dict)
|
||||
|
||||
class PlayBook(object):
|
||||
'''
|
||||
|
@ -95,6 +99,7 @@ class PlayBook(object):
|
|||
"""
|
||||
|
||||
self.SETUP_CACHE = SETUP_CACHE
|
||||
self.VARS_CACHE = VARS_CACHE
|
||||
|
||||
arguments = []
|
||||
if playbook is None:
|
||||
|
@ -335,6 +340,7 @@ class PlayBook(object):
|
|||
default_vars=task.default_vars,
|
||||
private_key_file=self.private_key_file,
|
||||
setup_cache=self.SETUP_CACHE,
|
||||
vars_cache=self.VARS_CACHE,
|
||||
basedir=task.play.basedir,
|
||||
conditional=task.when,
|
||||
callbacks=self.runner_callbacks,
|
||||
|
@ -426,8 +432,6 @@ class PlayBook(object):
|
|||
else:
|
||||
facts = result.get('ansible_facts', {})
|
||||
self.SETUP_CACHE[host].update(facts)
|
||||
# extra vars need to always trump - so update again following the facts
|
||||
self.SETUP_CACHE[host].update(self.extra_vars)
|
||||
if task.register:
|
||||
if 'stdout' in result and 'stdout_lines' not in result:
|
||||
result['stdout_lines'] = result['stdout'].splitlines()
|
||||
|
@ -500,6 +504,7 @@ class PlayBook(object):
|
|||
remote_port=play.remote_port,
|
||||
private_key_file=self.private_key_file,
|
||||
setup_cache=self.SETUP_CACHE,
|
||||
vars_cache=self.VARS_CACHE,
|
||||
callbacks=self.runner_callbacks,
|
||||
sudo=play.sudo,
|
||||
sudo_user=play.sudo_user,
|
||||
|
|
|
@ -700,7 +700,7 @@ class Play(object):
|
|||
if host is not None:
|
||||
inject = {}
|
||||
inject.update(self.playbook.inventory.get_variables(host, vault_password=vault_password))
|
||||
inject.update(self.playbook.SETUP_CACHE[host])
|
||||
inject.update(self.playbook.VARS_CACHE[host])
|
||||
|
||||
for filename in self.vars_files:
|
||||
|
||||
|
@ -724,8 +724,9 @@ class Play(object):
|
|||
if host is not None:
|
||||
if self._has_vars_in(filename2) and not self._has_vars_in(filename3):
|
||||
# this filename has variables in it that were fact specific
|
||||
# so it needs to be loaded into the per host SETUP_CACHE
|
||||
self.playbook.SETUP_CACHE[host].update(data)
|
||||
# so it needs to be loaded into the per host VARS_CACHE
|
||||
data = utils.combine_vars(inject, data)
|
||||
self.playbook.VARS_CACHE[host].update(data)
|
||||
self.playbook.callbacks.on_import_for_host(host, filename4)
|
||||
elif not self._has_vars_in(filename4):
|
||||
# found a non-host specific variable, load into vars and NOT
|
||||
|
@ -757,9 +758,14 @@ class Play(object):
|
|||
if host is not None and self._has_vars_in(filename2) and not self._has_vars_in(filename3):
|
||||
# running a host specific pass and has host specific variables
|
||||
# load into setup cache
|
||||
self.playbook.SETUP_CACHE[host] = utils.combine_vars(
|
||||
self.playbook.SETUP_CACHE[host], new_vars)
|
||||
new_vars = utils.combine_vars(inject, new_vars)
|
||||
self.playbook.VARS_CACHE[host] = utils.combine_vars(
|
||||
self.playbook.VARS_CACHE[host], new_vars)
|
||||
self.playbook.callbacks.on_import_for_host(host, filename4)
|
||||
elif host is None:
|
||||
# running a non-host specific pass and we can update the global vars instead
|
||||
self.vars = utils.combine_vars(self.vars, new_vars)
|
||||
|
||||
# finally, update the VARS_CACHE for the host, if it is set
|
||||
if host is not None:
|
||||
self.playbook.VARS_CACHE[host].update(self.playbook.extra_vars)
|
||||
|
|
|
@ -86,18 +86,18 @@ def _executor_hook(job_queue, result_queue, new_stdin):
|
|||
traceback.print_exc()
|
||||
|
||||
class HostVars(dict):
|
||||
''' A special view of setup_cache that adds values from the inventory when needed. '''
|
||||
''' A special view of vars_cache that adds values from the inventory when needed. '''
|
||||
|
||||
def __init__(self, setup_cache, inventory):
|
||||
self.setup_cache = setup_cache
|
||||
def __init__(self, vars_cache, inventory):
|
||||
self.vars_cache = vars_cache
|
||||
self.inventory = inventory
|
||||
self.lookup = dict()
|
||||
self.update(setup_cache)
|
||||
self.update(vars_cache)
|
||||
|
||||
def __getitem__(self, host):
|
||||
if host not in self.lookup:
|
||||
result = self.inventory.get_variables(host)
|
||||
result.update(self.setup_cache.get(host, {}))
|
||||
result.update(self.vars_cache.get(host, {}))
|
||||
self.lookup[host] = result
|
||||
return self.lookup[host]
|
||||
|
||||
|
@ -123,6 +123,7 @@ class Runner(object):
|
|||
background=0, # async poll every X seconds, else 0 for non-async
|
||||
basedir=None, # directory of playbook, if applicable
|
||||
setup_cache=None, # used to share fact data w/ other tasks
|
||||
vars_cache=None, # used to store variables about hosts
|
||||
transport=C.DEFAULT_TRANSPORT, # 'ssh', 'paramiko', 'local'
|
||||
conditional='True', # run only if this fact expression evals to true
|
||||
callbacks=None, # used for output
|
||||
|
@ -160,6 +161,7 @@ class Runner(object):
|
|||
self.check = check
|
||||
self.diff = diff
|
||||
self.setup_cache = utils.default(setup_cache, lambda: collections.defaultdict(dict))
|
||||
self.vars_cache = utils.default(vars_cache, lambda: collections.defaultdict(dict))
|
||||
self.basedir = utils.default(basedir, lambda: os.getcwd())
|
||||
self.callbacks = utils.default(callbacks, lambda: DefaultRunnerCallbacks())
|
||||
self.generated_jid = str(random.randint(0, 999999999999))
|
||||
|
@ -559,13 +561,19 @@ class Runner(object):
|
|||
# fireball, local, etc
|
||||
port = self.remote_port
|
||||
|
||||
module_vars = template.template(self.basedir, self.module_vars, host_variables)
|
||||
|
||||
# merge the VARS and SETUP caches for this host
|
||||
combined_cache = self.setup_cache.copy()
|
||||
combined_cache.get(host, {}).update(self.vars_cache.get(host, {}))
|
||||
|
||||
inject = {}
|
||||
inject = utils.combine_vars(inject, self.default_vars)
|
||||
inject = utils.combine_vars(inject, host_variables)
|
||||
inject = utils.combine_vars(inject, self.module_vars)
|
||||
inject = utils.combine_vars(inject, self.setup_cache[host])
|
||||
inject = utils.combine_vars(inject, module_vars)
|
||||
inject = utils.combine_vars(inject, combined_cache.get(host, {}))
|
||||
inject.setdefault('ansible_ssh_user', self.remote_user)
|
||||
inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
|
||||
inject['hostvars'] = HostVars(combined_cache, self.inventory)
|
||||
inject['group_names'] = host_variables.get('group_names', [])
|
||||
inject['groups'] = self.inventory.groups_list()
|
||||
inject['vars'] = self.module_vars
|
||||
|
|
|
@ -29,6 +29,7 @@ from ansible.utils.plugins import *
|
|||
from ansible.utils import template
|
||||
from ansible.callbacks import display
|
||||
import ansible.constants as C
|
||||
import ast
|
||||
import time
|
||||
import StringIO
|
||||
import stat
|
||||
|
@ -945,51 +946,95 @@ def is_list_of_strings(items):
|
|||
return False
|
||||
return True
|
||||
|
||||
def safe_eval(str, locals=None, include_exceptions=False):
|
||||
def safe_eval(expr, locals={}, include_exceptions=False):
|
||||
'''
|
||||
this is intended for allowing things like:
|
||||
with_items: a_list_variable
|
||||
where Jinja2 would return a string
|
||||
but we do not want to allow it to call functions (outside of Jinja2, where
|
||||
the env is constrained)
|
||||
|
||||
Based on:
|
||||
http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
|
||||
'''
|
||||
# FIXME: is there a more native way to do this?
|
||||
|
||||
def is_set(var):
|
||||
return not var.startswith("$") and not '{{' in var
|
||||
# this is the whitelist of AST nodes we are going to
|
||||
# allow in the evaluation. Any node type other than
|
||||
# those listed here will raise an exception in our custom
|
||||
# visitor class defined below.
|
||||
SAFE_NODES = set(
|
||||
(
|
||||
ast.Expression,
|
||||
ast.Compare,
|
||||
ast.Str,
|
||||
ast.List,
|
||||
ast.Tuple,
|
||||
ast.Dict,
|
||||
ast.Call,
|
||||
ast.Load,
|
||||
ast.BinOp,
|
||||
ast.UnaryOp,
|
||||
ast.Num,
|
||||
ast.Name,
|
||||
ast.Add,
|
||||
ast.Sub,
|
||||
ast.Mult,
|
||||
ast.Div,
|
||||
)
|
||||
)
|
||||
|
||||
def is_unset(var):
|
||||
return var.startswith("$") or '{{' in var
|
||||
# AST node types were expanded after 2.6
|
||||
if not sys.version.startswith('2.6'):
|
||||
SAFE_NODES.union(
|
||||
set(
|
||||
(ast.Set,)
|
||||
)
|
||||
)
|
||||
|
||||
# do not allow method calls to modules
|
||||
if not isinstance(str, basestring):
|
||||
# builtin functions that are not safe to call
|
||||
INVALID_CALLS = (
|
||||
'classmethod', 'compile', 'delattr', 'eval', 'execfile', 'file',
|
||||
'filter', 'help', 'input', 'object', 'open', 'raw_input', 'reduce',
|
||||
'reload', 'repr', 'setattr', 'staticmethod', 'super', 'type',
|
||||
)
|
||||
|
||||
class CleansingNodeVisitor(ast.NodeVisitor):
|
||||
def generic_visit(self, node):
|
||||
if type(node) not in SAFE_NODES:
|
||||
#raise Exception("invalid expression (%s) type=%s" % (expr, type(node)))
|
||||
raise Exception("invalid expression (%s)" % expr)
|
||||
super(CleansingNodeVisitor, self).generic_visit(node)
|
||||
def visit_Call(self, call):
|
||||
if call.func.id in INVALID_CALLS:
|
||||
raise Exception("invalid function: %s" % call.func.id)
|
||||
|
||||
if not isinstance(expr, basestring):
|
||||
# already templated to a datastructure, perhaps?
|
||||
if include_exceptions:
|
||||
return (str, None)
|
||||
return str
|
||||
if re.search(r'\w\.\w+\(', str):
|
||||
if include_exceptions:
|
||||
return (str, None)
|
||||
return str
|
||||
# do not allow imports
|
||||
if re.search(r'import \w+', str):
|
||||
if include_exceptions:
|
||||
return (str, None)
|
||||
return str
|
||||
return (expr, None)
|
||||
return expr
|
||||
|
||||
try:
|
||||
result = None
|
||||
if not locals:
|
||||
result = eval(str)
|
||||
else:
|
||||
result = eval(str, None, locals)
|
||||
parsed_tree = ast.parse(expr, mode='eval')
|
||||
cnv = CleansingNodeVisitor()
|
||||
cnv.visit(parsed_tree)
|
||||
compiled = compile(parsed_tree, expr, 'eval')
|
||||
result = eval(compiled, {}, locals)
|
||||
|
||||
if include_exceptions:
|
||||
return (result, None)
|
||||
else:
|
||||
return result
|
||||
except SyntaxError, e:
|
||||
# special handling for syntax errors, we just return
|
||||
# the expression string back as-is
|
||||
if include_exceptions:
|
||||
return (expr, None)
|
||||
return expr
|
||||
except Exception, e:
|
||||
if include_exceptions:
|
||||
return (str, e)
|
||||
return str
|
||||
return (expr, e)
|
||||
return expr
|
||||
|
||||
|
||||
def listify_lookup_plugin_terms(terms, basedir, inject):
|
||||
|
@ -1001,7 +1046,7 @@ def listify_lookup_plugin_terms(terms, basedir, inject):
|
|||
# with_items: {{ alist }}
|
||||
|
||||
stripped = terms.strip()
|
||||
if not (stripped.startswith('{') or stripped.startswith('[')) and not stripped.startswith("/"):
|
||||
if not (stripped.startswith('{') or stripped.startswith('[')) and not stripped.startswith("/") and not stripped.startswith('set(['):
|
||||
# if not already a list, get ready to evaluate with Jinja2
|
||||
# not sure why the "/" is in above code :)
|
||||
try:
|
||||
|
|
|
@ -30,7 +30,11 @@ from binascii import hexlify
|
|||
from binascii import unhexlify
|
||||
from ansible import constants as C
|
||||
|
||||
from Crypto.Hash import SHA256, HMAC
|
||||
try:
|
||||
from Crypto.Hash import SHA256, HMAC
|
||||
HAS_HASH = True
|
||||
except ImportError:
|
||||
HAS_HASH = False
|
||||
|
||||
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
|
||||
try:
|
||||
|
@ -178,7 +182,7 @@ class VaultEditor(object):
|
|||
def create_file(self):
|
||||
""" create a new encrypted file """
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
if os.path.isfile(self.filename):
|
||||
|
@ -195,7 +199,7 @@ class VaultEditor(object):
|
|||
|
||||
def decrypt_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
if not os.path.isfile(self.filename):
|
||||
|
@ -211,7 +215,7 @@ class VaultEditor(object):
|
|||
|
||||
def edit_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
# decrypt to tmpfile
|
||||
|
@ -241,7 +245,7 @@ class VaultEditor(object):
|
|||
|
||||
def encrypt_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
if not os.path.isfile(self.filename):
|
||||
|
@ -258,7 +262,7 @@ class VaultEditor(object):
|
|||
|
||||
def rekey_file(self, new_password):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
# decrypt
|
||||
|
@ -416,6 +420,11 @@ class VaultAES256(object):
|
|||
|
||||
# http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
|
||||
|
||||
def __init__(self):
|
||||
|
||||
if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
def gen_key_initctr(self, password, salt):
|
||||
# 16 for AES 128, 32 for AES256
|
||||
keylength = 32
|
||||
|
@ -428,8 +437,6 @@ class VaultAES256(object):
|
|||
# make two keys and one iv
|
||||
pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
|
||||
|
||||
if not HAS_PBKDF2:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
|
||||
count=10000, prf=pbkdf2_prf)
|
||||
|
@ -456,8 +463,6 @@ class VaultAES256(object):
|
|||
# 1) nbits (integer) - Length of the counter, in bits.
|
||||
# 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
|
||||
|
||||
if not HAS_COUNTER:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
ctr = Counter.new(128, initial_value=long(iv, 16))
|
||||
|
||||
# AES.new PARAMETERS
|
||||
|
@ -493,8 +498,6 @@ class VaultAES256(object):
|
|||
return None
|
||||
|
||||
# SET THE COUNTER AND THE CIPHER
|
||||
if not HAS_COUNTER:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
ctr = Counter.new(128, initial_value=long(iv, 16))
|
||||
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
|
||||
|
||||
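The vault changes above wrap the pycrypto hash imports in try/except and gate each entry point on a HAS_HASH flag, so a missing or too-old library fails with a clear message at call time instead of an ImportError at import time. A generic, hedged sketch of that optional-dependency pattern (the CRYPTO_UPGRADE wording is illustrative):

```python
try:
    from Crypto.Hash import SHA256, HMAC  # may be missing or too old
    HAS_HASH = True
except ImportError:
    HAS_HASH = False

# Message text is illustrative, not the exact string used by ansible-vault.
CRYPTO_UPGRADE = "ansible-vault requires a newer pycrypto with SHA256/HMAC support"

def require_crypto():
    """Raise a clear, actionable error when the optional dependency is absent."""
    if not HAS_HASH:
        raise RuntimeError(CRYPTO_UPGRADE)
```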
|
|
|
@ -21,7 +21,15 @@ DOCUMENTATION = '''
|
|||
module: ec2_facts
|
||||
short_description: Gathers facts about remote hosts within ec2 (aws)
|
||||
version_added: "1.0"
|
||||
options: {}
|
||||
options:
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
description:
|
||||
- This module fetches data from the metadata servers in ec2 (aws).
|
||||
Eucalyptus cloud provides a similar service and this module should
|
||||
|
@ -70,9 +78,12 @@ class Ec2Metadata(object):
|
|||
self._prefix = 'ansible_ec2_%s'
|
||||
|
||||
def _fetch(self, url):
|
||||
self.module.fail_json(msg="url is %s" % url)
|
||||
(response, info) = fetch_url(self.module, url, force=True)
|
||||
return response.read()
|
||||
if response:
|
||||
data = response.read()
|
||||
else:
|
||||
data = None
|
||||
return data
|
||||
|
||||
def _mangle_fields(self, fields, uri, filter_patterns=['public-keys-0']):
|
||||
new_fields = {}
|
||||
|
|
|
@ -157,7 +157,7 @@ def commit(changes):
|
|||
time.sleep(500)
|
||||
|
||||
def main():
|
||||
argument_spec = ec2_argument_keys_spec()
|
||||
argument_spec = ec2_argument_spec()
|
||||
argument_spec.update(dict(
|
||||
command = dict(choices=['get', 'create', 'delete'], required=True),
|
||||
zone = dict(required=True),
|
||||
|
|
|
@ -249,7 +249,7 @@ def is_walrus(s3_url):
|
|||
return False
|
||||
|
||||
def main():
|
||||
argument_spec = ec2_argument_keys_spec()
|
||||
argument_spec = ec2_argument_spec()
|
||||
argument_spec.update(dict(
|
||||
bucket = dict(required=True),
|
||||
object = dict(),
|
||||
|
|
|
@ -101,6 +101,7 @@ EXAMPLES = '''
|
|||
|
||||
import ConfigParser
|
||||
import os
|
||||
import pipes
|
||||
try:
|
||||
import MySQLdb
|
||||
except ImportError:
|
||||
|
@ -123,36 +124,36 @@ def db_delete(cursor, db):
|
|||
|
||||
def db_dump(module, host, user, password, db_name, target, port, socket=None):
|
||||
cmd = module.get_bin_path('mysqldump', True)
|
||||
cmd += " --quick --user=%s --password='%s'" %(user, password)
|
||||
cmd += " --quick --user=%s --password='%s'" % (pipes.quote(user), pipes.quote(password))
|
||||
if socket is not None:
|
||||
cmd += " --socket=%s" % socket
|
||||
cmd += " --socket=%s" % pipes.quote(socket)
|
||||
else:
|
||||
cmd += " --host=%s --port=%s" % (host, port)
|
||||
cmd += " %s" % db_name
|
||||
cmd += " --host=%s --port=%s" % (pipes.quote(host), pipes.quote(port))
|
||||
cmd += " %s" % pipes.quote(db_name)
|
||||
if os.path.splitext(target)[-1] == '.gz':
|
||||
cmd = cmd + ' | gzip > ' + target
|
||||
cmd = cmd + ' | gzip > ' + pipes.quote(target)
|
||||
elif os.path.splitext(target)[-1] == '.bz2':
|
||||
cmd = cmd + ' | bzip2 > ' + target
|
||||
cmd = cmd + ' | bzip2 > ' + pipes.quote(target)
|
||||
else:
|
||||
cmd += " > %s" % target
|
||||
rc, stdout, stderr = module.run_command(cmd)
|
||||
cmd += " > %s" % pipes.quote(target)
|
||||
rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
|
||||
return rc, stdout, stderr
|
||||
|
||||
def db_import(module, host, user, password, db_name, target, port, socket=None):
|
||||
cmd = module.get_bin_path('mysql', True)
|
||||
cmd += " --user=%s --password='%s'" %(user, password)
|
||||
cmd += " --user=%s --password='%s'" % (pipes.quote(user), pipes.quote(password))
|
||||
if socket is not None:
|
||||
cmd += " --socket=%s" % socket
|
||||
cmd += " --socket=%s" % pipes.quote(socket)
|
||||
else:
|
||||
cmd += " --host=%s --port=%s" % (host, port)
|
||||
cmd += " -D %s" % db_name
|
||||
cmd += " --host=%s --port=%s" % (pipes.quote(host), pipes.quote(port))
|
||||
cmd += " -D %s" % pipes.quote(db_name)
|
||||
if os.path.splitext(target)[-1] == '.gz':
|
||||
cmd = 'gunzip < ' + target + ' | ' + cmd
|
||||
cmd = 'gunzip < ' + pipes.quote(target) + ' | ' + cmd
|
||||
elif os.path.splitext(target)[-1] == '.bz2':
|
||||
cmd = 'bunzip2 < ' + target + ' | ' + cmd
|
||||
cmd = 'bunzip2 < ' + pipes.quote(target) + ' | ' + cmd
|
||||
else:
|
||||
cmd += " < %s" % target
|
||||
rc, stdout, stderr = module.run_command(cmd)
|
||||
cmd += " < %s" % pipes.quote(target)
|
||||
rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
|
||||
return rc, stdout, stderr
|
||||
|
||||
def db_create(cursor, db, encoding, collation):
|
||||
|
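The mysql_db changes above route every interpolated value through pipes.quote before the string is run with use_unsafe_shell=True. A short illustration of why that matters (pipes.quote is the Python 2 spelling used in the diff; Python 3 calls it shlex.quote):

```python
import shlex  # shlex.quote in Python 3; pipes.quote in Python 2

db_name = "prod; rm -rf /"  # hostile value an attacker might supply
unsafe = "mysqldump %s > /tmp/dump.sql" % db_name
safe = "mysqldump %s > /tmp/dump.sql" % shlex.quote(db_name)

print(unsafe)  # the shell would see two separate commands
print(safe)    # the whole value stays one quoted argument
```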
|
|
@ -73,6 +73,14 @@ options:
|
|||
default: None
|
||||
aliases: []
|
||||
choices: ['kv']
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
|
@ -97,7 +105,7 @@ except ImportError:
|
|||
|
||||
|
||||
def ring_check(module, riak_admin_bin):
|
||||
cmd = '%s ringready 2> /dev/null' % riak_admin_bin
|
||||
cmd = '%s ringready' % riak_admin_bin
|
||||
rc, out, err = module.run_command(cmd)
|
||||
if rc == 0 and 'TRUE All nodes agree on the ring' in out:
|
||||
return True
|
||||
|
@ -116,8 +124,8 @@ def main():
|
|||
wait_for_handoffs=dict(default=False, type='int'),
|
||||
wait_for_ring=dict(default=False, type='int'),
|
||||
wait_for_service=dict(
|
||||
required=False, default=None, choices=['kv'])
|
||||
)
|
||||
required=False, default=None, choices=['kv']),
|
||||
validate_certs = dict(default='yes', type='bool'))
|
||||
)
|
||||
|
||||
|
||||
|
@ -128,6 +136,7 @@ def main():
|
|||
wait_for_handoffs = module.params.get('wait_for_handoffs')
|
||||
wait_for_ring = module.params.get('wait_for_ring')
|
||||
wait_for_service = module.params.get('wait_for_service')
|
||||
validate_certs = module.params.get('validate_certs')
|
||||
|
||||
|
||||
#make sure riak commands are on the path
|
||||
|
@ -212,7 +221,7 @@ def main():
|
|||
if wait_for_handoffs:
|
||||
timeout = time.time() + wait_for_handoffs
|
||||
while True:
|
||||
cmd = '%s transfers 2> /dev/null' % riak_admin_bin
|
||||
cmd = '%s transfers' % riak_admin_bin
|
||||
rc, out, err = module.run_command(cmd)
|
||||
if 'No transfers active' in out:
|
||||
result['handoffs'] = 'No transfers active.'
|
||||
|
@ -222,7 +231,7 @@ def main():
|
|||
module.fail_json(msg='Timeout waiting for handoffs.')
|
||||
|
||||
if wait_for_service:
|
||||
cmd = '%s wait_for_service riak_%s %s' % ( riak_admin_bin, wait_for_service, node_name)
|
||||
cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name ]
|
||||
rc, out, err = module.run_command(cmd)
|
||||
result['service'] = out
|
||||
|
||||
|
@ -241,5 +250,6 @@ def main():
|
|||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import *
|
||||
from ansible.module_utils.urls import *
|
||||
|
||||
main()
|
||||
|
|
|
@ -51,7 +51,7 @@ options:
|
|||
description:
|
||||
- Optional URL to submit the notification to. Use to send notifications to Airbrake-compliant tools like Errbit.
|
||||
required: false
|
||||
default: https://airbrake.io/deploys
|
||||
default: "https://airbrake.io/deploys"
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates for the target url will not be validated. This should only be used
|
||||
|
@ -84,7 +84,7 @@ def main():
|
|||
user=dict(required=False),
|
||||
repo=dict(required=False),
|
||||
revision=dict(required=False),
|
||||
url=dict(required=False, default='https://api.airbrake.io/deploys.txt')
|
||||
url=dict(required=False, default='https://api.airbrake.io/deploys.txt'),
|
||||
validate_certs=dict(default='yes', type='bool'),
|
||||
),
|
||||
supports_check_mode=True
|
||||
|
@ -115,7 +115,7 @@ def main():
|
|||
|
||||
# Send the data to airbrake
|
||||
data = urllib.urlencode(params)
|
||||
response, info = fetch_url(module, url, data=data, validate_certs=module.params['validate_certs'])
|
||||
response, info = fetch_url(module, url, data=data)
|
||||
if info['status'] == 200:
|
||||
module.exit_json(changed=True)
|
||||
else:
|
||||
|
|
|
@ -58,6 +58,14 @@ options:
|
|||
description:
|
||||
- Organization's Boundary API key
|
||||
required: true
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
|
||||
notes:
|
||||
- This module does not yet support boundary tags.
|
||||
|
@ -221,6 +229,7 @@ def main():
|
|||
name=dict(required=False),
|
||||
apikey=dict(required=True),
|
||||
apiid=dict(required=True),
|
||||
validate_certs = dict(default='yes', type='bool'),
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -54,6 +54,14 @@ options:
|
|||
description: ["An arbitrary string to use for aggregation."]
|
||||
required: false
|
||||
default: null
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
|
@ -89,7 +97,8 @@ def main():
|
|||
choices=['nagios', 'hudson', 'jenkins', 'user', 'my apps',
|
||||
'feed', 'chef', 'puppet', 'git', 'bitbucket', 'fabric',
|
||||
'capistrano']
|
||||
)
|
||||
),
|
||||
validate_certs = dict(default='yes', type='bool'),
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -47,6 +47,7 @@ EXAMPLES = '''
|
|||
- monit: name=httpd state=started
|
||||
'''
|
||||
|
||||
import pipes
|
||||
|
||||
def main():
|
||||
arg_spec = dict(
|
||||
|
@ -67,7 +68,7 @@ def main():
|
|||
rc, out, err = module.run_command('%s reload' % MONIT)
|
||||
module.exit_json(changed=True, name=name, state=state)
|
||||
|
||||
rc, out, err = module.run_command('%s summary | grep "Process \'%s\'"' % (MONIT, name))
|
||||
rc, out, err = module.run_command('%s summary | grep "Process \'%s\'"' % (MONIT, pipes.quote(name)), use_unsafe_shell=True)
|
||||
present = name in out
|
||||
|
||||
if not present and not state == 'present':
|
||||
|
@ -78,7 +79,7 @@ def main():
|
|||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
module.run_command('%s reload' % MONIT, check_rc=True)
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, name))
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, pipes.quote(name)), use_unsafe_shell=True)
|
||||
if name in out:
|
||||
module.exit_json(changed=True, name=name, state=state)
|
||||
else:
|
||||
|
@ -86,7 +87,7 @@ def main():
|
|||
|
||||
module.exit_json(changed=False, name=name, state=state)
|
||||
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, name))
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, pipes.quote(name)), use_unsafe_shell=True)
|
||||
running = 'running' in out.lower()
|
||||
|
||||
if running and (state == 'started' or state == 'monitored'):
|
||||
|
@ -99,7 +100,7 @@ def main():
|
|||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
module.run_command('%s stop %s' % (MONIT, name))
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, name))
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, pipes.quote(name)), use_unsafe_shell=True)
|
||||
if 'not monitored' in out.lower() or 'stop pending' in out.lower():
|
||||
module.exit_json(changed=True, name=name, state=state)
|
||||
module.fail_json(msg=out)
|
||||
|
@ -108,7 +109,8 @@ def main():
|
|||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
module.run_command('%s unmonitor %s' % (MONIT, name))
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, name))
|
||||
# FIXME: DRY FOLKS!
|
||||
rc, out, err = module.run_command('%s summary | grep %s' % (MONIT, pipes.quote(name)), use_unsafe_shell=True)
|
||||
if 'not monitored' in out.lower():
|
||||
module.exit_json(changed=True, name=name, state=state)
|
||||
module.fail_json(msg=out)
|
||||
|
|
|
@ -63,6 +63,14 @@ options:
|
|||
description:
|
||||
- The environment for this deployment
|
||||
required: false
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
|
||||
# informational: requirements for nodes
|
||||
requirements: [ urllib, urllib2 ]
|
||||
|
@ -92,6 +100,7 @@ def main():
|
|||
user=dict(required=False),
|
||||
appname=dict(required=False),
|
||||
environment=dict(required=False),
|
||||
validate_certs = dict(default='yes', type='bool'),
|
||||
),
|
||||
supports_check_mode=True
|
||||
)
|
||||
|
|
|
@ -60,6 +60,15 @@ options:
|
|||
default: Created by Ansible
|
||||
choices: []
|
||||
aliases: []
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
|
||||
notes:
|
||||
- This module does not yet have support to end maintenance windows.
|
||||
'''
|
||||
|
@ -135,7 +144,8 @@ def main():
|
|||
passwd=dict(required=True),
|
||||
service=dict(required=False),
|
||||
hours=dict(default='1', required=False),
|
||||
desc=dict(default='Created by Ansible', required=False)
|
||||
desc=dict(default='Created by Ansible', required=False),
|
||||
validate_certs = dict(default='yes', type='bool'),
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -73,6 +73,15 @@ options:
|
|||
choices: [ 'present', 'absent' ]
|
||||
default: null
|
||||
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
choices: ['yes', 'no']
|
||||
version_added: 1.5.1
|
||||
|
||||
notes:
|
||||
- The DNS Made Easy service requires that machines interacting with the API have the proper time and timezone set. Be sure you are within a few seconds of actual time by using NTP.
|
||||
- This module returns record(s) in the "result" element when 'state' is set to 'present'. This value can be registered and used in your playbooks.
|
||||
|
@ -120,7 +129,7 @@ class DME2:
|
|||
|
||||
self.api = apikey
|
||||
self.secret = secret
|
||||
self.baseurl = 'http://api.dnsmadeeasy.com/V2.0/'
|
||||
self.baseurl = 'https://api.dnsmadeeasy.com/V2.0/'
|
||||
self.domain = str(domain)
|
||||
self.domain_map = None # ["domain_name"] => ID
|
||||
self.record_map = None # ["record_name"] => ID
|
||||
|
@ -152,14 +161,14 @@ class DME2:
|
|||
if data and not isinstance(data, basestring):
|
||||
data = urllib.urlencode(data)
|
||||
|
||||
response, info = fetch_url(self.module, url, data=data, method=method)
|
||||
response, info = fetch_url(self.module, url, data=data, method=method, headers=self._headers())
|
||||
if info['status'] not in (200, 201, 204):
|
||||
self.module.fail_json(msg="%s returned %s, with body: %s" % (url, info['status'], info['msg']))
|
||||
|
||||
try:
|
||||
return json.load(response)
|
||||
except Exception, e:
|
||||
return False
|
||||
return {}
|
||||
|
||||
def getDomain(self, domain_id):
|
||||
if not self.domain_map:
|
||||
|
@ -239,6 +248,7 @@ def main():
|
|||
'A', 'AAAA', 'CNAME', 'HTTPRED', 'MX', 'NS', 'PTR', 'SRV', 'TXT']),
|
||||
record_value=dict(required=False),
|
||||
record_ttl=dict(required=False, default=1800, type='int'),
|
||||
validate_certs = dict(default='yes', type='bool'),
|
||||
),
|
||||
required_together=(
|
||||
['record_value', 'record_ttl', 'record_type']
|
||||
|
@ -258,7 +268,7 @@ def main():
|
|||
domain_records = DME.getRecords()
|
||||
if not domain_records:
|
||||
module.fail_json(
|
||||
msg="The %s domain name is not accessible with this api_key; try using its ID if known." % domain)
|
||||
msg="The requested domain name is not accessible with this api_key; try using its ID if known.")
|
||||
module.exit_json(changed=False, result=domain_records)
|
||||
|
||||
# Fetch existing record + Build new one
|
||||
|
|
|
@ -122,7 +122,7 @@ class netscaler(object):
|
|||
'Content-Type' : 'application/x-www-form-urlencoded',
|
||||
}
|
||||
|
||||
response, info = fetch_url(self.module, request_url, data=data_json, validate_certs=self.module.params['validate_certs'])
|
||||
response, info = fetch_url(self.module, request_url, data=data_json)
|
||||
|
||||
return json.load(response.read())
|
||||
|
||||
|
|
|
@ -124,14 +124,14 @@ def url_filename(url):
|
|||
return 'index.html'
|
||||
return fn
|
||||
|
||||
def url_get(module, url, dest, use_proxy, last_mod_time, force, validate_certs):
|
||||
def url_get(module, url, dest, use_proxy, last_mod_time, force):
|
||||
"""
|
||||
Download data from the url and store in a temporary file.
|
||||
|
||||
Return (tempfile, info about the request)
|
||||
"""
|
||||
|
||||
rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, validate_certs=validate_certs)
|
||||
rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time)
|
||||
|
||||
if info['status'] == 304:
|
||||
module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))
|
||||
|
@ -192,7 +192,6 @@ def main():
|
|||
force = module.params['force']
|
||||
sha256sum = module.params['sha256sum']
|
||||
use_proxy = module.params['use_proxy']
|
||||
validate_certs = module.params['validate_certs']
|
||||
|
||||
dest_is_dir = os.path.isdir(dest)
|
||||
last_mod_time = None
|
||||
|
@ -207,7 +206,7 @@ def main():
|
|||
last_mod_time = datetime.datetime.utcfromtimestamp(mtime)
|
||||
|
||||
# download to tmpsrc
|
||||
tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, validate_certs)
|
||||
tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force)
|
||||
|
||||
# Now the request has completed, we can finally generate the final
|
||||
# destination file name from the info dict.
|
||||
|
|
|
@@ -76,6 +76,14 @@ options:
     description:
       - (inbox only) Link associated with the message. This will be used to link the message subject in Team Inbox.
     required: false
+  validate_certs:
+    description:
+      - If C(no), SSL certificates will not be validated. This should only be used
+        on personally controlled sites using self-signed certificates.
+    required: false
+    default: 'yes'
+    choices: ['yes', 'no']
+    version_added: 1.5.1

 # informational: requirements for nodes
 requirements: [ urllib, urllib2 ]

@@ -116,6 +124,7 @@ def main():
             project=dict(required=False),
             tags=dict(required=False),
             link=dict(required=False),
+            validate_certs = dict(default='yes', type='bool'),
         ),
         supports_check_mode=True
     )

@@ -31,6 +31,14 @@ options:
     description:
       - Icon for the service
     required: false
+  validate_certs:
+    description:
+      - If C(no), SSL certificates will not be validated. This should only be used
+        on personally controlled sites using self-signed certificates.
+    required: false
+    default: 'yes'
+    choices: ['yes', 'no']
+    version_added: 1.5.1
 author: Jonas Pfenniger <zimbatm@zimbatm.com>
 '''

@@ -71,6 +79,7 @@ def main():
             service = dict(type='str', default='ansible'),
             url = dict(type='str', default=None),
             icon_url = dict(type='str', default=None),
+            validate_certs = dict(default='yes', type='bool'),
         )
     )

@@ -46,6 +46,14 @@ options:
     required: false
     default: 'yes'
     choices: [ "yes", "no" ]
+  validate_certs:
+    description:
+      - If C(no), SSL certificates will not be validated. This should only be used
+        on personally controlled sites using self-signed certificates.
+    required: false
+    default: 'yes'
+    choices: ['yes', 'no']
+    version_added: 1.5.1

 # informational: requirements for nodes
 requirements: [ urllib, urllib2 ]

@@ -104,6 +112,7 @@ def main():
                              "purple", "gray", "random"]),
             msg_format=dict(default="text", choices=["text", "html"]),
             notify=dict(default=True, type='bool'),
+            validate_certs = dict(default='yes', type='bool'),
         ),
         supports_check_mode=True
     )
@@ -138,7 +138,11 @@ import datetime
 import fnmatch

 # APT related constants
-APT_ENVVARS = "DEBIAN_FRONTEND=noninteractive DEBIAN_PRIORITY=critical"
+APT_ENV_VARS = dict(
+  DEBIAN_FRONTEND = 'noninteractive',
+  DEBIAN_PRIORITY = 'critical'
+)

 DPKG_OPTIONS = 'force-confdef,force-confold'
 APT_GET_ZERO = "0 upgraded, 0 newly installed"
 APTITUDE_ZERO = "0 packages upgraded, 0 newly installed"

@@ -260,7 +264,10 @@ def install(m, pkgspec, cache, upgrade=False, default_release=None,
     else:
         check_arg = ''

-    cmd = "%s %s -y %s %s %s install %s" % (APT_ENVVARS, APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)
+    for (k,v) in APT_ENV_VARS.iteritems():
+        os.environ[k] = v
+
+    cmd = "%s -y %s %s %s install %s" % (APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)

     if default_release:
         cmd += " -t '%s'" % (default_release,)

@@ -292,7 +299,11 @@ def remove(m, pkgspec, cache, purge=False,
         purge = '--purge'
     else:
         purge = ''
-    cmd = "%s %s -q -y %s %s remove %s" % (APT_ENVVARS, APT_GET_CMD, dpkg_options, purge, packages)
+
+    for (k,v) in APT_ENV_VARS.iteritems():
+        os.environ[k] = v
+
+    cmd = "%s -q -y %s %s remove %s" % (APT_GET_CMD, dpkg_options, purge, packages)

     if m.check_mode:
         m.exit_json(changed=True)

@@ -332,7 +343,11 @@ def upgrade(m, mode="yes", force=False,
         force_yes = ''

     apt_cmd_path = m.get_bin_path(apt_cmd, required=True)
-    cmd = '%s %s -y %s %s %s %s' % (APT_ENVVARS, apt_cmd_path, dpkg_options,
+
+    for (k,v) in APT_ENV_VARS.iteritems():
+        os.environ[k] = v
+
+    cmd = '%s -y %s %s %s %s' % (apt_cmd_path, dpkg_options,
                                  force_yes, check_arg, upgrade_command)
     rc, out, err = m.run_command(cmd)
     if rc:
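The apt hunks replace the old VAR=value prefix on the command string with variables exported through os.environ before the command is built, so the settings still reach apt-get once the command no longer goes through a shell. A minimal standalone sketch of the same pattern, using subprocess and the harmless env command in place of apt-get:

# Sketch only: export the variables the way the new apt code does, then run
# a command without a shell. 'env' stands in for apt-get here.
import os
import subprocess

APT_ENV_VARS = dict(
    DEBIAN_FRONTEND='noninteractive',
    DEBIAN_PRIORITY='critical',
)

for k, v in APT_ENV_VARS.items():   # the Python 2 module code uses .iteritems()
    os.environ[k] = v

# the command is now a plain argv list; no "VAR=value cmd" shell prefix needed
print(subprocess.check_output(['env']).decode())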
@@ -131,7 +131,7 @@ def all_keys(module, keyring):
     return results

 def key_present(module, key_id):
-    (rc, out, err) = module.run_command("apt-key list | 2>&1 grep -i -q %s" % key_id)
+    (rc, out, err) = module.run_command("apt-key list | 2>&1 grep -i -q %s" % pipes.quote(key_id), use_unsafe_shell=True)
     return rc == 0

 def download_key(module, url):

@@ -140,7 +140,7 @@ def download_key(module, url):
     if url is None:
         module.fail_json(msg="needed a URL but was not specified")
     try:
-        rsp, info = fetch_url(module, url, validate_certs=module.params['validate_certs'])
+        rsp, info = fetch_url(module, url)
         return rsp.read()
     except Exception:
         module.fail_json(msg="error getting key id from url", traceback=format_exc())
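The key_present() change shows the quoting pattern that repeats through the packaging and cron hunks below: every value interpolated into a shell pipeline goes through pipes.quote(), and the shell is only used when explicitly requested with use_unsafe_shell=True. A small sketch of the idea outside Ansible, calling subprocess directly; the pipeline below is a simplified variant, not the module's exact command:

import pipes       # shlex.quote on Python 3
import subprocess

key_id = "AACC4563; echo injected"    # hostile input stays one literal argument once quoted
cmd = "apt-key list 2>&1 | grep -i -q %s" % pipes.quote(key_id)
rc = subprocess.call(cmd, shell=True)
# non-zero status simply means the pattern was not found; the embedded
# "; echo injected" was never run as a command
print("grep exit status:", rc)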
@@ -352,7 +352,10 @@ def get_add_ppa_signing_key_callback(module):
     def _run_command(command):
         module.run_command(command, check_rc=True)

-    return _run_command if not module.check_mode else None
+    if module.check_mode:
+        return None
+    else:
+        return _run_command

 def main():

@@ -89,7 +89,7 @@ def get_rubygems_path(module):
     return module.get_bin_path('gem', True)

 def get_rubygems_version(module):
-    cmd = [get_rubygems_path(module), '--version']
+    cmd = [ get_rubygems_path(module), '--version' ]
     (rc, out, err) = module.run_command(cmd, check_rc=True)

     match = re.match(r'^(\d+)\.(\d+)\.(\d+)', out)

@@ -53,6 +53,7 @@ EXAMPLES = '''
 - macports: name=foo state=inactive
 '''

+import pipes

 def update_package_db(module, port_path):
     """ Updates packages list. """

@@ -68,7 +69,7 @@ def query_package(module, port_path, name, state="present"):

     if state == "present":

-        rc, out, err = module.run_command("%s installed | grep -q ^.*%s" % (port_path, name))
+        rc, out, err = module.run_command("%s installed | grep -q ^.*%s" % (pipes.quote(port_path), pipes.quote(name)), use_unsafe_shell=True)
         if rc == 0:
             return True

@@ -76,7 +77,8 @@ def query_package(module, port_path, name, state="present"):

     elif state == "active":

-        rc, out, err = module.run_command("%s installed %s | grep -q active" % (port_path, name))
+        rc, out, err = module.run_command("%s installed %s | grep -q active" % (pipes.quote(port_path), pipes.quote(name)), use_unsafe_shell=True)

         if rc == 0:
             return True

@@ -51,6 +51,7 @@ EXAMPLES = '''
 - opkg: name=foo,bar state=absent
 '''

+import pipes

 def update_package_db(module, opkg_path):
     """ Updates packages list. """

@@ -66,7 +67,7 @@ def query_package(module, opkg_path, name, state="present"):

     if state == "present":

-        rc, out, err = module.run_command("%s list-installed | grep -q ^%s" % (opkg_path, name))
+        rc, out, err = module.run_command("%s list-installed | grep -q ^%s" % (pipes.quote(opkg_path), pipes.quote(name)), use_unsafe_shell=True)
         if rc == 0:
             return True

@@ -100,7 +100,7 @@ def query_package(module, name, state="installed"):

 def update_package_db(module):
-    cmd = "pacman -Syy > /dev/null"
+    cmd = "pacman -Syy"
     rc, stdout, stderr = module.run_command(cmd, check_rc=False)

     if rc != 0:

@@ -120,7 +120,7 @@ def remove_packages(module, packages):
         if not query_package(module, package):
             continue

-        cmd = "pacman -%s %s --noconfirm > /dev/null" % (args, package)
+        cmd = "pacman -%s %s --noconfirm" % (args, package)
         rc, stdout, stderr = module.run_command(cmd, check_rc=False)

         if rc != 0:

@@ -148,7 +148,7 @@ def install_packages(module, packages, package_files):
         else:
             params = '-S %s' % package

-        cmd = "pacman %s --noconfirm > /dev/null" % (params)
+        cmd = "pacman %s --noconfirm" % (params)
         rc, stdout, stderr = module.run_command(cmd, check_rc=False)

         if rc != 0:

@@ -58,13 +58,13 @@ import json
 import shlex
 import os
 import sys

+import pipes

 def query_package(module, pkgin_path, name, state="present"):

     if state == "present":

-        rc, out, err = module.run_command("%s -y list | grep ^%s" % (pkgin_path, name))
+        rc, out, err = module.run_command("%s -y list | grep ^%s" % (pipes.quote(pkgin_path), pipes.quote(name)), use_unsafe_shell=True)

         if rc == 0:
             # At least one package with a package name that starts with ``name``

@@ -58,13 +58,14 @@ pkgutil: name=CSWcommon state=present
 # Install a package from a specific repository
 pkgutil: name=CSWnrpe site='ftp://myinternal.repo/opencsw/kiel state=latest'
 '''

 import os
+import pipes

 def package_installed(module, name):
     cmd = [module.get_bin_path('pkginfo', True)]
     cmd.append('-q')
     cmd.append(name)
     #rc, out, err = module.run_command(' '.join(cmd), shell=False)
     rc, out, err = module.run_command(' '.join(cmd))
     if rc == 0:
         return True

@@ -73,12 +74,14 @@ def package_installed(module, name):

 def package_latest(module, name, site):
     # Only supports one package
+    name = pipes.quote(name)
+    site = pipes.quote(site)
     cmd = [ 'pkgutil', '--single', '-c' ]
     if site is not None:
         cmd += [ '-t', site ]
     cmd.append(name)
     cmd += [ '| tail -1 | grep -v SAME' ]
-    rc, out, err = module.run_command(' '.join(cmd))
+    rc, out, err = module.run_command(' '.join(cmd), use_unsafe_shell=True)
     if rc == 1:
         return True
     else:
@@ -71,7 +71,7 @@ def query_package(module, name):
     if pkg_info_path:
         pkgng = False
         pkg_glob_path = module.get_bin_path('pkg_glob', True)
-        rc, out, err = module.run_command("%s -e `pkg_glob %s`" % (pkg_info_path, name))
+        rc, out, err = module.run_command("%s -e `pkg_glob %s`" % (pkg_info_path, pipes.quote(name)), use_unsafe_shell=True)
     else:
         pkgng = True
         pkg_info_path = module.get_bin_path('pkg', True)

@@ -128,11 +128,11 @@ def remove_packages(module, packages):
         if not query_package(module, package):
             continue

-        rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, package))
+        rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, pipes.quote(package)), use_unsafe_shell=True)

         if query_package(module, package):
             name_without_digits = re.sub('[0-9]', '', package)
-            rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, name_without_digits))
+            rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, pipes.quote(name_without_digits)),use_unsafe_shell=True)
             if query_package(module, package):
                 module.fail_json(msg="failed to remove %s: %s" % (package, out))

@@ -216,7 +216,6 @@ class Rhsm(RegistrationBase):
         if password:
             args.extend(['--password', password])

-        # Do the needful...
         rc, stderr, stdout = self.module.run_command(args, check_rc=True)

     def unsubscribe(self):

@@ -123,7 +123,7 @@ class RpmKey:
     def fetch_key(self, url, maxbytes=MAXBYTES):
         """Downloads a key from url, returns a valid path to a gpg key"""
         try:
-            rsp, info = fetch_url(self.module, url, validate_certs=self.module.params['validate_certs'])
+            rsp, info = fetch_url(self.module, url)
             key = rsp.read(maxbytes)
             if not is_pubkey(key):
                 self.module.fail_json(msg="Not a public key: %s" % url)

@@ -19,6 +19,7 @@
 # along with this software. If not, see <http://www.gnu.org/licenses/>.

 import re
+import pipes

 DOCUMENTATION = '''
 ---

@@ -78,9 +79,9 @@ def query_package(module, name, depot=None):

     cmd_list = '/usr/sbin/swlist -a revision -l product'
     if depot:
-        rc, stdout, stderr = module.run_command("%s -s %s %s | grep %s" % (cmd_list, depot, name, name))
+        rc, stdout, stderr = module.run_command("%s -s %s %s | grep %s" % (cmd_list, pipes.quote(depot), pipes.quote(name), pipes.quote(name)), use_unsafe_shell=True)
     else:
-        rc, stdout, stderr = module.run_command("%s %s | grep %s" % (cmd_list, name, name))
+        rc, stdout, stderr = module.run_command("%s %s | grep %s" % (cmd_list, pipes.quote(name), pipes.quote(name)), use_unsafe_shell=True)
     if rc == 0:
         version = re.sub("\s\s+|\t" , " ", stdout).strip().split()[1]
     else:

@@ -104,7 +104,7 @@ def query_package_provides(module, name):

     # rpm -q returns 0 if the package is installed,
     # 1 if it is not installed
-    cmd = "rpm -q --provides %s >/dev/null" % (name)
+    cmd = "rpm -q --provides %s" % (name)
     rc, stdout, stderr = module.run_command(cmd, check_rc=False)
     return rc == 0

@@ -125,7 +125,7 @@ def remove_packages(module, packages):
         if not query_package(module, package):
             continue

-        cmd = "%s --auto %s > /dev/null" % (URPME_PATH, package)
+        cmd = "%s --auto %s" % (URPME_PATH, package)
         rc, stdout, stderr = module.run_command(cmd, check_rc=False)

         if rc != 0:

@@ -158,7 +158,7 @@ def install_packages(module, pkgspec, force=True, no_suggests=True):
     else:
         force_yes = ''

-    cmd = ("%s --auto %s --quiet %s %s > /dev/null" % (URPMI_PATH, force_yes, no_suggests_yes, packages))
+    cmd = ("%s --auto %s --quiet %s %s" % (URPMI_PATH, force_yes, no_suggests_yes, packages))

     rc, out, err = module.run_command(cmd)
@@ -76,8 +76,7 @@ class Bzr(object):
         self.bzr_path = bzr_path

     def _command(self, args_list, cwd=None, **kwargs):
-        (rc, out, err) = self.module.run_command(
-            [self.bzr_path] + args_list, cwd=cwd, **kwargs)
+        (rc, out, err) = self.module.run_command([self.bzr_path] + args_list, cwd=cwd, **kwargs)
         return (rc, out, err)

     def get_version(self):

@@ -263,7 +263,7 @@ def get_remote_head(git_path, module, dest, version, remote, bare):

 def is_remote_tag(git_path, module, dest, remote, version):
     cmd = '%s ls-remote %s -t refs/tags/%s' % (git_path, remote, version)
-    (rc, out, err) = module.run_command(cmd, check_rc=True)
+    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
     if version in out:
         return True
     else:

@@ -291,7 +291,7 @@ def get_tags(git_path, module, dest):

 def is_remote_branch(git_path, module, dest, remote, version):
     cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
-    (rc, out, err) = module.run_command(cmd, check_rc=True)
+    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
     if version in out:
         return True
     else:

@@ -75,7 +75,7 @@ def list(module, hookurl, oauthkey, repo, user):
     headers = {
         'Authorization': 'Basic %s' % auth,
     }
-    response, info = fetch_url(module, url, headers=headers, validate_certs=module.params['validate_certs'])
+    response, info = fetch_url(module, url, headers=headers)
     if info['status'] != 200:
         return False, ''
     else:

@@ -120,7 +120,7 @@ def create(module, hookurl, oauthkey, repo, user):
     headers = {
         'Authorization': 'Basic %s' % auth,
     }
-    response, info = fetch_url(module, url, data=data, headers=headers, validate_certs=module.params['validate_certs'])
+    response, info = fetch_url(module, url, data=data, headers=headers)
     if info['status'] != 200:
         return 0, '[]'
     else:

@@ -132,7 +132,7 @@ def delete(module, hookurl, oauthkey, repo, user, hookid):
     headers = {
         'Authorization': 'Basic %s' % auth,
     }
-    response, info = fetch_url(module, url, data=data, headers=headers, method='DELETE', validate_certs=module.params['validate_certs'])
+    response, info = fetch_url(module, url, data=data, headers=headers, method='DELETE')
     return response.read()

 def main():
@@ -145,6 +145,7 @@ import os
 import re
 import tempfile
 import platform
+import pipes

 CRONCMD = "/usr/bin/crontab"

@@ -190,7 +191,8 @@ class CronTab(object):
             except:
                 raise CronTabError("Unexpected error:", sys.exc_info()[0])
         else:
-            (rc, out, err) = self.module.run_command(self._read_user_execute())
+            # using safely quoted shell for now, but this really should be two non-shell calls instead. FIXME
+            (rc, out, err) = self.module.run_command(self._read_user_execute(), use_unsafe_shell=True)

             if rc != 0 and rc != 1: # 1 can mean that there are no jobs.
                 raise CronTabError("Unable to read crontab")

@@ -235,8 +237,8 @@ class CronTab(object):

         # Add the entire crontab back to the user crontab
         if not self.cron_file:
-            # os.system(self._write_execute(path))
-            (rc, out, err) = self.module.run_command(self._write_execute(path))
+            # quoting shell args for now but really this should be two non-shell calls. FIXME
+            (rc, out, err) = self.module.run_command(self._write_execute(path), use_unsafe_shell=True)
             os.unlink(path)

             if rc != 0:

@@ -350,9 +352,9 @@ class CronTab(object):
         user = ''
         if self.user:
             if platform.system() == 'SunOS':
-                return "su '%s' -c '%s -l'" % (self.user, CRONCMD)
+                return "su %s -c '%s -l'" % (pipes.quote(self.user), pipes.quote(CRONCMD))
             else:
-                user = '-u %s' % self.user
+                user = '-u %s' % pipes.quote(self.user)
         return "%s %s %s" % (CRONCMD , user, '-l')

     def _write_execute(self, path):

@@ -362,10 +364,10 @@ class CronTab(object):
         user = ''
         if self.user:
             if platform.system() == 'SunOS':
-                return "chown %s %s ; su '%s' -c '%s %s'" % (self.user, path, self.user, CRONCMD, path)
+                return "chown %s %s ; su '%s' -c '%s %s'" % (pipes.quote(self.user), pipes.quote(path), pipes.quote(self.user), CRONCMD, pipes.quote(path))
             else:
-                user = '-u %s' % self.user
-        return "%s %s %s" % (CRONCMD , user, path)
+                user = '-u %s' % pipes.quote(self.user)
+        return "%s %s %s" % (CRONCMD , user, pipes.quote(path))

@@ -286,7 +286,7 @@ class FedoraStrategy(GenericStrategy):

     def get_permanent_hostname(self):
         cmd = 'hostnamectl status | awk \'/^ *Static hostname:/{printf("%s", $3)}\''
-        rc, out, err = self.module.run_command(cmd)
+        rc, out, err = self.module.run_command(cmd, use_unsafe_shell=True)
         if rc != 0:
             self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" %
                                   (rc, out, err))
@@ -162,13 +162,13 @@ def main():
             ### create PV
             pvcreate_cmd = module.get_bin_path('pvcreate', True)
             for current_dev in dev_list:
-                rc,_,err = module.run_command("%s %s"%(pvcreate_cmd,current_dev))
+                rc,_,err = module.run_command("%s %s" % (pvcreate_cmd,current_dev))
                 if rc == 0:
                     changed = True
                 else:
-                    module.fail_json(msg="Creating physical volume '%s' failed"%current_dev, rc=rc, err=err)
+                    module.fail_json(msg="Creating physical volume '%s' failed" % current_dev, rc=rc, err=err)
             vgcreate_cmd = module.get_bin_path('vgcreate')
-            rc,_,err = module.run_command("%s -s %s %s %s"%(vgcreate_cmd, pesize, vg, dev_string))
+            rc,_,err = module.run_command("%s -s %s %s %s" % (vgcreate_cmd, pesize, vg, dev_string))
             if rc == 0:
                 changed = True
             else:

@@ -210,7 +210,7 @@ def main():
                     module.fail_json(msg="Creating physical volume '%s' failed"%current_dev, rc=rc, err=err)
             ### add PV to our VG
             vgextend_cmd = module.get_bin_path('vgextend', True)
-            rc,_,err = module.run_command("%s %s %s"%(vgextend_cmd, vg, devs_to_add_string))
+            rc,_,err = module.run_command("%s %s %s" % (vgextend_cmd, vg, devs_to_add_string))
             if rc == 0:
                 changed = True
             else:
@@ -1413,7 +1413,8 @@ class Network(Facts):
                 subclass = sc
         return super(cls, subclass).__new__(subclass, *arguments, **keyword)

-    def __init__(self):
+    def __init__(self, module):
+        self.module = module
         Facts.__init__(self)

     def populate(self):

@@ -1430,11 +1431,10 @@ class LinuxNetwork(Network):
     platform = 'Linux'

     def __init__(self, module):
-        self.module = module
-        Network.__init__(self)
+        Network.__init__(self, module)

     def populate(self):
-        ip_path = module.get_bin_path('ip')
+        ip_path = self.module.get_bin_path('ip')
         if ip_path is None:
             return self.facts
         default_ipv4, default_ipv6 = self.get_default_interfaces(ip_path)

@@ -1650,8 +1650,8 @@ class GenericBsdIfconfigNetwork(Network):
     """
     platform = 'Generic_BSD_Ifconfig'

-    def __init__(self):
-        Network.__init__(self)
+    def __init__(self, module):
+        Network.__init__(self, module)

     def populate(self):
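The fact-gathering hunks stop reaching for a global module object and instead pass it down through the Network subclasses' constructors. A toy sketch of that refactor, with made-up class names rather than the real fact classes:

# Toy illustration of the constructor change: the base class accepts and
# stores the dependency, and subclasses forward it instead of using a global.
class Base(object):
    def __init__(self, module):
        self.module = module

class LinuxVariant(Base):
    def __init__(self, module):
        Base.__init__(self, module)

    def populate(self):
        # uses the stored reference rather than a module-level global
        return self.module.get_bin_path('ip')

class FakeModule(object):
    def get_bin_path(self, name):
        return '/sbin/%s' % name

print(LinuxVariant(FakeModule()).populate())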
@@ -1,3 +1,21 @@
+ansible (1.5.4) unstable; urgency=low
+
+  * 1.5.4 release
+
+ -- Michael DeHaan <michael@ansible.com> Tue, 01 April 2014 08:46:00 -0500
+
+ansible (1.5.3) unstable; urgency=low
+
+  * 1.5.3 release
+
+ -- Michael DeHaan <michael@ansible.com> Thu, 13 March 2014 08:46:00 -0500
+
+ansible (1.5.2) unstable; urgency=low
+
+  * 1.5.2 release
+
+ -- Michael DeHaan <michael@ansible.com> Tue, 11 March 2014 08:46:00 -0500
+
 ansible (1.5.1) unstable; urgency=low

   * 1.5.1 release

@@ -5,7 +5,7 @@
 %endif

 Name: %{name}
-Version: 1.5.1
+Version: 1.5.4
 Release: 1%{?dist}
 Url: http://www.ansible.com
 Summary: SSH-based application deployment, configuration management, and IT orchestration platform

@@ -102,7 +102,16 @@ rm -rf %{buildroot}

 %changelog

-* Fri Mar 10 2014 Michael DeHaan <michael@ansible.com> - 1.5.1
+* Tue Apr 01 2014 Michael DeHaan <michael@ansible.com> - 1.5.4
+- Release 1.5.4
+
+* Thu Mar 13 2014 Michael DeHaan <michael@ansible.com> - 1.5.3
+- Release 1.5.3
+
+* Tue Mar 11 2014 Michael DeHaan <michael@ansible.com> - 1.5.2
+- Release 1.5.2
+
+* Mon Mar 10 2014 Michael DeHaan <michael@ansible.com> - 1.5.1
 - Release 1.5.1

 * Fri Feb 28 2014 Michael DeHaan <michael@ansible.com> - 1.5.0
@@ -15,12 +15,14 @@ def delete_aws_resources(get_func, attr, opts):
     for item in get_func():
         val = getattr(item, attr)
         if re.search(opts.match_re, val):
-            prompt_and_delete("Delete object with %s=%s? [y/n]: " % (attr, val), opts.assumeyes)
+            prompt_and_delete(item, "Delete object with %s=%s? [y/n]: " % (attr, val), opts.assumeyes)

-def prompt_and_delete(prompt, assumeyes):
-    while not assumeyes:
-        assumeyes = raw_input(prompt)
-    obj.delete()
+def prompt_and_delete(item, prompt, assumeyes):
+    if not assumeyes:
+        assumeyes = raw_input(prompt).lower() == 'y'
+    assert hasattr(item, 'delete'), "Class <%s> has no delete attribute" % item.__class__
+    if assumeyes:
+        item.delete()

 def parse_args():
     # Load details from credentials.yml

@@ -7,4 +7,4 @@ test_hash:
   host_vars_testhost: "this is in host_vars/testhost"

 # Support execution from within a virtualenv
-ansible_python_interpreter: ${VIRTUAL_ENV-/usr}/bin/python
+ansible_python_interpreter: '/usr/bin/env python'
@@ -30,7 +30,7 @@
 # fetch module.

 - name: diff what we fetched with the original file
-  shell: diff {{ output_dir }}/orig {{ output_dir }}/fetched/127.0.0.1/root/ansible_testing/orig
+  shell: diff {{ output_dir }}/orig {{ output_dir }}/fetched/127.0.0.1{{ output_dir | expanduser }}/orig
   register: diff

 - name: check the diff to make sure they are the same

@@ -11,7 +11,7 @@
       - "install_result.mode == '0755'"

 - include: 'sysv_setup.yml'
-  when: ansible_distribution in ('RHEL', 'CentOS', 'ScientificLinux')
+  when: ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
 - include: 'systemd_setup.yml'
   when: ansible_distribution == 'Fedora'
 - include: 'upstart_setup.yml'

@@ -101,7 +101,7 @@
       - "remove_result.state == 'absent'"

 - include: 'sysv_cleanup.yml'
-  when: ansible_distribution in ('RHEL', 'CentOS', 'ScientificLinux')
+  when: ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
 - include: 'systemd_cleanup.yml'
   when: ansible_distribution == 'Fedora'
 - include: 'upstart_cleanup.yml'

@@ -1,10 +1,10 @@
 - name: remove the upstart init file
-  file: path=/etc/init/ansible_test state=absent
+  file: path=/etc/init/ansible_test.conf state=absent
   register: remove_upstart_result

 - name: assert that the upstart init file was removed
   assert:
     that:
-      - "remove_upstart_result.path == '/etc/init/ansible_test'"
+      - "remove_upstart_result.path == '/etc/init/ansible_test.conf'"
       - "remove_upstart_result.state == 'absent'"

@@ -1,12 +1,12 @@
 - name: install the upstart init file
-  copy: src=ansible.upstart dest=/etc/init/ansible_test mode=0755
+  copy: src=ansible.upstart dest=/etc/init/ansible_test.conf mode=0644
   register: install_upstart_result

 - name: assert that the upstart init file was installed
   assert:
     that:
-      - "install_upstart_result.dest == '/etc/init/ansible_test'"
+      - "install_upstart_result.dest == '/etc/init/ansible_test.conf'"
       - "install_upstart_result.state == 'file'"
-      - "install_upstart_result.mode == '0755'"
+      - "install_upstart_result.mode == '0644'"
       - "install_upstart_result.md5sum == 'ab3900ea4de8423add764c12aeb90c01'"

@@ -17,5 +17,5 @@
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.

 - include: 'yum.yml'
-  when: ansible_distribution in ('RHEL', 'CentOS', 'ScientificLinux', 'Fedora')
+  when: ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']