#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

# see examples/playbooks/get_url.yml

import os
import shutil
import datetime
import re
import tempfile

DOCUMENTATION = '''
---
module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node
description:
    - Downloads files from HTTP, HTTPS, or FTP to the remote server. The remote
      server I(must) have direct access to the remote resource.
    - By default, if an environment variable C(<protocol>_proxy) is set on
      the target host, requests will be sent through that proxy. This
      behaviour can be overridden by setting a variable for this task
      (see `setting the environment
      <http://docs.ansible.com/playbooks_environment.html>`_),
      or by using the use_proxy option.
version_added: "0.6"
options:
  url:
    description:
      - HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
    required: true
    default: null
    aliases: []
  dest:
    description:
      - Absolute path of where to download the file to.
      - If C(dest) is a directory, either the server-provided filename or, if
        none is provided, the base name of the URL on the remote server will be
        used. In that case C(force) has no effect; the file is always
        downloaded, but only replaced if its contents changed.
    required: true
    default: null
  force:
    description:
      - If C(yes) and C(dest) is not a directory, will download the file every
        time and replace the file if the contents change. If C(no), the file
        will only be downloaded if the destination does not exist. Generally
        should be C(yes) only for small local files. Prior to 0.6, this module
        behaved as if C(yes) was the default.
    version_added: "0.7"
    required: false
    choices: [ "yes", "no" ]
    default: "no"
    aliases: [ "thirsty" ]
  sha256sum:
    description:
      - If a SHA-256 checksum is passed to this parameter, the digest of the
        destination file will be calculated after it is downloaded to ensure
        its integrity and verify that the transfer completed successfully.
    version_added: "1.3"
    required: false
    default: null
  use_proxy:
    description:
      - If C(no), it will not use a proxy, even if one is defined in
        an environment variable on the target hosts.
    required: false
    default: 'yes'
    choices: ['yes', 'no']
  others:
    description:
      - all arguments accepted by the M(file) module also work here
    required: false
notes:
    - This module doesn't yet support explicit proxy configuration as a module
      parameter; proxies are only picked up from environment variables on the
      target host (and can be bypassed with C(use_proxy)).
# informational: requirements for nodes
requirements: [ urllib2, urlparse ]
author: Jan-Piet Mens
'''

EXAMPLES='''
- name: download foo.conf
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf mode=0440

- name: download file with sha256 check
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf sha256sum=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
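
# A hypothetical additional example (placeholder URL and paths): C(dest) may be
# a directory, and use_proxy=no bypasses any <protocol>_proxy environment
# variable set on the target host.
- name: download a tarball directly, keeping the server-provided filename
  get_url: url=http://example.com/releases/app-1.0.tar.gz dest=/tmp/downloads use_proxy=no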
|
2013-02-23 18:03:33 +01:00
|
|
|
'''
|
|
|
|
|
2013-06-05 07:28:28 +02:00
|
|
|
try:
    import hashlib
    HAS_HASHLIB = True
except ImportError:
    HAS_HASHLIB = False

try:
    import urllib2
    HAS_URLLIB2 = True
except ImportError:
    HAS_URLLIB2 = False

try:
    import urlparse
    import socket
    HAS_URLPARSE = True
except ImportError:
    HAS_URLPARSE = False

# ==============================================================
# url handling

def url_filename(url):
    fn = os.path.basename(urlparse.urlsplit(url)[2])
    if fn == '':
        return 'index.html'
    return fn

def url_do_get(module, url, dest, use_proxy, last_mod_time, force):
    """
    Get url and return request and info
    Credits: http://stackoverflow.com/questions/7006574/how-to-download-file-from-ftp
    """

    USERAGENT = 'ansible-httpget'
    info = dict(url=url, dest=dest)
    r = None
    handlers = []

    parsed = urlparse.urlparse(url)

    if '@' in parsed[1]:
        credentials, netloc = parsed[1].split('@', 1)
        if ':' in credentials:
            username, password = credentials.split(':', 1)
        else:
            username = credentials
            password = ''
        parsed = list(parsed)
        parsed[1] = netloc

        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        # this creates a password manager
        passman.add_password(None, netloc, username, password)
        # because we have put None at the start it will always
        # use this username/password combination for urls
        # for which `theurl` is a super-url

        authhandler = urllib2.HTTPBasicAuthHandler(passman)
        # create the AuthHandler
        handlers.append(authhandler)

        # reconstruct url without credentials
        url = urlparse.urlunparse(parsed)

    if not use_proxy:
        proxyhandler = urllib2.ProxyHandler({})
        handlers.append(proxyhandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
    request = urllib2.Request(url)
    request.add_header('User-agent', USERAGENT)
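    # If we already have a local copy and force is not set, ask the server to
    # send the body only when it is newer than our copy's mtime; a 304 response
    # then lets the caller skip the download entirely.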
    if last_mod_time and not force:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)
    else:
        request.add_header('cache-control', 'no-cache')

    try:
        r = urllib2.urlopen(request)
        info.update(r.info())
        info['url'] = r.geturl() # The URL goes in too, because of redirects.
        info.update(dict(msg="OK (%s bytes)" % r.headers.get('Content-Length', 'unknown'), status=200))
    except urllib2.HTTPError, e:
        # Must not fail_json() here so caller can handle HTTP 304 unmodified
        info.update(dict(msg=str(e), status=e.code))
    except urllib2.URLError, e:
        code = getattr(e, 'code', -1)
        module.fail_json(msg="Request failed: %s" % str(e), status_code=code)

    return r, info

def url_get(module, url, dest, use_proxy, last_mod_time, force):
    """
    Download data from the url and store in a temporary file.

    Return (tempfile, info about the request)
    """

    req, info = url_do_get(module, url, dest, use_proxy, last_mod_time, force)

    if info['status'] == 304:
        module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))

    # create a temporary file and copy content to do md5-based replacement
    if info['status'] != 200:
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], url=url, dest=dest)

    fd, tempname = tempfile.mkstemp()
    f = os.fdopen(fd, 'wb')
    try:
        shutil.copyfileobj(req, f)
    except Exception, err:
        os.remove(tempname)
        module.fail_json(msg="failed to create temporary content file: %s" % str(err))
    f.close()
    req.close()
    return tempname, info

def extract_filename_from_headers(headers):
    """
    Extracts a filename from the given dict of HTTP headers.

    Looks for the content-disposition header and applies a regex.
    Returns the filename if successful, else None."""
    cont_disp_regex = 'attachment; ?filename="(.+)"'
    res = None

    if 'content-disposition' in headers:
        cont_disp = headers['content-disposition']
        match = re.match(cont_disp_regex, cont_disp)
        if match:
            res = match.group(1)
            # Strip any path components the server may have sent, so the
            # result can only be a bare file name.
            res = os.path.basename(res)

    return res

# ==============================================================
# main

def main():
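    # Overall flow: download the URL into a temporary file, compare its md5
    # with any existing destination, copy it into place only when the contents
    # differ, optionally verify a sha256sum, then apply the common file
    # attributes (owner, group, mode, ...).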
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = dict(
            url = dict(required=True),
            dest = dict(required=True),
            force = dict(default='no', aliases=['thirsty'], type='bool'),
            sha256sum = dict(default=''),
            use_proxy = dict(default='yes', type='bool')
        ),
        add_file_common_args=True
    )

    # does this really happen on non-ancient python?
    if not HAS_URLLIB2:
        module.fail_json(msg="urllib2 is not installed")
    if not HAS_URLPARSE:
        module.fail_json(msg="urlparse is not installed")

    url = module.params['url']
    dest = os.path.expanduser(module.params['dest'])
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    use_proxy = module.params['use_proxy']

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    if not dest_is_dir and os.path.exists(dest):
        if not force:
            module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    md5sum_src = None
    md5sum_dest = None

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s not readable" % (tmpsrc))
    md5sum_src = module.md5(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not readable" % (dest))
        md5sum_dest = module.md5(dest)
    else:
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    if md5sum_src != md5sum_dest:
        try:
            shutil.copyfile(tmpsrc, dest)
        except Exception, err:
            os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
        changed = True
    else:
        changed = False

    # Check the digest of the destination file and ensure that it matches the
    # sha256sum parameter if it is present
    if sha256sum != '':
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        stripped_sha256sum = re.sub(r'\W+', '', sha256sum)

        if not HAS_HASHLIB:
            os.remove(dest)
            module.fail_json(msg="The sha256sum parameter requires hashlib, which is available in Python 2.5 and higher")
        else:
            destination_checksum = module.sha256(dest)

            if stripped_sha256sum != destination_checksum:
                os.remove(dest)
                module.fail_json(msg="The SHA-256 checksum for %s did not match %s; it was %s." % (dest, sha256sum, destination_checksum))

    os.remove(tmpsrc)

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_file_attributes_if_different(file_args, changed)

    # Mission complete
    module.exit_json(url=url, dest=dest, src=tmpsrc, md5sum=md5sum_src,
        sha256sum=sha256sum, changed=changed, msg=info.get('msg', ''))

# import module snippets
from ansible.module_utils.basic import *
main()