#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

# see examples/playbooks/get_url.yml

import shutil
import datetime
import re
import tempfile

DOCUMENTATION = '''
---
module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node
description:
  - Downloads files from HTTP, HTTPS, or FTP to the remote server. The remote
    server I(must) have direct access to the remote resource.
  - By default, if an environment variable C(<protocol>_proxy) is set on
    the target host, requests will be sent through that proxy. This
    behaviour can be overridden by setting a variable for this task
    (see `setting the environment
    <http://docs.ansible.com/playbooks_environment.html>`_),
    or by using the use_proxy option.
  - HTTP redirects can redirect from HTTP to HTTPS so you should be sure that
    your proxy environment for both protocols is correct.
version_added: "0.6"
options:
  url:
    description:
      - HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
    required: true
  dest:
    description:
      - absolute path of where to download the file to.
      - If C(dest) is a directory, either the server provided filename or, if
        none provided, the base name of the URL on the remote server will be
        used. If C(dest) is a directory, C(force) has no effect: the file will
        always be downloaded (regardless of the force option), but only
        replaced if the contents changed.
    required: true
  tmp_dest:
    description:
      - absolute path of where the temporary file is downloaded to.
      - Defaults to the TMPDIR, TEMP or TMP environment variables or a platform-specific value
        (see https://docs.python.org/2/library/tempfile.html#tempfile.tempdir).
    required: false
    default: ''
    version_added: '2.1'
  force:
    description:
      - If C(yes) and C(dest) is not a directory, will download the file every
        time and replace the file if the contents change. If C(no), the file
        will only be downloaded if the destination does not exist. Generally
        should be C(yes) only for small local files. Prior to 0.6, this module
        behaved as if C(yes) was the default.
    version_added: "0.7"
    required: false
    choices: [ "yes", "no" ]
    default: "no"
    aliases: [ "thirsty" ]
  backup:
    description:
      - Create a backup file including the timestamp information so you can get
        the original file back if you somehow clobbered it incorrectly.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
    version_added: '2.1'
  sha256sum:
    description:
      - If a SHA-256 checksum is passed to this parameter, the digest of the
        destination file will be calculated after it is downloaded to ensure
        its integrity and verify that the transfer completed successfully.
        This option is deprecated. Use C(checksum) instead.
    version_added: "1.3"
    required: false
    default: null
  checksum:
    description:
      - 'If a checksum is passed to this parameter, the digest of the
        destination file will be calculated after it is downloaded to ensure
        its integrity and verify that the transfer completed successfully.
        Format: <algorithm>:<checksum>, e.g.: checksum="sha256:D98291AC[...]B6DC7B97"
        If you worry about portability, only the sha1 algorithm is available
        on all platforms and Python versions. The third-party hashlib
        library can be installed for access to additional algorithms.
        Additionally, if a checksum is passed to this parameter and the file exists under
        the C(dest) location, the destination_checksum will be calculated, and if
        it equals the checksum, the file download will be skipped
        (unless C(force) is true).'
    version_added: "2.0"
    required: false
    default: null
  use_proxy:
    description:
      - if C(no), it will not use a proxy, even if one is defined in
        an environment variable on the target hosts.
    required: false
    default: 'yes'
    choices: ['yes', 'no']
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be used
        on personally controlled sites using self-signed certificates.
    required: false
    default: 'yes'
    choices: ['yes', 'no']
  timeout:
    description:
      - Timeout in seconds for URL request
    required: false
    default: 10
    version_added: '1.8'
  headers:
    description:
      - 'Add custom HTTP headers to a request in the format "key:value,key:value"'
    required: false
    default: null
    version_added: '2.0'
  url_username:
    description:
      - The username for use in HTTP basic authentication. This parameter can be used
        without C(url_password) for sites that allow empty passwords.
    required: false
    version_added: '1.6'
  url_password:
    description:
      - The password for use in HTTP basic authentication. If the C(url_username)
        parameter is not specified, the C(url_password) parameter will not be used.
    required: false
    version_added: '1.6'
  force_basic_auth:
    version_added: '2.0'
    description:
      - httplib2, the library used by the uri module, only sends authentication information when a webservice
        responds to an initial request with a 401 status. Since some basic auth services do not properly
        send a 401, logins will fail. This option forces the sending of the Basic authentication header
        upon initial request.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
  others:
    description:
      - all arguments accepted by the M(file) module also work here
    required: false
# informational: requirements for nodes
requirements: [ ]
author: "Jan-Piet Mens (@jpmens)"
'''

EXAMPLES='''
- name: download foo.conf
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf mode=0440

- name: download file and force basic auth
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf force_basic_auth=yes

- name: download file with custom HTTP headers
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf headers='key:value,key:value'

- name: download file with sha256 check
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf checksum=sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c

- name: download file with md5 check
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf checksum=md5:66dffb5228a211e61d6d7ef4a86f5758

- name: download file from a file path
  get_url: url="file:///tmp/afile.txt" dest=/tmp/afilecopy.txt
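
# Further illustrative examples; the URLs, paths and credentials below are placeholders.
- name: download file using HTTP basic authentication
  get_url: url=https://example.com/path/file.conf dest=/etc/foo.conf url_username=bob url_password=secret

- name: download file to a custom temporary directory, keeping a backup of any existing copy
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf tmp_dest=/var/tmp backup=yes timeout=30

- name: download file without going through the configured proxy
  get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf use_proxy=no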
'''

from ansible.module_utils.six.moves.urllib.parse import urlsplit

# ==============================================================
# url handling


def url_filename(url):
    fn = os.path.basename(urlsplit(url)[2])
    if fn == '':
        return 'index.html'
    return fn
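
# e.g. url_filename('http://example.com/dir/file.conf') returns 'file.conf';
# a URL whose path has no file component (such as 'http://example.com/') falls back to 'index.html'.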


def url_get(module, url, dest, use_proxy, last_mod_time, force, timeout=10, headers=None, tmp_dest=''):
    """
    Download data from the url and store in a temporary file.

    Return (tempfile, info about the request)
    """

    rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout, headers=headers)

    if info['status'] == 304:
        module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))

    # create a temporary file and copy content to do checksum-based replacement
    if info['status'] != 200 and not url.startswith('file:/'):
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], url=url, dest=dest)

    if tmp_dest != '':
        # tmp_dest should be an existing dir
        tmp_dest_is_dir = os.path.isdir(tmp_dest)
        if not tmp_dest_is_dir:
            if os.path.exists(tmp_dest):
                module.fail_json(msg="%s is a file but should be a directory." % tmp_dest)
            else:
                module.fail_json(msg="%s directory does not exist." % tmp_dest)

        fd, tempname = tempfile.mkstemp(dir=tmp_dest)
    else:
        fd, tempname = tempfile.mkstemp()
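    # e.g. with tmp_dest='/var/tmp' the temporary file is created as '/var/tmp/tmpXXXXXX';
    # when tmp_dest is unset, tempfile.mkstemp() honours TMPDIR/TEMP/TMP or the platform default.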

    f = os.fdopen(fd, 'wb')
    try:
        shutil.copyfileobj(rsp, f)
    except Exception:
        err = get_exception()
        os.remove(tempname)
        module.fail_json(msg="failed to create temporary content file: %s" % str(err))
    f.close()
    rsp.close()
    return tempname, info


def extract_filename_from_headers(headers):
    """
    Extracts a filename from the given dict of HTTP headers.

    Looks for the content-disposition header and applies a regex.
    Returns the filename if successful, else None."""
    cont_disp_regex = 'attachment; ?filename="?([^"]+)'
    res = None

    if 'content-disposition' in headers:
        cont_disp = headers['content-disposition']
        match = re.match(cont_disp_regex, cont_disp)
        if match:
            res = match.group(1)
            # Try preventing any funny business.
            res = os.path.basename(res)

    return res
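
# e.g. {'content-disposition': 'attachment; filename="foo.conf"'} yields 'foo.conf';
# headers without a matching content-disposition value yield None.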


# ==============================================================
# main


def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url = dict(required=True),
        dest = dict(required=True),
        backup = dict(default=False, type='bool'),
        sha256sum = dict(default=''),
        checksum = dict(default=''),
        timeout = dict(required=False, type='int', default=10),
        headers = dict(required=False, default=None),
        tmp_dest = dict(required=False, default=''),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = argument_spec,
        add_file_common_args=True
    )

    url = module.params['url']
    dest = os.path.expanduser(module.params['dest'])
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = os.path.expanduser(module.params['tmp_dest'])

    # Parse headers to dict
    if module.params['headers']:
        try:
            headers = dict(item.split(':') for item in module.params['headers'].split(','))
        except:
            module.fail_json(msg="The header parameter requires a key:value,key:value syntax to be properly parsed.")
    else:
        headers = None
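    # e.g. headers='Accept:text/html,X-Token:abc123' parses to {'Accept': 'text/html', 'X-Token': 'abc123'};
    # a value that itself contains ':' cannot be expressed in this format and triggers the failure above.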

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum != '':
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum != '':
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>")
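    # e.g. checksum='sha256:B5BB9D80...' is split into algorithm='sha256' and the
    # lower-cased hex digest 'b5bb9d80...'; anything not matching <algorithm>:<hexdigest> fails above.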

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed)
            module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if the destination file already exists
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            shutil.copyfile(tmpsrc, dest)
        except Exception:
            err = get_exception()
            os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
        changed = True
    else:
        changed = False

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum))

    os.remove(tmpsrc)

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(
        url = url, dest = dest, src = tmpsrc, md5sum = md5sum, checksum_src = checksum_src,
        checksum_dest = checksum_dest, changed = changed, msg = info.get('msg', '')
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)


# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

if __name__ == '__main__':
    main()