2013-10-21 22:22:42 +02:00
|
|
|
#!/usr/bin/python
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
|
|
|
# (c) 2013, Dylan Martin <dmartin@seattlecentral.edu>
|
2015-04-15 18:24:45 +02:00
|
|
|
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
|
2016-04-18 22:31:06 +02:00
|
|
|
# (c) 2016, Dag Wieers <dag@wieers.com>
|
2013-10-21 22:22:42 +02:00
|
|
|
#
|
|
|
|
# This file is part of Ansible
|
|
|
|
#
|
|
|
|
# Ansible is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# Ansible is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2013-10-21 23:10:55 +02:00
|
|
|
DOCUMENTATION = '''
|
|
|
|
---
|
|
|
|
module: unarchive
|
2013-11-19 00:55:49 +01:00
|
|
|
version_added: 1.4
|
2015-02-17 14:48:59 +01:00
|
|
|
short_description: Unpacks an archive after (optionally) copying it from the local machine.
|
2014-10-02 03:30:25 +02:00
|
|
|
extends_documentation_fragment: files
|
2013-10-21 23:10:55 +02:00
|
|
|
description:
|
2016-07-14 18:24:23 +02:00
|
|
|
- The M(unarchive) module unpacks an archive. By default, it will copy the source file from the local system to the target before unpacking - set remote_src=yes to unpack an archive which already exists on the target.
|
2013-10-21 23:10:55 +02:00
|
|
|
options:
|
|
|
|
src:
|
|
|
|
description:
|
2016-07-14 18:24:23 +02:00
|
|
|
- If remote_src=no (default), local path to archive file to copy to the target server; can be absolute or relative. If remote_src=yes, path on the target server to existing archive file to unpack.
|
|
|
|
- If remote_src=yes and src contains ://, the remote machine will download the file from the url first. (version_added 2.0)
|
2013-10-21 23:10:55 +02:00
|
|
|
required: true
|
|
|
|
default: null
|
|
|
|
dest:
|
|
|
|
description:
|
|
|
|
- Remote absolute path where the archive should be unpacked
|
|
|
|
required: true
|
|
|
|
default: null
|
2013-12-03 02:51:10 +01:00
|
|
|
copy:
|
|
|
|
description:
|
2015-02-17 14:48:59 +01:00
|
|
|
- "If true, the file is copied from local 'master' to the target machine, otherwise, the plugin will look for src archive at the target machine."
|
2016-07-14 18:24:23 +02:00
|
|
|
- "This option has been deprecated in favor of C(remote_src)"
|
|
|
|
- "This option is mutually exclusive with C(remote_src)."
|
2013-12-03 02:51:10 +01:00
|
|
|
required: false
|
|
|
|
choices: [ "yes", "no" ]
|
|
|
|
default: "yes"
|
2014-03-08 01:19:46 +01:00
|
|
|
creates:
|
|
|
|
description:
|
|
|
|
- a filename, when it already exists, this step will B(not) be run.
|
|
|
|
required: no
|
|
|
|
default: null
|
|
|
|
version_added: "1.6"
|
2015-04-15 18:24:45 +02:00
|
|
|
list_files:
|
|
|
|
description:
|
|
|
|
- If set to True, return the list of files that are contained in the tarball.
|
|
|
|
required: false
|
|
|
|
choices: [ "yes", "no" ]
|
|
|
|
default: "no"
|
|
|
|
version_added: "2.0"
|
2016-04-18 22:31:06 +02:00
|
|
|
exclude:
|
|
|
|
description:
|
|
|
|
- List the directory and file entries that you would like to exclude from the unarchive action.
|
|
|
|
required: false
|
|
|
|
default: []
|
|
|
|
version_added: "2.1"
|
|
|
|
keep_newer:
|
|
|
|
description:
|
|
|
|
- Do not replace existing files that are newer than files from the archive.
|
|
|
|
required: false
|
|
|
|
default: no
|
|
|
|
version_added: "2.1"
|
|
|
|
extra_opts:
|
|
|
|
description:
|
|
|
|
- Specify additional options by passing in an array.
|
|
|
|
default:
|
|
|
|
required: false
|
|
|
|
version_added: "2.1"
|
2016-07-14 18:24:23 +02:00
|
|
|
remote_src:
|
|
|
|
description:
|
|
|
|
- "Set to C(yes) to indicate the archived file is already on the remote system and not local to the Ansible controller."
|
|
|
|
- "This option is mutually exclusive with C(copy)."
|
2016-07-14 20:05:00 +02:00
|
|
|
required: false
|
|
|
|
default: "no"
|
|
|
|
choices: ["yes", "no"]
|
|
|
|
version_added: "2.2"
|
2016-07-14 18:24:23 +02:00
|
|
|
validate_certs:
|
|
|
|
description:
|
|
|
|
- This only applies if using a https url as the source of the file.
|
|
|
|
- This should only be set to C(no) on personally controlled sites using self-signed certificates.
|
|
|
|
- Prior to 2.2 the code worked as if this was set to C(yes).
|
|
|
|
required: false
|
|
|
|
default: "yes"
|
|
|
|
choices: ["yes", "no"]
|
|
|
|
version_added: "2.2"
|
2016-04-18 22:31:06 +02:00
|
|
|
author: "Dag Wieers (@dagwieers)"
|
2013-10-21 23:10:55 +02:00
|
|
|
todo:
|
2016-04-18 22:31:06 +02:00
|
|
|
- re-implement tar support using native tarfile module
|
|
|
|
- re-implement zip support using native zipfile module
|
2013-10-21 23:10:55 +02:00
|
|
|
notes:
|
2016-04-18 22:31:06 +02:00
|
|
|
- requires C(gtar)/C(unzip) command on target host
|
2016-08-11 14:53:37 +02:00
|
|
|
- can handle I(.zip) files using C(unzip) as well as I(.tar), I(.tar.gz), I(.tar.bz2) and I(.tar.xz) files using C(gtar)
|
2016-04-18 22:31:06 +02:00
|
|
|
- uses gtar's C(--diff arg) to calculate if changed or not. If this C(arg) is not
|
2013-10-21 23:10:55 +02:00
|
|
|
supported, it will always unpack the archive
|
2013-11-22 00:22:20 +01:00
|
|
|
- existing files/directories in the destination which are not in the archive
|
2013-10-21 23:10:55 +02:00
|
|
|
are not touched. This is the same behavior as a normal archive extraction
|
2013-11-22 00:22:20 +01:00
|
|
|
- existing files/directories in the destination which are not in the archive
|
2013-10-21 23:10:55 +02:00
|
|
|
are ignored for purposes of deciding if the archive should be unpacked or not
|
|
|
|
'''
|
|
|
|
|
|
|
|
EXAMPLES = '''
|
|
|
|
# Example from Ansible Playbooks
|
|
|
|
- unarchive: src=foo.tgz dest=/var/lib/foo
|
2014-07-19 05:22:37 +02:00
|
|
|
|
2014-07-28 18:07:31 +02:00
|
|
|
# Unarchive a file that is already on the remote machine
|
2016-07-14 18:24:23 +02:00
|
|
|
- unarchive: src=/tmp/foo.zip dest=/usr/local/bin remote_src=yes
|
2015-06-01 01:03:35 +02:00
|
|
|
|
2015-07-14 16:18:37 +02:00
|
|
|
# Unarchive a file that needs to be downloaded (added in 2.0)
|
2016-07-14 18:24:23 +02:00
|
|
|
- unarchive: src=https://example.com/example.zip dest=/usr/local/bin remote_src=yes
|
2013-10-21 23:10:55 +02:00
|
|
|
'''
|
|
|
|
|
2015-04-15 14:05:41 +02:00
|
|
|
import re
|
2013-10-21 22:22:42 +02:00
|
|
|
import os
|
2016-04-18 22:31:06 +02:00
|
|
|
import stat
|
|
|
|
import pwd
|
|
|
|
import grp
|
|
|
|
import datetime
|
|
|
|
import time
|
|
|
|
import binascii
|
2016-07-06 23:40:48 +02:00
|
|
|
import codecs
|
2016-05-27 01:40:50 +02:00
|
|
|
from zipfile import ZipFile, BadZipfile
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2016-08-25 15:25:21 +02:00
|
|
|
try: # python 3.3+
|
|
|
|
from shlex import quote
|
|
|
|
except ImportError: # older python
|
|
|
|
from pipes import quote
|
|
|
|
|
2015-04-15 14:05:41 +02:00
|
|
|
# String from tar that shows the tar contents are different from the
|
|
|
|
# filesystem
|
2016-04-18 22:31:06 +02:00
|
|
|
OWNER_DIFF_RE = re.compile(r': Uid differs$')
|
|
|
|
GROUP_DIFF_RE = re.compile(r': Gid differs$')
|
|
|
|
MODE_DIFF_RE = re.compile(r': Mode differs$')
|
2016-06-10 23:35:08 +02:00
|
|
|
MOD_TIME_DIFF_RE = re.compile(r': Mod time differs$')
|
2016-04-18 22:31:06 +02:00
|
|
|
#NEWER_DIFF_RE = re.compile(r' is newer or same age.$')
|
2016-08-25 15:25:21 +02:00
|
|
|
EMPTY_FILE_RE = re.compile(r': : Warning: Cannot stat: No such file or directory$')
|
2016-04-18 22:31:06 +02:00
|
|
|
MISSING_FILE_RE = re.compile(r': Warning: Cannot stat: No such file or directory$')
|
2016-08-10 23:54:29 +02:00
|
|
|
ZIP_FILE_MODE_RE = re.compile(r'([r-][w-][SsTtx-]){3}')
|
2015-06-24 15:48:57 +02:00
|
|
|
# When downloading an archive, how much of the archive to download before
|
|
|
|
# saving to a tempfile (64k)
|
|
|
|
BUFSIZE = 65536
|
2015-04-15 14:05:41 +02:00
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
def crc32(path):
    ''' Return the CRC32 checksum of a file as an unsigned 32-bit int.

    The file is read in binary mode: CRC32 is defined over bytes, and under
    Python 3 a text-mode read would return str (which binascii.crc32 rejects)
    and could fail outright on non-UTF-8 archive content.  The context
    manager guarantees the file handle is closed (the previous version leaked
    it).  The 0xffffffff mask normalizes the signed result that Python 2's
    binascii.crc32 can return.
    '''
    with open(path, 'rb') as f:
        return binascii.crc32(f.read()) & 0xffffffff
|
|
|
|
|
2016-08-25 15:25:21 +02:00
|
|
|
def shell_escape(string):
    ''' Quote meta-characters in the args for the unix shell '''
    # Prefix every character that is not a plain word character (letter,
    # digit or underscore) with a backslash so the shell takes it literally.
    unsafe_char = re.compile(r'([^A-Za-z0-9_])')
    return unsafe_char.sub(r'\\\1', string)
|
|
|
|
|
2015-01-14 22:10:13 +01:00
|
|
|
class UnarchiveError(Exception):
    ''' Raised when an archive cannot be listed, compared or unpacked. '''
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
# class to handle .zip files
|
2015-01-22 01:14:11 +01:00
|
|
|
class ZipArchive(object):
|
2015-01-14 22:10:13 +01:00
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
    def __init__(self, src, dest, file_args, module):
        # Path to the .zip archive on the managed host
        self.src = src
        # Directory the archive is extracted into
        self.dest = dest
        # owner/group/mode settings forwarded from the module's file arguments
        self.file_args = file_args
        # Extra command-line options passed straight to the unzip binary
        self.opts = module.params['extra_opts']
        self.module = module
        # Archive members to skip entirely
        self.excludes = module.params['exclude']
        # Filled by is_unarchived() with the members that need (re)extraction
        self.includes = []
        # May be None when unzip is not installed; checked in can_handle_archive()
        self.cmd_path = self.module.get_bin_path('unzip')
        # Lazily-populated caches: member names and per-member CRC32 values
        self._files_in_archive = []
        self._infodict = dict()
|
|
|
|
|
|
|
|
def _permstr_to_octal(self, modestr, umask):
|
|
|
|
''' Convert a Unix permission string (rw-r--r--) into a mode (0644) '''
|
|
|
|
revstr = modestr[::-1]
|
|
|
|
mode = 0
|
|
|
|
for j in range(0, 3):
|
|
|
|
for i in range(0, 3):
|
|
|
|
if revstr[i+3*j] in ['r', 'w', 'x', 's', 't']:
|
|
|
|
mode += 2**(i+3*j)
|
|
|
|
# The unzip utility does not support setting the stST bits
|
|
|
|
# if revstr[i+3*j] in ['s', 't', 'S', 'T' ]:
|
|
|
|
# mode += 2**(9+j)
|
|
|
|
return ( mode & ~umask )
|
|
|
|
|
2016-05-27 01:40:50 +02:00
|
|
|
    def _legacy_file_list(self, force_refresh=False):
        ''' Populate the member-name and CRC caches by shelling out to unzip -v.

        Fallback used when Python's zipfile cannot parse the archive (e.g.
        Python 2.4 with more than 64K members).  force_refresh is accepted for
        interface symmetry with files_in_archive but is not used here.
        '''
        unzip_bin = self.module.get_bin_path('unzip')
        if not unzip_bin:
            raise UnarchiveError('Python Zipfile cannot read %s and unzip not found' % self.src)

        rc, out, err = self.module.run_command([unzip_bin, '-v', self.src])
        if rc:
            raise UnarchiveError('Neither python zipfile nor unzip can read %s' % self.src)

        # Skip unzip's header (first 3 lines) and footer (last 2 lines);
        # splitting on at most 7 whitespace runs keeps names containing
        # spaces intact in fields[7].
        for line in out.splitlines()[3:-2]:
            fields = line.split(None, 7)
            self._files_in_archive.append(fields[7])
            # NOTE(review): 'long' makes this Python-2-only, and unzip -v
            # prints the CRC column in hex while long() parses decimal —
            # verify against actual unzip output.
            self._infodict[fields[7]] = long(fields[6])
|
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
    def _crc32(self, path):
        ''' Return the CRC32 recorded inside the archive for member *path*.

        The CRC table is built lazily on first call: preferably via zipfile's
        infolist(), falling back to the unzip binary for archives zipfile
        cannot parse.  Raises KeyError if *path* is not a member.
        '''
        if self._infodict:
            # Table already built by a previous call
            return self._infodict[path]

        try:
            archive = ZipFile(self.src)
        except BadZipfile:
            e = get_exception()
            if e.args[0].lower().startswith('bad magic number'):
                # Python2.4 can't handle zipfiles with > 64K files. Try using
                # /usr/bin/unzip instead
                self._legacy_file_list()
            else:
                raise
        else:
            try:
                # 'long' is Python-2 syntax; item.CRC is already an int
                for item in archive.infolist():
                    self._infodict[item.filename] = long(item.CRC)
            except:
                archive.close()
                raise UnarchiveError('Unable to list files in the archive')
        # NOTE(review): on the successful zipfile path the archive handle is
        # never closed here — it is left to garbage collection.

        return self._infodict[path]
|
2015-01-14 22:10:13 +01:00
|
|
|
|
|
|
|
    @property
    def files_in_archive(self, force_refresh=False):
        ''' Cached list of member names, minus excluded entries.

        NOTE(review): because this is a property, callers can never actually
        pass force_refresh; the parameter is effectively dead and always
        takes its default.
        '''
        if self._files_in_archive and not force_refresh:
            return self._files_in_archive

        self._files_in_archive = []
        try:
            archive = ZipFile(self.src)
        except BadZipfile:
            e = get_exception()
            if e.args[0].lower().startswith('bad magic number'):
                # Python2.4 can't handle zipfiles with > 64K files. Try using
                # /usr/bin/unzip instead
                self._legacy_file_list(force_refresh)
            else:
                raise
        else:
            try:
                for member in archive.namelist():
                    if member not in self.excludes:
                        self._files_in_archive.append(member)
            except:
                archive.close()
                raise UnarchiveError('Unable to list files in the archive')

            archive.close()
        return self._files_in_archive
|
2013-10-21 22:22:42 +02:00
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
def is_unarchived(self):
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd = [ self.cmd_path, '-ZT', '-s', self.src ]
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.excludes:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.extend([ ' -x ', ] + self.excludes)
|
2016-04-18 22:31:06 +02:00
|
|
|
rc, out, err = self.module.run_command(cmd)
|
|
|
|
|
|
|
|
old_out = out
|
|
|
|
diff = ''
|
|
|
|
out = ''
|
|
|
|
if rc == 0:
|
|
|
|
unarchived = True
|
|
|
|
else:
|
|
|
|
unarchived = False
|
|
|
|
|
|
|
|
# Get some information related to user/group ownership
|
|
|
|
umask = os.umask(0)
|
|
|
|
os.umask(umask)
|
|
|
|
|
|
|
|
# Get current user and group information
|
|
|
|
groups = os.getgroups()
|
|
|
|
run_uid = os.getuid()
|
|
|
|
run_gid = os.getgid()
|
|
|
|
try:
|
|
|
|
run_owner = pwd.getpwuid(run_uid).pw_name
|
|
|
|
except:
|
|
|
|
run_owner = run_uid
|
|
|
|
try:
|
|
|
|
run_group = grp.getgrgid(run_gid).gr_name
|
|
|
|
except:
|
|
|
|
run_group = run_gid
|
|
|
|
|
|
|
|
# Get future user ownership
|
|
|
|
fut_owner = fut_uid = None
|
|
|
|
if self.file_args['owner']:
|
|
|
|
try:
|
|
|
|
tpw = pwd.getpwname(self.file_args['owner'])
|
|
|
|
except:
|
|
|
|
try:
|
|
|
|
tpw = pwd.getpwuid(self.file_args['owner'])
|
|
|
|
except:
|
|
|
|
tpw = pwd.getpwuid(run_uid)
|
|
|
|
fut_owner = tpw.pw_name
|
|
|
|
fut_uid = tpw.pw_uid
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
fut_owner = run_owner
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
fut_uid = run_uid
|
|
|
|
|
|
|
|
# Get future group ownership
|
|
|
|
fut_group = fut_gid = None
|
|
|
|
if self.file_args['group']:
|
|
|
|
try:
|
|
|
|
tgr = grp.getgrnam(self.file_args['group'])
|
|
|
|
except:
|
|
|
|
try:
|
|
|
|
tgr = grp.getgrgid(self.file_args['group'])
|
|
|
|
except:
|
|
|
|
tgr = grp.getgrgid(run_gid)
|
|
|
|
fut_group = tgr.gr_name
|
|
|
|
fut_gid = tgr.gr_gid
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
fut_group = run_group
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
fut_gid = run_gid
|
|
|
|
|
|
|
|
for line in old_out.splitlines():
|
|
|
|
change = False
|
|
|
|
|
2016-06-10 17:05:22 +02:00
|
|
|
pcs = line.split(None, 7)
|
2016-06-27 22:18:12 +02:00
|
|
|
if len(pcs) != 8:
|
|
|
|
# Too few fields... probably a piece of the header or footer
|
|
|
|
continue
|
2016-06-10 17:05:22 +02:00
|
|
|
|
|
|
|
# Check first and seventh field in order to skip header/footer
|
|
|
|
if len(pcs[0]) != 7 and len(pcs[0]) != 10: continue
|
|
|
|
if len(pcs[6]) != 15: continue
|
2016-04-18 22:31:06 +02:00
|
|
|
|
2016-06-27 22:18:12 +02:00
|
|
|
if pcs[0][0] not in 'dl-?' or not frozenset(pcs[0][1:]).issubset('rwxst-'):
|
|
|
|
continue
|
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
ztype = pcs[0][0]
|
2016-06-27 22:18:12 +02:00
|
|
|
permstr = pcs[0][1:]
|
2016-06-10 17:05:22 +02:00
|
|
|
version = pcs[1]
|
|
|
|
ostype = pcs[2]
|
2016-04-18 22:31:06 +02:00
|
|
|
size = int(pcs[3])
|
|
|
|
path = pcs[7]
|
|
|
|
|
|
|
|
# Skip excluded files
|
|
|
|
if path in self.excludes:
|
|
|
|
out += 'Path %s is excluded on request\n' % path
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Itemized change requires L for symlink
|
|
|
|
if path[-1] == '/':
|
|
|
|
if ztype != 'd':
|
|
|
|
err += 'Path %s incorrectly tagged as "%s", but is a directory.\n' % (path, ztype)
|
|
|
|
ftype = 'd'
|
|
|
|
elif ztype == 'l':
|
|
|
|
ftype = 'L'
|
|
|
|
elif ztype == '-':
|
|
|
|
ftype = 'f'
|
|
|
|
elif ztype == '?':
|
|
|
|
ftype = 'f'
|
|
|
|
|
|
|
|
# Some files may be storing FAT permissions, not Unix permissions
|
|
|
|
if len(permstr) == 6:
|
|
|
|
if path[-1] == '/':
|
|
|
|
permstr = 'rwxrwxrwx'
|
|
|
|
elif permstr == 'rwx---':
|
|
|
|
permstr = 'rwxrwxrwx'
|
|
|
|
else:
|
|
|
|
permstr = 'rw-rw-rw-'
|
|
|
|
|
|
|
|
# Test string conformity
|
|
|
|
if len(permstr) != 9 or not ZIP_FILE_MODE_RE.match(permstr):
|
|
|
|
raise UnarchiveError('ZIP info perm format incorrect, %s' % permstr)
|
|
|
|
|
|
|
|
# DEBUG
|
|
|
|
# err += "%s%s %10d %s\n" % (ztype, permstr, size, path)
|
|
|
|
|
|
|
|
dest = os.path.join(self.dest, path)
|
|
|
|
try:
|
|
|
|
st = os.lstat(dest)
|
|
|
|
except:
|
|
|
|
change = True
|
|
|
|
self.includes.append(path)
|
|
|
|
err += 'Path %s is missing\n' % path
|
|
|
|
diff += '>%s++++++.?? %s\n' % (ftype, path)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Compare file types
|
|
|
|
if ftype == 'd' and not stat.S_ISDIR(st.st_mode):
|
|
|
|
change = True
|
|
|
|
self.includes.append(path)
|
|
|
|
err += 'File %s already exists, but not as a directory\n' % path
|
|
|
|
diff += 'c%s++++++.?? %s\n' % (ftype, path)
|
|
|
|
continue
|
|
|
|
|
|
|
|
if ftype == 'f' and not stat.S_ISREG(st.st_mode):
|
|
|
|
change = True
|
|
|
|
unarchived = False
|
|
|
|
self.includes.append(path)
|
|
|
|
err += 'Directory %s already exists, but not as a regular file\n' % path
|
|
|
|
diff += 'c%s++++++.?? %s\n' % (ftype, path)
|
|
|
|
continue
|
|
|
|
|
|
|
|
if ftype == 'L' and not stat.S_ISLNK(st.st_mode):
|
|
|
|
change = True
|
|
|
|
self.includes.append(path)
|
|
|
|
err += 'Directory %s already exists, but not as a symlink\n' % path
|
|
|
|
diff += 'c%s++++++.?? %s\n' % (ftype, path)
|
|
|
|
continue
|
|
|
|
|
2016-05-02 20:29:04 +02:00
|
|
|
itemized = list('.%s.......??' % ftype)
|
2016-04-18 22:31:06 +02:00
|
|
|
|
2016-06-27 22:18:12 +02:00
|
|
|
# Note: this timestamp calculation has a rounding error
|
|
|
|
# somewhere... unzip and this timestamp can be one second off
|
|
|
|
# When that happens, we report a change and re-unzip the file
|
2016-04-18 22:31:06 +02:00
|
|
|
dt_object = datetime.datetime(*(time.strptime(pcs[6], '%Y%m%d.%H%M%S')[0:6]))
|
|
|
|
timestamp = time.mktime(dt_object.timetuple())
|
|
|
|
|
|
|
|
# Compare file timestamps
|
|
|
|
if stat.S_ISREG(st.st_mode):
|
|
|
|
if self.module.params['keep_newer']:
|
|
|
|
if timestamp > st.st_mtime:
|
|
|
|
change = True
|
|
|
|
self.includes.append(path)
|
|
|
|
err += 'File %s is older, replacing file\n' % path
|
|
|
|
itemized[4] = 't'
|
|
|
|
elif stat.S_ISREG(st.st_mode) and timestamp < st.st_mtime:
|
|
|
|
# Add to excluded files, ignore other changes
|
|
|
|
out += 'File %s is newer, excluding file\n' % path
|
|
|
|
continue
|
|
|
|
else:
|
|
|
|
if timestamp != st.st_mtime:
|
|
|
|
change = True
|
|
|
|
self.includes.append(path)
|
|
|
|
err += 'File %s differs in mtime (%f vs %f)\n' % (path, timestamp, st.st_mtime)
|
|
|
|
itemized[4] = 't'
|
|
|
|
|
|
|
|
# Compare file sizes
|
|
|
|
if stat.S_ISREG(st.st_mode) and size != st.st_size:
|
|
|
|
change = True
|
|
|
|
err += 'File %s differs in size (%d vs %d)\n' % (path, size, st.st_size)
|
|
|
|
itemized[3] = 's'
|
|
|
|
|
|
|
|
# Compare file checksums
|
|
|
|
if stat.S_ISREG(st.st_mode):
|
|
|
|
crc = crc32(dest)
|
|
|
|
if crc != self._crc32(path):
|
|
|
|
change = True
|
|
|
|
err += 'File %s differs in CRC32 checksum (0x%08x vs 0x%08x)\n' % (path, self._crc32(path), crc)
|
|
|
|
itemized[2] = 'c'
|
|
|
|
|
|
|
|
# Compare file permissions
|
|
|
|
|
|
|
|
# Do not handle permissions of symlinks
|
|
|
|
if ftype != 'L':
|
2016-07-07 00:16:51 +02:00
|
|
|
|
|
|
|
# Use the new mode provided with the action, if there is one
|
|
|
|
if self.file_args['mode']:
|
|
|
|
if isinstance(self.file_args['mode'], int):
|
|
|
|
mode = self.file_args['mode']
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
mode = int(self.file_args['mode'], 8)
|
|
|
|
except Exception:
|
|
|
|
e = get_exception()
|
|
|
|
self.module.fail_json(path=path, msg="mode %(mode)s must be in octal form" % self.file_args, details=str(e))
|
2016-04-18 22:31:06 +02:00
|
|
|
# Only special files require no umask-handling
|
2016-07-07 00:16:51 +02:00
|
|
|
elif ztype == '?':
|
2016-04-18 22:31:06 +02:00
|
|
|
mode = self._permstr_to_octal(permstr, 0)
|
|
|
|
else:
|
|
|
|
mode = self._permstr_to_octal(permstr, umask)
|
2016-07-07 00:16:51 +02:00
|
|
|
|
|
|
|
if mode != stat.S_IMODE(st.st_mode):
|
2016-04-18 22:31:06 +02:00
|
|
|
change = True
|
|
|
|
itemized[5] = 'p'
|
|
|
|
err += 'Path %s differs in permissions (%o vs %o)\n' % (path, mode, stat.S_IMODE(st.st_mode))
|
|
|
|
|
|
|
|
# Compare file user ownership
|
|
|
|
owner = uid = None
|
|
|
|
try:
|
|
|
|
owner = pwd.getpwuid(st.st_uid).pw_name
|
|
|
|
except:
|
|
|
|
uid = st.st_uid
|
|
|
|
|
|
|
|
# If we are not root and requested owner is not our user, fail
|
|
|
|
if run_uid != 0 and (fut_owner != run_owner or fut_uid != run_uid):
|
|
|
|
raise UnarchiveError('Cannot change ownership of %s to %s, as user %s' % (path, fut_owner, run_owner))
|
|
|
|
|
|
|
|
if owner and owner != fut_owner:
|
|
|
|
change = True
|
|
|
|
err += 'Path %s is owned by user %s, not by user %s as expected\n' % (path, owner, fut_owner)
|
|
|
|
itemized[6] = 'o'
|
|
|
|
elif uid and uid != fut_uid:
|
|
|
|
change = True
|
|
|
|
err += 'Path %s is owned by uid %s, not by uid %s as expected\n' % (path, uid, fut_uid)
|
|
|
|
itemized[6] = 'o'
|
|
|
|
|
|
|
|
# Compare file group ownership
|
|
|
|
group = gid = None
|
|
|
|
try:
|
|
|
|
group = grp.getgrgid(st.st_gid).gr_name
|
|
|
|
except:
|
|
|
|
gid = st.st_gid
|
|
|
|
|
|
|
|
if run_uid != 0 and fut_gid not in groups:
|
|
|
|
raise UnarchiveError('Cannot change group ownership of %s to %s, as user %s' % (path, fut_group, run_owner))
|
|
|
|
|
|
|
|
if group and group != fut_group:
|
|
|
|
change = True
|
|
|
|
err += 'Path %s is owned by group %s, not by group %s as expected\n' % (path, group, fut_group)
|
|
|
|
itemized[6] = 'g'
|
|
|
|
elif gid and gid != fut_gid:
|
|
|
|
change = True
|
|
|
|
err += 'Path %s is owned by gid %s, not by gid %s as expected\n' % (path, gid, fut_gid)
|
|
|
|
itemized[6] = 'g'
|
|
|
|
|
|
|
|
# Register changed files and finalize diff output
|
|
|
|
if change:
|
|
|
|
if path not in self.includes:
|
|
|
|
self.includes.append(path)
|
2016-05-02 20:29:04 +02:00
|
|
|
diff += '%s %s\n' % (''.join(itemized), path)
|
2016-04-18 22:31:06 +02:00
|
|
|
|
|
|
|
if self.includes:
|
|
|
|
unarchived = False
|
|
|
|
|
|
|
|
# DEBUG
|
|
|
|
# out = old_out + out
|
|
|
|
|
|
|
|
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd, diff=diff)
|
2013-10-21 22:22:42 +02:00
|
|
|
|
|
|
|
def unarchive(self):
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd = [ self.cmd_path, '-o', self.src ]
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.opts:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.extend(self.opts)
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.includes:
|
2016-08-25 15:25:21 +02:00
|
|
|
# NOTE: Command unzip has this strange behaviour where it expects quoted filenames to also be escaped
|
|
|
|
cmd.extend(map(shell_escape, self.includes))
|
2016-04-18 22:31:06 +02:00
|
|
|
# We don't need to handle excluded files, since we simply do not include them
|
|
|
|
# if self.excludes:
|
2016-08-25 15:25:21 +02:00
|
|
|
# cmd.extend([ '-x' ] + self.excludes ])
|
|
|
|
cmd.extend([ '-d', self.dest ])
|
2013-10-21 22:22:42 +02:00
|
|
|
rc, out, err = self.module.run_command(cmd)
|
2014-02-12 08:57:00 +01:00
|
|
|
return dict(cmd=cmd, rc=rc, out=out, err=err)
|
2013-10-21 22:22:42 +02:00
|
|
|
|
|
|
|
def can_handle_archive(self):
|
2014-04-16 03:38:39 +02:00
|
|
|
if not self.cmd_path:
|
|
|
|
return False
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd = [ self.cmd_path, '-l', self.src ]
|
2013-10-21 22:22:42 +02:00
|
|
|
rc, out, err = self.module.run_command(cmd)
|
|
|
|
if rc == 0:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
# class to handle gzipped tar files
|
2015-01-22 01:14:11 +01:00
|
|
|
class TgzArchive(object):
|
2015-01-14 22:10:13 +01:00
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
    def __init__(self, src, dest, file_args, module):
        # Path to the archive and extraction directory on the managed host
        self.src = src
        self.dest = dest
        # owner/group/mode settings forwarded from the module's file arguments
        self.file_args = file_args
        # Extra command-line options passed straight to the tar binary
        self.opts = module.params['extra_opts']
        self.module = module
        # The gtar-based handlers cannot run in check mode; bail out early
        if self.module.check_mode:
            self.module.exit_json(skipped=True, msg="remote module (%s) does not support check mode when using gtar" % self.module._name)
        # Strip trailing slashes so directory excludes compare consistently
        self.excludes = [ path.rstrip('/') for path in self.module.params['exclude']]
        # Prefer gtar (GNU tar) as it supports the compression options -z, -j and -J
        self.cmd_path = self.module.get_bin_path('gtar', None)
        if not self.cmd_path:
            # Fallback to tar
            self.cmd_path = self.module.get_bin_path('tar')
        # Compression flag for tar; subclasses override ('' / '-j' / '-J')
        self.zipflag = '-z'
        # Lazily-populated member-name cache
        self._files_in_archive = []
|
|
|
|
|
|
|
|
    @property
    def files_in_archive(self, force_refresh=False):
        ''' Cached list of member names from 'tar --list', minus excludes.

        NOTE(review): as a property this can never actually receive
        force_refresh from a caller; the parameter is effectively dead.
        '''
        if self._files_in_archive and not force_refresh:
            return self._files_in_archive

        cmd = [ self.cmd_path, '--list', '-C', self.dest ]
        if self.zipflag:
            cmd.append(self.zipflag)
        if self.opts:
            cmd.extend([ '--show-transformed-names' ] + self.opts)
        if self.excludes:
            cmd.extend([ '--exclude=' + quote(f) for f in self.excludes ])
        cmd.extend([ '-f', self.src ])
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            raise UnarchiveError('Unable to list files in the archive')

        for filename in out.splitlines():
            # Compensate for locale-related problems in gtar output (octal unicode representation) #11348
            # filename = filename.decode('string_escape')
            # NOTE(review): codecs.escape_decode returns bytes on Python 3,
            # which would make the excludes comparison str-vs-bytes — verify.
            filename = codecs.escape_decode(filename)[0]
            if filename and filename not in self.excludes:
                self._files_in_archive.append(filename)
        return self._files_in_archive
|
2013-10-21 22:22:42 +02:00
|
|
|
|
2016-04-18 22:31:06 +02:00
|
|
|
    def is_unarchived(self):
        ''' Use 'tar --diff' to decide whether extraction is still needed.

        Differences that the module would set anyway (owner/group/mode that
        the task specifies, or that root will restore) are filtered out of
        the output; any remaining difference marks the archive as not yet
        unarchived.  Returns dict(unarchived, rc, out, err, cmd).
        '''
        cmd = [ self.cmd_path, '--diff', '-C', self.dest ]
        if self.zipflag:
            cmd.append(self.zipflag)
        if self.opts:
            cmd.extend([ '--show-transformed-names' ] + self.opts)
        if self.file_args['owner']:
            cmd.append('--owner=' + quote(self.file_args['owner']))
        if self.file_args['group']:
            cmd.append('--group=' + quote(self.file_args['group']))
        if self.file_args['mode']:
            cmd.append('--mode=' + quote(self.file_args['mode']))
        if self.module.params['keep_newer']:
            cmd.append('--keep-newer-files')
        if self.excludes:
            cmd.extend([ '--exclude=' + quote(f) for f in self.excludes ])
        cmd.extend([ '-f', self.src ])
        rc, out, err = self.module.run_command(cmd)

        # Check whether the differences are in something that we're
        # setting anyway

        # What is different
        unarchived = True
        old_out = out
        out = ''
        run_uid = os.getuid()
        # When unarchiving as a user, or when owner/group/mode is supplied --diff is insufficient
        # Only way to be sure is to check request with what is on disk (as we do for zip)
        # Leave this up to set_fs_attributes_if_different() instead of inducing a (false) change
        for line in old_out.splitlines() + err.splitlines():
            # FIXME: Remove the bogus lines from error-output as well !
            # Ignore bogus errors on empty filenames (when using --split-component)
            if EMPTY_FILE_RE.search(line):
                continue
            if run_uid == 0 and not self.file_args['owner'] and OWNER_DIFF_RE.search(line):
                out += line + '\n'
            if run_uid == 0 and not self.file_args['group'] and GROUP_DIFF_RE.search(line):
                out += line + '\n'
            if not self.file_args['mode'] and MODE_DIFF_RE.search(line):
                out += line + '\n'
            if MOD_TIME_DIFF_RE.search(line):
                out += line + '\n'
            if MISSING_FILE_RE.search(line):
                out += line + '\n'
        # Any surviving difference line means extraction is still required
        if out:
            unarchived = False
        return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
|
2013-10-21 22:22:42 +02:00
|
|
|
|
|
|
|
def unarchive(self):
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd = [ self.cmd_path, '--extract', '-C', self.dest ]
|
|
|
|
if self.zipflag:
|
|
|
|
cmd.append(self.zipflag)
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.opts:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.extend([ '--show-transformed-names' ] + self.opts)
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.file_args['owner']:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.append('--owner=' + quote(self.file_args['owner']))
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.file_args['group']:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.append('--group=' + quote(self.file_args['group']))
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.file_args['mode']:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.append('--mode=' + quote(self.file_args['mode']))
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.module.params['keep_newer']:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.append('--keep-newer-files')
|
2016-04-18 22:31:06 +02:00
|
|
|
if self.excludes:
|
2016-08-25 15:25:21 +02:00
|
|
|
cmd.extend([ '--exclude=' + quote(f) for f in self.excludes ])
|
|
|
|
cmd.extend([ '-f', self.src ])
|
2014-06-15 16:38:41 +02:00
|
|
|
rc, out, err = self.module.run_command(cmd, cwd=self.dest)
|
2014-02-12 08:57:00 +01:00
|
|
|
return dict(cmd=cmd, rc=rc, out=out, err=err)
|
2013-10-21 22:22:42 +02:00
|
|
|
|
|
|
|
def can_handle_archive(self):
|
2014-04-16 03:38:39 +02:00
|
|
|
if not self.cmd_path:
|
|
|
|
return False
|
2015-01-14 22:10:13 +01:00
|
|
|
|
|
|
|
try:
|
|
|
|
if self.files_in_archive:
|
2014-02-12 08:57:00 +01:00
|
|
|
return True
|
2015-01-14 22:10:13 +01:00
|
|
|
except UnarchiveError:
|
|
|
|
pass
|
|
|
|
# Errors and no files in archive assume that we weren't able to
|
|
|
|
# properly unarchive it
|
2013-10-21 22:22:42 +02:00
|
|
|
return False
|
|
|
|
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
# class to handle tar files that aren't compressed
|
2015-01-22 01:14:11 +01:00
|
|
|
class TarArchive(TgzArchive):
    ''' Handler for plain (uncompressed) tar archives. '''

    def __init__(self, src, dest, file_args, module):
        super(TarArchive, self).__init__(src, dest, file_args, module)
        # Plain tar needs no compression flag
        self.zipflag = ''
|
|
|
|
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
# class to handle bzip2 compressed tar files
|
2015-01-22 01:14:11 +01:00
|
|
|
class TarBzipArchive(TgzArchive):
    ''' Handler for bzip2-compressed tar archives. '''

    def __init__(self, src, dest, file_args, module):
        super(TarBzipArchive, self).__init__(src, dest, file_args, module)
        # Tell tar to filter the archive through bzip2
        self.zipflag = '-j'
|
2013-10-21 22:22:42 +02:00
|
|
|
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
# class to handle xz compressed tar files
class TarXzArchive(TgzArchive):
    """Handler for xz-compressed tar archives."""

    def __init__(self, src, dest, file_args, module):
        super(TarXzArchive, self).__init__(src, dest, file_args, module)
        # '-J' makes the inherited tar invocation filter through xz.
        self.zipflag = '-J'
|
2013-10-21 22:22:42 +02:00
|
|
|
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
# try handlers in order and return the one that works or bail if none work
def pick_handler(src, dest, file_args, module):
    """Return the first archive handler that can deal with *src*.

    Instantiates each known handler in preference order and returns the
    first one whose can_handle_archive() succeeds; fails the module when
    no handler matches.
    """
    for handler_cls in (ZipArchive, TgzArchive, TarArchive, TarBzipArchive, TarXzArchive):
        candidate = handler_cls(src, dest, file_args, module)
        if candidate.can_handle_archive():
            return candidate
    module.fail_json(msg='Failed to find handler for "%s". Make sure the required command to extract the file is installed.' % src)
|
2013-10-21 22:22:42 +02:00
|
|
|
|
2014-02-12 08:57:00 +01:00
|
|
|
|
2013-10-21 22:22:42 +02:00
|
|
|
def main():
    """Module entry point.

    Validates/fetches the source archive, picks a matching handler,
    unpacks if the destination is not already up to date, then applies
    the common file attributes (owner/group/mode) to the extracted
    files and reports results via exit_json/fail_json.
    """
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = dict(
            src = dict(required=True, type='path'),
            original_basename = dict(required=False, type='str'), # used to handle 'dest is a directory' via template, a slight hack
            dest = dict(required=True, type='path'),
            copy = dict(required=False, default=True, type='bool'),
            remote_src = dict(required=False, default=False, type='bool'),
            creates = dict(required=False, type='path'),
            list_files = dict(required=False, default=False, type='bool'),
            keep_newer = dict(required=False, default=False, type='bool'),
            exclude = dict(required=False, default=[], type='list'),
            extra_opts = dict(required=False, default=[], type='list'),
            validate_certs = dict(required=False, default=True, type='bool'),
        ),
        add_file_common_args = True,
        mutually_exclusive = [("copy", "remote_src"),],
        # check-mode only works for zip files, we cover that later
        supports_check_mode = True,
    )

    # We screenscrape a huge amount of commands so use C locale anytime we do
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    src = os.path.expanduser(module.params['src'])
    dest = os.path.expanduser(module.params['dest'])
    copy = module.params['copy']
    remote_src = module.params['remote_src']
    # Common file attributes (owner/group/mode/...) shared with the file module.
    file_args = module.load_file_common_arguments(module.params)
    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src and copy:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If copy=false, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            # NOTE(review): the "temp directory" here is the directory the
            # module file itself lives in, not a real tempdir -- confirm intended.
            tempdir = os.path.dirname(os.path.realpath(__file__))
            package = os.path.join(tempdir, str(src.rsplit('/', 1)[1]))
            try:
                rsp, info = fetch_url(module, src)
                # If download fails, raise a proper exception
                if rsp is None:
                    raise Exception(info['msg'])
                # NOTE(review): text mode 'w' and the `data == ""` EOF test
                # assume Python 2 str semantics; binary archives would need
                # 'wb' / b"" on Python 3 -- confirm target interpreter.
                f = open(package, 'w')
                # Read 1kb at a time to save on ram
                while True:
                    data = rsp.read(BUFSIZE)
                    if data == "":
                        break # End of file, break while loop
                    f.write(data)
                f.close()
                # From here on, operate on the downloaded local copy.
                src = package
            except Exception:
                e = get_exception()
                module.fail_json(msg="Failure downloading %s, %s" % (src, e))
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
    except Exception:
        # NOTE(review): a getsize() failure (e.g. stat error) is reported
        # with the generic "not readable" message and `e` is unused here.
        e = get_exception()
        module.fail_json(msg="Source '%s' not readable" % src)

    # is dest OK to receive tar file?
    if not os.path.isdir(dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    # First handler whose can_handle_archive() succeeds wins; fails the
    # module when none matches.
    handler = pick_handler(src, dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        # In check mode only report whether an unpack would happen.
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = { 'prepared': check_results['diff'] }

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(dest, filename)
            try:
                res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])
            except (IOError, OSError):
                e = get_exception()
                module.fail_json(msg="Unexpected error when accessing exploded file: %s" % str(e), **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
|
|
|
|
|
2013-12-02 21:13:49 +01:00
|
|
|
# import module snippets
# (star imports are the historical Ansible module convention; they pull in
# AnsibleModule, get_exception, BUFSIZE, fetch_url, os, etc.)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

# Run the module only when executed directly, not when imported.
if __name__ == '__main__':
    main()
|