2019-07-24 20:58:13 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
|
|
|
|
2019-08-21 08:53:35 +02:00
|
|
|
import contextlib
|
2019-07-24 20:58:13 +02:00
|
|
|
import fnmatch
|
2019-08-21 08:53:35 +02:00
|
|
|
import glob
|
2019-07-24 20:58:13 +02:00
|
|
|
import os
|
|
|
|
import re
|
2019-08-21 08:53:35 +02:00
|
|
|
import shutil
|
|
|
|
import subprocess
|
2019-07-31 07:36:03 +02:00
|
|
|
import sys
|
2019-08-21 08:53:35 +02:00
|
|
|
import tarfile
|
2019-07-24 20:58:13 +02:00
|
|
|
import tempfile
|
|
|
|
|
|
|
|
|
2019-08-21 08:53:35 +02:00
|
|
|
def assemble_files_to_ship(complete_file_list):
    """
    Return the list of files that should end up in the sdist.

    Starts from the generated files (manpages and packaging metadata) and then
    adds every repository file that is not on the ignore lists.
    """
    # All files which are in the repository except these:
    ignore_patterns = (
        # Developer-only tools
        '.azure-pipelines/*',
        '.github/*',
        '.github/*/*',
        'changelogs/fragments/*',
        'hacking/backport/*',
        'hacking/shippable/*',
        'hacking/tests/*',
        'hacking/ticket_stubs/*',
        'test/sanity/code-smell/botmeta.*',
        'test/utils/*',
        'test/utils/*/*',
        'test/utils/*/*/*',
        '.git*',
    )
    ignore_files = frozenset((
        # Developer-only tools
        'changelogs/config.yaml',
        'hacking/README.md',
        'hacking/ansible-profile',
        'hacking/cgroup_perf_recap_graph.py',
        'hacking/create_deprecated_issues.py',
        'hacking/deprecated_issue_template.md',
        'hacking/fix_test_syntax.py',
        'hacking/get_library.py',
        'hacking/metadata-tool.py',
        'hacking/report.py',
        'hacking/return_skeleton_generator.py',
        'hacking/test-module',
        'hacking/test-module.py',
        'test/support/README.md',
        '.cherry_picker.toml',
        '.mailmap',
        # Possibly should be included
        'examples/scripts/uptime.py',
        'examples/scripts/my_test.py',
        'examples/scripts/my_test_info.py',
        'examples/scripts/my_test_facts.py',
        'examples/DOCUMENTATION.yml',
        'examples/play.yml',
        'examples/hosts.yaml',
        'examples/hosts.yml',
        'examples/inventory_script_schema.json',
        'examples/plugin_filters.yml',
        'hacking/env-setup',
        'hacking/env-setup.fish',
        'MANIFEST',
    ))

    # These files are generated and then intentionally added to the sdist

    # Manpages: one per script in bin/ that symlinks to the "ansible" entry point
    manpages = ['docs/man/man1/ansible.1']
    for dir_path, dummy, file_names in os.walk('bin'):
        for file_name in file_names:
            link_path = os.path.join(dir_path, file_name)
            if os.path.islink(link_path) and os.readlink(link_path) == 'ansible':
                manpages.append('docs/man/man1/%s.1' % file_name)

    # Misc generated packaging metadata
    misc_generated_files = [
        'SYMLINK_CACHE.json',
        'PKG-INFO',
    ]

    shipped_files = manpages + misc_generated_files

    for repo_path in complete_file_list:
        if repo_path in ignore_files:
            continue
        if any(fnmatch.fnmatch(repo_path, pattern) for pattern in ignore_patterns):
            continue
        shipped_files.append(repo_path)

    return shipped_files
|
|
|
|
|
|
|
|
|
|
|
|
def assemble_files_to_install(complete_file_list):
    """
    Return the files expected in an installation of ansible.

    Only files under lib/ansible and test/lib/ansible_test are installed;
    paths are returned relative to their respective prefix.
    """
    ignore_patterns = tuple()

    pkg_data_files = []
    for repo_path in complete_file_list:
        # Map each repository path to its install prefix; skip everything else.
        if repo_path.startswith("lib/ansible"):
            prefix = 'lib'
        elif repo_path.startswith("test/lib/ansible_test"):
            prefix = 'test/lib'
        else:
            continue

        if any(fnmatch.fnmatch(repo_path, pattern) for pattern in ignore_patterns):
            continue
        pkg_data_files.append(os.path.relpath(repo_path, prefix))

    return pkg_data_files
|
|
|
|
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def clean_repository(file_list):
    """Copy the repository to clean it of artifacts

    :param file_list: iterable of repository-relative file paths to copy
        (resolved against the current working directory).
    :yields: the root of the temporary copy; it is removed on exit.
    """
    # Create a tempdir that will be the clean repo
    with tempfile.TemporaryDirectory() as repo_root:
        # Cache of directories already created, so makedirs() is never called
        # twice for the same path (it would raise).  Seeded with the root plus
        # a trailing separator so top-level files (dirname == '') match the
        # os.path.join(repo_root, '') form below.
        directories = set((repo_root + os.path.sep,))

        for filename in file_list:
            # Determine if we need to create the directory
            directory = os.path.dirname(filename)
            dest_dir = os.path.join(repo_root, directory)
            if dest_dir not in directories:
                os.makedirs(dest_dir)

                # Keep track of all the directories that now exist
                # (makedirs also created any missing intermediate components,
                # so record every ancestor, not just dest_dir)
                path_components = directory.split(os.path.sep)
                path = repo_root
                for component in path_components:
                    path = os.path.join(path, component)
                    if path not in directories:
                        directories.add(path)

            # Copy the file (symlinks are copied as symlinks, not followed)
            shutil.copy2(filename, dest_dir, follow_symlinks=False)

        yield repo_root
|
|
|
|
|
|
|
|
|
|
|
|
def create_sdist(tmp_dir):
    """Build an sdist via ``make snapshot`` and return its path inside tmp_dir."""
    proc = subprocess.Popen(
        ['make', 'snapshot', 'SDIST_DIR=%s' % tmp_dir],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    dummy, stderr = proc.communicate()

    if proc.returncode != 0:
        raise Exception('make snapshot failed:\n%s' % stderr)

    # The sdist should be the only entry in tmp_dir
    contents = os.listdir(tmp_dir)
    if not contents:
        raise Exception('sdist was not created in the temp dir')
    if len(contents) > 1:
        raise Exception('Unexpected extra files in the temp dir')

    return os.path.join(tmp_dir, contents[0])
|
|
|
|
|
|
|
|
|
|
|
|
def extract_sdist(sdist_path, tmp_dir):
    """Untar the sdist

    :param sdist_path: path to the sdist tarball (absolute, or relative to tmp_dir)
    :param tmp_dir: directory to extract into
    :returns: path of the single extracted directory
    :raises Exception: if the layout of tmp_dir after extraction is unexpected
    """
    # Untar the sdist from the tmp_dir
    # NOTE(review): extractall() trusts the archive contents; fine here since we
    # built the tarball ourselves, but do not reuse on untrusted input.
    with tarfile.open(os.path.join(tmp_dir, sdist_path), 'r|*') as sdist:
        sdist.extractall(path=tmp_dir)

    # Determine the sdist directory name
    sdist_filename = os.path.basename(sdist_path)
    tmp_dir_files = os.listdir(tmp_dir)
    try:
        tmp_dir_files.remove(sdist_filename)
    except ValueError:
        # Previously a bare re-raise; surface a useful message instead.
        raise Exception('Could not find the original sdist (%s) in the temp dir' % sdist_filename)

    if len(tmp_dir_files) > 1:
        raise Exception('Unexpected extra files in the temp dir')
    elif len(tmp_dir_files) < 1:
        # Fixed typo: was 'did not occur i nthe temp dir'
        raise Exception('sdist extraction did not occur in the temp dir')

    return os.path.join(tmp_dir, tmp_dir_files[0])
|
|
|
|
|
|
|
|
|
|
|
|
def install_sdist(tmp_dir, sdist_dir):
    """Install the extracted sdist into the temporary directory

    :param tmp_dir: install root passed to ``setup.py install --root``
    :param sdist_dir: directory (inside tmp_dir) containing the extracted sdist
    :returns: the site-packages/dist-packages prefix that ansible was installed into
    :raises Exception: if the install fails or the prefix cannot be determined
    """
    # NOTE(review): assumes a 'python' executable is on PATH — confirm on py3-only hosts
    install = subprocess.Popen(
        ['python', 'setup.py', 'install', '--root=%s' % tmp_dir],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
        cwd=os.path.join(tmp_dir, sdist_dir),
    )

    stdout, stderr = install.communicate()

    if install.returncode != 0:
        raise Exception('sdist install failed:\n%s' % stderr)

    # Determine the prefix for the installed files by scraping setup.py output
    match = re.search('^creating (%s/.*?/(?:site|dist)-packages)/ansible$' %
                      tmp_dir, stdout, flags=re.M)
    if match is None:
        # Previously this fell through to match.group(1) and died with an
        # opaque AttributeError; fail with a diagnosable message instead.
        raise Exception('Could not determine the install prefix from setup.py output:\n%s' % stdout)
    return match.group(1)
|
|
|
|
|
|
|
|
|
|
|
|
def check_sdist_contains_expected(sdist_dir, to_ship_files):
    """Check that the files we expect to ship are present in the sdist

    Returns a list of human-readable problem strings (empty when all good).
    """
    results = [
        '%s: File was not added to sdist' % expected
        for expected in to_ship_files
        if not os.path.exists(os.path.join(sdist_dir, expected))
    ]

    # The changelog is generated, so check for it by pattern rather than name
    changelog_files = glob.glob(os.path.join(sdist_dir, 'changelogs/CHANGELOG-v2.[0-9]*.rst'))
    if not changelog_files:
        results.append('changelogs/CHANGELOG-v2.*.rst: Changelog file was not added to the sdist')
    elif len(changelog_files) > 1:
        results.append('changelogs/CHANGELOG-v2.*.rst: Too many changelog files: %s'
                       % changelog_files)

    return results
|
|
|
|
|
|
|
|
|
|
|
|
def check_sdist_files_are_wanted(sdist_dir, to_ship_files):
    """Check that all files in the sdist are desired

    :param sdist_dir: root of the extracted sdist to walk
    :param to_ship_files: paths (relative to sdist_dir) that are allowed
    :returns: list of problem strings for unexpected files (empty when all good)
    """
    # Membership is tested once per file in the sdist; use a set for O(1)
    # lookups instead of scanning the list each time.
    wanted = frozenset(to_ship_files)

    results = []
    for dirname, dummy, files in os.walk(sdist_dir):
        dirname = os.path.relpath(dirname, start=sdist_dir)
        if dirname == '.':
            dirname = ''

        for filename in files:
            path = os.path.join(dirname, filename)
            if path not in wanted:
                if fnmatch.fnmatch(path, 'changelogs/CHANGELOG-v2.[0-9]*.rst'):
                    # changelog files are generated and expected in the sdist
                    continue

                # FIXME: ansible-test doesn't pass the paths of symlinks to us so we aren't
                # checking those
                if not os.path.islink(os.path.join(sdist_dir, path)):
                    results.append('%s: File in sdist was not in the repository' % path)

    return results
|
|
|
|
|
|
|
|
|
|
|
|
def check_installed_contains_expected(install_dir, to_install_files):
    """Check that all the files we expect to be installed are

    Returns a list of problem strings for files that were not installed.
    """
    return [
        '%s: File not installed' % os.path.join('lib', expected)
        for expected in to_install_files
        if not os.path.exists(os.path.join(install_dir, expected))
    ]
|
|
|
|
|
|
|
|
|
|
|
|
# Matches the metadata files inside the ansible*.egg-info directory that
# setup.py install generates; these are expected and not flagged as unwanted.
EGG_RE = re.compile('ansible[^/]+\\.egg-info/(PKG-INFO|SOURCES.txt|'
                    'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt)$')
|
|
|
|
|
|
|
|
|
|
|
|
def check_installed_files_are_wanted(install_dir, to_install_files):
    """Check that all installed files were desired

    Walks install_dir, maps byte-code cache files back to their source .py
    name, and reports any file that was not in to_install_files (skipping
    symlinks and egg-info metadata).
    """
    results = []

    for dirname, dummy, files in os.walk(install_dir):
        dirname = os.path.relpath(dirname, start=install_dir)
        if dirname == '.':
            dirname = ''

        for filename in files:
            # If this is a byte code cache, look for the python file's name
            directory = dirname
            if filename.endswith('.pyc') or filename.endswith('.pyo'):
                # Remove the trailing "o" or "c", leaving "name.py"
                filename = filename[:-1]

                if directory.endswith('%s__pycache__' % os.path.sep):
                    # Python3 byte code cache, look for the basename of
                    # __pycache__/__init__.cpython-36.py
                    # rsplit gives e.g. ['__init__', 'cpython-36', 'py'];
                    # rejoin first + last to drop the interpreter tag.
                    segments = filename.rsplit('.', 2)
                    if len(segments) >= 3:
                        filename = '.'.join((segments[0], segments[2]))
                        # the source .py lives next to __pycache__, one level up
                        directory = os.path.dirname(directory)

            path = os.path.join(directory, filename)

            # Test that the file was listed for installation
            if path not in to_install_files:
                # FIXME: ansible-test doesn't pass the paths of symlinks to us so we
                # aren't checking those
                if not os.path.islink(os.path.join(install_dir, path)):
                    # egg-info metadata is generated by setup.py and expected
                    if not EGG_RE.match(path):
                        results.append('%s: File was installed but was not supposed to be' % path)

    return results
|
|
|
|
|
|
|
|
|
|
|
|
def _find_symlinks():
|
|
|
|
symlink_list = []
|
|
|
|
for dirname, directories, filenames in os.walk('.'):
|
|
|
|
for filename in filenames:
|
|
|
|
path = os.path.join(dirname, filename)
|
|
|
|
# Strip off "./" from the front
|
|
|
|
path = path[2:]
|
|
|
|
if os.path.islink(path):
|
|
|
|
symlink_list.append(path)
|
|
|
|
|
|
|
|
return symlink_list
|
|
|
|
|
|
|
|
|
|
|
|
def main():
    """Verify the sdist and installed file sets against the repository contents.

    Reads the repository file list from argv (or stdin, one path per line),
    builds and installs an sdist in a clean copy of the repo, and prints one
    line per discrepancy found.
    """
    # All of the files in the repository
    complete_file_list = []
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        complete_file_list.append(path)

    # ansible-test isn't currently passing symlinks to us so construct those ourselves for now
    for filename in _find_symlinks():
        if filename not in complete_file_list:
            # For some reason ansible-test is passing us lib/ansible/module_utils/ansible_release.py
            # which is a symlink even though it doesn't pass any others
            complete_file_list.append(filename)

    # We may run this after docs sanity tests so get a clean repository to run in
    with clean_repository(complete_file_list) as clean_repo_dir:
        os.chdir(clean_repo_dir)

        to_ship_files = assemble_files_to_ship(complete_file_list)
        to_install_files = assemble_files_to_install(complete_file_list)

        results = []
        with tempfile.TemporaryDirectory() as tmp_dir:
            sdist_path = create_sdist(tmp_dir)
            sdist_dir = extract_sdist(sdist_path, tmp_dir)

            # Check that the files that are supposed to be in the sdist are there
            results.extend(check_sdist_contains_expected(sdist_dir, to_ship_files))

            # Check that the files that are in the sdist are in the repository
            results.extend(check_sdist_files_are_wanted(sdist_dir, to_ship_files))

            # install the sdist
            install_dir = install_sdist(tmp_dir, sdist_dir)

            # Check that the files that are supposed to be installed are there
            results.extend(check_installed_contains_expected(install_dir, to_install_files))

            # Check that the files that are installed are supposed to be installed
            results.extend(check_installed_files_are_wanted(install_dir, to_install_files))

        for message in results:
            print(message)
|
2019-07-24 20:58:13 +02:00
|
|
|
|
|
|
|
|
|
|
|
# Script entry point: run the sanity check when executed directly.
if __name__ == '__main__':
    main()
|