only add data when there is data to add (#54559) (#73566)

Only add data when there is data to add

  also avoid clobbering existing data with an empty file
  fixes #45843

* remove redundant code, update comments
* fix mock dataloader; the original does not return None
* added test

(cherry picked from commit ec8a556538)
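
The gist of the change, in isolation: when merging data loaded from several role files, a file that loads as empty (None) must be skipped instead of overwriting what earlier files already contributed. Below is a minimal, self-contained sketch of that behaviour, not the actual ansible code; merge_vars_files and the dict-merge shorthand are hypothetical stand-ins for Role._load_role_yaml and combine_vars.

    # Minimal sketch, assuming each vars file has already been YAML-loaded
    # into either a dict or None (an empty file loads as None).
    def merge_vars_files(loaded_files):
        data = None
        for new_data in loaded_files:
            if new_data:  # only add data when there is data to add
                if data is not None and isinstance(new_data, dict):
                    data = {**data, **new_data}  # stand-in for ansible's combine_vars()
                else:
                    data = new_data
        return data

    # Before the fix an empty trailing file clobbered earlier values;
    # with the guard above the earlier data survives.
    assert merge_vars_files([{'defined_var': 1}, None]) == {'defined_var': 1}
    assert merge_vars_files([None, None]) is None

The integration test added below exercises the same behaviour through a role named 'data' whose vars define defined_var: 1.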
Brian Coca, 2021-03-08 05:07:44 -05:00, committed by GitHub
parent 5647f8aa55
commit 2f51105936
9 changed files with 60 additions and 19 deletions


@@ -0,0 +1,2 @@
+bugfixes:
+- ensure we don't clobber role vars data when getting an empty file


@@ -236,7 +236,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
             try:
                 data = loader.load_from_file(include_file)
-                if data is None:
+                if not data:
                     display.warning('file %s is empty and had no tasks to include' % include_file)
                     continue
                 elif not isinstance(data, list):


@@ -22,6 +22,7 @@ __metaclass__ = type
 import os

 from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError
+from ansible.module_utils._text import to_text
 from ansible.module_utils.six import iteritems, binary_type, text_type
 from ansible.module_utils.common._collections_compat import Container, Mapping, Set, Sequence
 from ansible.playbook.attribute import FieldAttribute
@@ -33,8 +34,10 @@ from ansible.playbook.role.metadata import RoleMetadata
 from ansible.playbook.taggable import Taggable
 from ansible.plugins.loader import add_all_plugin_dirs
 from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.display import Display
 from ansible.utils.vars import combine_vars

+display = Display()
+
 __all__ = ['Role', 'hash_params']
@@ -207,14 +210,14 @@ class Role(Base, Conditional, Taggable, CollectionSearch):
         # vars and default vars are regular dictionaries
         self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
         if self._role_vars is None:
-            self._role_vars = dict()
-        elif not isinstance(self._role_vars, dict):
+            self._role_vars = {}
+        elif not isinstance(self._role_vars, Mapping):
             raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

         self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
         if self._default_vars is None:
-            self._default_vars = dict()
-        elif not isinstance(self._default_vars, dict):
+            self._default_vars = {}
+        elif not isinstance(self._default_vars, Mapping):
             raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

         # load the role's other files, if they exist
@@ -269,32 +272,53 @@ class Role(Base, Conditional, Taggable, CollectionSearch):
                                     obj=handler_data, orig_exc=e)

     def _load_role_yaml(self, subdir, main=None, allow_dir=False):
+        '''
+        Find and load role YAML files and return data found.
+        :param subdir: subdir of role to search (vars, files, tasks, handlers, defaults)
+        :type subdir: string
+        :param main: filename to match, will default to 'main.<ext>' if not provided.
+        :type main: string
+        :param allow_dir: If true we combine results of multiple matching files found.
+                          If false, highlander rules. Only for vars(dicts) and not tasks(lists).
+        :type allow_dir: bool
+
+        :returns: data from the matched file(s), type can be dict or list depending on vars or tasks.
+        '''
+        data = None
         file_path = os.path.join(self._role_path, subdir)
         if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
-            # Valid extensions and ordering for roles is hard-coded to maintain
-            # role portability
-            extensions = ['.yml', '.yaml', '.json']
-            # If no <main> is specified by the user, look for files with
-            # extensions before bare name. Otherwise, look for bare name first.
+            # Valid extensions and ordering for roles is hard-coded to maintain portability
+            extensions = ['.yml', '.yaml', '.json']  # same as default for YAML_FILENAME_EXTENSIONS
+
+            # look for files w/o extensions before/after bare name depending on it being set or not
+            # keep 'main' as original to figure out errors if no files found
             if main is None:
                 _main = 'main'
                 extensions.append('')
             else:
                 _main = main
                 extensions.insert(0, '')
+
+            # not really 'find_vars_files' but find_files_with_extensions_default_to_yaml_filename_extensions
             found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
             if found_files:
-                data = {}
                 for found in found_files:
                     new_data = self._loader.load_from_file(found)
-                    if new_data and allow_dir:
-                        data = combine_vars(data, new_data)
-                    else:
-                        data = new_data
-            return data
+                    if new_data:
+                        if data is not None and isinstance(new_data, Mapping):
+                            data = combine_vars(data, new_data)
+                        else:
+                            data = new_data
+
+                        # found data so no need to continue unless we want to merge
+                        if not allow_dir:
+                            break
         elif main is not None:
+            # this won't trigger with default only when <subdir>_from is specified
             raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
-        return None
+
+        return data

     def _load_dependencies(self):
         '''


@@ -0,0 +1,4 @@
+- hosts: all
+  gather_facts: false
+  roles:
+    - data


@@ -0,0 +1 @@
+defined_var: 1


@@ -0,0 +1,5 @@
+- name: ensure data was correctly defind
+  assert:
+    that:
+      - defined_var is defined
+      - defined_var == 1


@@ -12,3 +12,7 @@ set -eux
 # include/import can execute another instance of role
 [ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags importrole "$@" | grep -c '"msg": "A"')" = "2" ]
 [ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags includerole "$@" | grep -c '"msg": "A"')" = "2" ]
+
+# ensure role data is merged correctly
+ansible-playbook data_integrity.yml -i ../../inventory "$@"


@@ -39,10 +39,11 @@ class DictDataLoader(DataLoader):
         self._vault_secrets = None

     def load_from_file(self, path, cache=True, unsafe=False):
+        data = None
         path = to_text(path)
         if path in self._file_mapping:
-            return self.load(self._file_mapping[path], path)
-        return None
+            data = self.load(self._file_mapping[path], path)
+        return data

     # TODO: the real _get_file_contents returns a bytestring, so we actually convert the
     # unicode/text it's created with to utf-8
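
For the "fix mock dataloader" bullet above: DictDataLoader is the fake loader used by ansible's unit tests, and this change funnels load_from_file() through a single return value, so an unmapped or empty file simply comes back as None and callers treat it as "nothing to add". A rough usage sketch, assuming it runs where ansible's test/units tree is importable as the units package; the paths and contents below are made up for illustration.

    # Hedged sketch exercising the mock loader's load_from_file() after the change.
    from units.mock.loader import DictDataLoader

    fake_loader = DictDataLoader({
        '/role/vars/main.yml': 'defined_var: 1',  # parsed as YAML on load
        '/role/vars/empty.yml': '',               # an empty file loads as None
    })

    assert fake_loader.load_from_file('/role/vars/main.yml') == {'defined_var': 1}
    assert fake_loader.load_from_file('/role/vars/empty.yml') is None    # falsy: callers skip it
    assert fake_loader.load_from_file('/role/vars/missing.yml') is None  # unmapped path: no data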