Meta meta meta

James Cammarata 2016-08-23 14:02:29 -05:00
parent 33245b2011
commit 3a51587220
5 changed files with 111 additions and 159 deletions


@@ -24,10 +24,11 @@ import itertools
 import operator
 import uuid
+from copy import deepcopy
 from functools import partial
 from inspect import getmembers
-from ansible.compat.six import iteritems, string_types
+from ansible.compat.six import iteritems, string_types, with_metaclass
 from jinja2.exceptions import UndefinedError
@@ -44,10 +45,64 @@ except ImportError:
 from ansible.utils.display import Display
 display = Display()

-BASE_ATTRIBUTES = {}
+def _generic_g(prop_name, self):
+    method = "_get_attr_%s" % prop_name
+    try:
+        value = getattr(self, method)()
+    except AttributeError:
+        try:
+            value = self._attributes[prop_name]
+            if value is None and not self._finalized:
+                try:
+                    value = self._get_parent_attribute(prop_name)
+                except AttributeError:
+                    pass
+        except KeyError:
+            raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
+    return value
+
+def _generic_s(prop_name, self, value):
+    self._attributes[prop_name] = value
+
+def _generic_d(prop_name, self):
+    del self._attributes[prop_name]
+
+class BaseMeta(type):
+
+    def __new__(cls, name, parents, dct):
+        def _create_attrs(src_dict, dst_dict):
+            keys = list(src_dict.keys())
+            for attr_name in keys:
+                value = src_dict[attr_name]
+                if isinstance(value, Attribute):
+                    if attr_name.startswith('_'):
+                        attr_name = attr_name[1:]
+                    getter = partial(_generic_g, attr_name)
+                    setter = partial(_generic_s, attr_name)
+                    deleter = partial(_generic_d, attr_name)
+                    dst_dict[attr_name] = property(getter, setter, deleter)
+                    dst_dict['_valid_attrs'][attr_name] = value
+                    dst_dict['_attributes'][attr_name] = value.default
+
+        def _process_parents(parents, dst_dict):
+            for parent in parents:
+                if hasattr(parent, '__dict__'):
+                    _create_attrs(parent.__dict__, dst_dict)
+                    _process_parents(parent.__bases__, dst_dict)
+
+        dct['_attributes'] = dict()
+        dct['_valid_attrs'] = dict()
+
+        _create_attrs(dct, dct)
+        _process_parents(parents, dct)
+
+        return super(BaseMeta, cls).__new__(cls, name, parents, dct)

-class Base:
+class Base(with_metaclass(BaseMeta, object)):

     # connection/transport
     _connection = FieldAttribute(isa='string')
@@ -85,85 +140,15 @@ class Base:
         # every object gets a random uuid:
         self._uuid = uuid.uuid4()

-        # and initialize the base attributes
-        self._initialize_base_attributes()
-
-        self._cached_parent_attrs = dict()
+        # initialize the default field attribute values
+        #self._attributes = dict()
+        #for (name, attr) in iteritems(self._valid_attrs):
+        #    self._attributes[name] = attr.default
+        self._attributes = self._attributes.copy()

         # and init vars, avoid using defaults in field declaration as it lives across plays
         self.vars = dict()

-    # The following three functions are used to programatically define data
-    # descriptors (aka properties) for the Attributes of all of the playbook
-    # objects (tasks, blocks, plays, etc).
-    #
-    # The function signature is a little strange because of how we define
-    # them. We use partial to give each method the name of the Attribute that
-    # it is for. Since partial prefills the positional arguments at the
-    # beginning of the function we end up with the first positional argument
-    # being allocated to the name instead of to the class instance (self) as
-    # normal. To deal with that we make the property name field the first
-    # positional argument and self the second arg.
-    #
-    # Because these methods are defined inside of the class, they get bound to
-    # the instance when the object is created. After we run partial on them
-    # and put the result back into the class as a property, they get bound
-    # a second time. This leads to self being placed in the arguments twice.
-    # To work around that, we mark the functions as @staticmethod so that the
-    # first binding to the instance doesn't happen.
-
-    @staticmethod
-    def _generic_g(prop_name, self):
-        method = "_get_attr_%s" % prop_name
-        try:
-            value = getattr(self, method)()
-        except AttributeError:
-            try:
-                value = self._attributes[prop_name]
-                if value is None and not self._finalized:
-                    try:
-                        if prop_name in self._cached_parent_attrs:
-                            value = self._cached_parent_attrs[prop_name]
-                        else:
-                            value = self._get_parent_attribute(prop_name)
-                            # FIXME: temporarily disabling due to bugs
-                            #self._cached_parent_attrs[prop_name] = value
-                    except AttributeError:
-                        pass
-            except KeyError:
-                raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
-        return value
-
-    @staticmethod
-    def _generic_s(prop_name, self, value):
-        self._attributes[prop_name] = value
-
-    @staticmethod
-    def _generic_d(prop_name, self):
-        del self._attributes[prop_name]
-
-    def _get_base_attributes(self):
-        '''
-        Returns the list of attributes for this class (or any subclass thereof).
-        If the attribute name starts with an underscore, it is removed
-        '''
-
-        # check cache before retrieving attributes
-        if self.__class__.__name__ in BASE_ATTRIBUTES:
-            return BASE_ATTRIBUTES[self.__class__.__name__]
-
-        # Cache init
-        base_attributes = dict()
-        for (name, value) in getmembers(self.__class__):
-            if isinstance(value, Attribute):
-                if name.startswith('_'):
-                    name = name[1:]
-                base_attributes[name] = value
-        BASE_ATTRIBUTES[self.__class__.__name__] = base_attributes
-        return base_attributes
-
     def dump_me(self, depth=0):
         if depth == 0:
             print("DUMPING OBJECT ------------------------------------------------------")
@@ -178,23 +163,6 @@ class Base:
         if hasattr(self, '_play') and self._play:
             self._play.dump_me(depth+2)

-    def _initialize_base_attributes(self):
-        # each class knows attributes set upon it, see Task.py for example
-        self._attributes = dict()
-
-        for (name, value) in self._get_base_attributes().items():
-            getter = partial(self._generic_g, name)
-            setter = partial(self._generic_s, name)
-            deleter = partial(self._generic_d, name)
-
-            # Place the property into the class so that cls.name is the
-            # property functions.
-            setattr(Base, name, property(getter, setter, deleter))
-
-            # Place the value into the instance so that the property can
-            # process and hold that value.
-            setattr(self, name, value.default)
-
     def preprocess_data(self, ds):
         ''' infrequently used method to do some pre-processing of legacy terms '''
@@ -230,8 +198,7 @@ class Base:
         # Walk all attributes in the class. We sort them based on their priority
         # so that certain fields can be loaded before others, if they are dependent.

-        base_attributes = self._get_base_attributes()
-        for name, attr in sorted(base_attributes.items(), key=operator.itemgetter(1)):
+        for name, attr in sorted(iteritems(self._valid_attrs), key=operator.itemgetter(1)):
             # copy the value over unless a _load_field method is defined
             if name in ds:
                 method = getattr(self, '_load_%s' % name, None)
@@ -264,7 +231,7 @@ class Base:
         not map to attributes for this object.
         '''

-        valid_attrs = frozenset(name for name in self._get_base_attributes())
+        valid_attrs = frozenset(self._valid_attrs.keys())
         for key in ds:
             if key not in valid_attrs:
                 raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (key, self.__class__.__name__), obj=ds)
@@ -274,7 +241,7 @@ class Base:
         if not self._validated:
             # walk all fields in the object
-            for (name, attribute) in iteritems(self._get_base_attributes()):
+            for (name, attribute) in iteritems(self._valid_attrs):

                 # run validator only if present
                 method = getattr(self, '_validate_%s' % name, None)
@@ -299,7 +266,7 @@ class Base:
         new_me = self.__class__()

-        for name in self._get_base_attributes():
+        for name in self._valid_attrs.keys():
             attr_val = getattr(self, name)
             if isinstance(attr_val, collections.Sequence):
                 setattr(new_me, name, attr_val[:])
@@ -330,7 +297,7 @@ class Base:
         # save the omit value for later checking
         omit_value = templar._available_variables.get('omit')

-        for (name, attribute) in iteritems(self._get_base_attributes()):
+        for (name, attribute) in iteritems(self._valid_attrs):

             if getattr(self, name) is None:
                 if not attribute.required:
@@ -432,44 +399,6 @@ class Base:

         self._finalized = True

-    def serialize(self):
-        '''
-        Serializes the object derived from the base object into
-        a dictionary of values. This only serializes the field
-        attributes for the object, so this may need to be overridden
-        for any classes which wish to add additional items not stored
-        as field attributes.
-        '''
-
-        repr = dict()
-        for name in self._get_base_attributes():
-            repr[name] = getattr(self, name)
-
-        # serialize the uuid field
-        repr['uuid'] = getattr(self, '_uuid')
-
-        return repr
-
-    def deserialize(self, data):
-        '''
-        Given a dictionary of values, load up the field attributes for
-        this object. As with serialize(), if there are any non-field
-        attribute data members, this method will need to be overridden
-        and extended.
-        '''
-
-        assert isinstance(data, dict)
-
-        for (name, attribute) in iteritems(self._get_base_attributes()):
-            if name in data:
-                setattr(self, name, data[name])
-            else:
-                setattr(self, name, attribute.default)
-
-        # restore the UUID field
-        setattr(self, '_uuid', data.get('uuid'))
-
     def _load_vars(self, attr, ds):
         '''
         Vars in a play can be specified either as a dictionary directly, or
@@ -515,12 +444,43 @@ class Base:
         if not isinstance(new_value, list):
             new_value = [ new_value ]

-        #return list(set(value + new_value))
         return [i for i,_ in itertools.groupby(value + new_value) if i is not None]

-    def __getstate__(self):
-        return self.serialize()
+    def serialize(self):
+        '''
+        Serializes the object derived from the base object into
+        a dictionary of values. This only serializes the field
+        attributes for the object, so this may need to be overridden
+        for any classes which wish to add additional items not stored
+        as field attributes.
+        '''
+
+        repr = dict()
+        for name in self._valid_attrs.keys():
+            repr[name] = getattr(self, name)
+
+        # serialize the uuid field
+        repr['uuid'] = getattr(self, '_uuid')
+
+        return repr
+
+    def deserialize(self, data):
+        '''
+        Given a dictionary of values, load up the field attributes for
+        this object. As with serialize(), if there are any non-field
+        attribute data members, this method will need to be overridden
+        and extended.
+        '''
+
+        assert isinstance(data, dict)
+
+        for (name, attribute) in iteritems(self._valid_attrs):
+            if name in data:
+                setattr(self, name, data[name])
+            else:
+                setattr(self, name, attribute.default)
+
+        # restore the UUID field
+        setattr(self, '_uuid', data.get('uuid'))

     def __setstate__(self, data):
         self.__init__()
         self.deserialize(data)

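The comment block removed above explains the functools.partial trick the old per-instance property setup relied on; the new code keeps that trick but moves the getters and setters to module level and installs the properties once, at class-creation time, through the BaseMeta metaclass. The following standalone sketch is illustrative only, not the Ansible source: Attribute, FieldMeta, Base and Task are simplified stand-ins, and it uses Python 3 metaclass syntax in place of six's with_metaclass.

from functools import partial


class Attribute(object):
    # simplified stand-in for ansible.playbook.attribute.FieldAttribute
    def __init__(self, isa='string', default=None):
        self.isa = isa
        self.default = default


def _generic_g(prop_name, self):
    # partial() prefills prop_name, so the instance arrives as the second argument
    return self._attributes[prop_name]


def _generic_s(prop_name, self, value):
    self._attributes[prop_name] = value


class FieldMeta(type):
    # build a property for every Attribute found on the class or any of its parents
    def __new__(cls, name, parents, dct):
        dct['_valid_attrs'] = dict()
        dct['_attributes'] = dict()

        sources = [dct] + [vars(base) for parent in parents for base in parent.__mro__]
        for src in sources:
            for attr_name, value in list(src.items()):
                if isinstance(value, Attribute):
                    if attr_name.startswith('_'):
                        attr_name = attr_name[1:]
                    dct[attr_name] = property(partial(_generic_g, attr_name),
                                              partial(_generic_s, attr_name))
                    dct['_valid_attrs'][attr_name] = value
                    dct['_attributes'][attr_name] = value.default

        return super(FieldMeta, cls).__new__(cls, name, parents, dct)


class Base(metaclass=FieldMeta):
    _connection = Attribute(isa='string')

    def __init__(self):
        # copy the class-level defaults so instances do not share mutable state
        self._attributes = self._attributes.copy()


class Task(Base):
    _action = Attribute(isa='string')


t = Task()
t.action = 'ping'
print(t.action, t.connection, sorted(t._valid_attrs))  # ping None ['action', 'connection']

Because partial prefills the attribute name as the first positional argument, the instance lands in the second slot, which is why the getters take (prop_name, self) rather than the usual (self, prop_name); defining them at module level also removes the double-binding problem that forced the old in-class versions to be @staticmethod.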

@@ -202,7 +202,7 @@ class Block(Base, Become, Conditional, Taggable):
         '''

         data = dict()
-        for attr in self._get_base_attributes():
+        for attr in self._valid_attrs:
             if attr not in ('block', 'rescue', 'always'):
                 data[attr] = getattr(self, attr)
@@ -229,7 +229,7 @@ class Block(Base, Become, Conditional, Taggable):
         # we don't want the full set of attributes (the task lists), as that
         # would lead to a serialize/deserialize loop
-        for attr in self._get_base_attributes():
+        for attr in self._valid_attrs:
             if attr in data and attr not in ('block', 'rescue', 'always'):
                 setattr(self, attr, data.get(attr))
@@ -324,15 +324,7 @@ class Block(Base, Become, Conditional, Taggable):
         return value

     def _get_attr_environment(self):
-        '''
-        Override for the 'tags' getattr fetcher, used from Base.
-        '''
-        environment = self._attributes['environment']
-        parent_environment = self._get_parent_attribute('environment', extend=True)
-        if parent_environment is not None:
-            environment = self._extend_value(environment, parent_environment)
-
-        return environment
+        return self._get_parent_attribute('environment')

     def _get_attr_any_errors_fatal(self):
         '''


@@ -147,7 +147,7 @@ class Role(Base, Become, Conditional, Taggable):

         # copy over all field attributes, except for when and tags, which
         # are special cases and need to preserve pre-existing values
-        for (attr_name, _) in iteritems(self._get_base_attributes()):
+        for (attr_name, _) in iteritems(self._valid_attrs):
             if attr_name not in ('when', 'tags'):
                 setattr(self, attr_name, getattr(role_include, attr_name))


@@ -183,7 +183,7 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
         role_def = dict()
         role_params = dict()

-        base_attribute_names = frozenset(self._get_base_attributes().keys())
+        base_attribute_names = frozenset(self._valid_attrs.keys())

         for (key, value) in iteritems(ds):
             # use the list of FieldAttribute values to determine what is and is not
             # an extra parameter for this role (or sub-class of this role)

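The RoleDefinition hunk above only changes where the set of valid attribute names comes from: it is now read from the metaclass-built _valid_attrs dict instead of being computed by _get_base_attributes(). A rough illustration of how that frozenset splits a parsed role definition into field attributes versus extra role parameters follows; the dictionaries and key names are hypothetical, not taken from Ansible.

# Hypothetical data: valid_attrs mimics what BaseMeta would collect for a
# RoleDefinition-like class, and ds mimics a role entry parsed from YAML.
valid_attrs = {'when': None, 'tags': None, 'vars': None}
ds = {'role': 'common', 'tags': ['setup'], 'nginx_port': 8080}

base_attribute_names = frozenset(valid_attrs.keys())

role_def = dict()     # keys that are real field attributes
role_params = dict()  # everything else is treated as a role parameter

for key, value in ds.items():
    if key in base_attribute_names:
        role_def[key] = value
    else:
        role_params[key] = value

print(role_def)     # {'tags': ['setup']}
print(role_params)  # {'role': 'common', 'nginx_port': 8080}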

@@ -220,7 +220,7 @@ class Task(Base, Conditional, Taggable, Become):
                 # top level of the task, so we move those into the 'vars' dictionary
                 # here, and show a deprecation message as we will remove this at
                 # some point in the future.
-                if action == 'include' and k not in self._get_base_attributes() and k not in self.DEPRECATED_ATTRIBUTES:
+                if action == 'include' and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                     display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                             " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                             " for currently supported syntax regarding included files and variables")