From 21d623869f11b9a93aa978411ff9a8e42d444543 Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Thu, 19 Jul 2018 13:17:43 +0400 Subject: [PATCH 01/63] Refactoring pt. 1 In this patch parser rules are made more strict, some boilerplate from parser and other tools is removed, and a bug with override in composite item is partially addressed. Also an attempt is made to enhance test coverage. --- reclass/core.py | 6 +- reclass/datatypes/parameters.py | 76 +++++----- reclass/datatypes/tests/test_exports.py | 15 +- reclass/errors.py | 7 + reclass/storage/yaml_git/__init__.py | 10 +- reclass/utils/dictpath.py | 15 +- reclass/values/__init__.py | 2 +- reclass/values/compitem.py | 11 +- reclass/values/dictitem.py | 1 + reclass/values/invitem.py | 150 +++++++++---------- reclass/values/item.py | 8 +- reclass/values/listitem.py | 1 + reclass/values/refitem.py | 9 +- reclass/values/scaitem.py | 1 + reclass/values/tests/__init__.py | 7 - reclass/values/tests/test_compitem.py | 184 ++++++++++++++++++++++++ reclass/values/tests/test_value.py | 18 ++- reclass/values/value.py | 19 ++- reclass/values/valuelist.py | 12 +- 19 files changed, 379 insertions(+), 173 deletions(-) create mode 100644 reclass/values/tests/test_compitem.py diff --git a/reclass/core.py b/reclass/core.py index ed5a392d..2facfbee 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -210,8 +210,8 @@ def _nodeinfo(self, nodename, inventory): try: node = self._node_entity(nodename) node.initialise_interpolation() - if node.parameters.has_inv_query() and inventory is None: - inventory = self._get_inventory(node.parameters.needs_all_envs(), node.environment, node.parameters.get_inv_queries()) + if node.parameters.has_inv_query and inventory is None: + inventory = self._get_inventory(node.parameters.needs_all_envs, node.environment, node.parameters.get_inv_queries()) node.interpolate(inventory) return node except InterpolationError as e: @@ -237,7 +237,7 @@ def inventory(self): inventory = self._get_inventory(True, '', None) for n in self._storage.enumerate_nodes(): entities[n] = self._nodeinfo(n, inventory) - if entities[n].parameters.has_inv_query(): + if entities[n].parameters.has_inv_query: nodes.add(n) for n in query_nodes: entities[n] = self._nodeinfo(n, inventory) diff --git a/reclass/datatypes/parameters.py b/reclass/datatypes/parameters.py index fa0f3797..4bac31a8 100644 --- a/reclass/datatypes/parameters.py +++ b/reclass/datatypes/parameters.py @@ -29,8 +29,9 @@ from reclass.utils.parameterlist import ParameterList from reclass.values.value import Value from reclass.values.valuelist import ValueList -from reclass.errors import InfiniteRecursionError, ResolveError, ResolveErrorList, InterpolationError, BadReferencesError - +from reclass.errors import InfiniteRecursionError, ResolveError +from reclass.errors import ResolveErrorList, InterpolationError, ParseError +from reclass.errors import BadReferencesError class Parameters(object): ''' @@ -84,12 +85,14 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + @property def has_inv_query(self): return len(self._inv_queries) > 0 def get_inv_queries(self): return self._inv_queries + @property def needs_all_envs(self): return self._needs_all_envs @@ -108,7 +111,8 @@ def _wrap_value(self, value): return self._wrap_list(value) else: try: - return Value(value, self._settings, self._uri, parse_string=self._parse_strings) + return Value(value, self._settings, self._uri, + parse_string=self._parse_strings) except InterpolationError as e: e.context = 
DictPath(self._settings.delimiter) raise @@ -154,7 +158,8 @@ def _update_value(self, cur, new): uri = new.uri else: uri = self._uri - values.append(Value(new, self._settings, uri, parse_string=self._parse_strings)) + values.append(Value(new, self._settings, uri, + parse_string=self._parse_strings)) return values @@ -246,37 +251,37 @@ def merge(self, other): self._base = self._merge_recurse(self._base, wrapped) def _render_simple_container(self, container, key, value, path): - if isinstance(value, ValueList): - if value.is_complex(): - p = path.new_subpath(key) - self._unrendered[p] = True - container[key] = value - if value.has_inv_query(): - self._inv_queries.append((p, value)) - if value.needs_all_envs(): - self._needs_all_envs = True - return - else: - value = value.merge() - if isinstance(value, Value) and value.is_container(): - value = value.contents() - if isinstance(value, dict): - container[key] = self._render_simple_dict(value, path.new_subpath(key)) - elif isinstance(value, list): - container[key] = self._render_simple_list(value, path.new_subpath(key)) - elif isinstance(value, Value): - if value.is_complex(): - p = path.new_subpath(key) - self._unrendered[p] = True - container[key] = value - if value.has_inv_query(): - self._inv_queries.append((p, value)) - if value.needs_all_envs(): - self._needs_all_envs = True - else: - container[key] = value.render(None, None) + if isinstance(value, ValueList): + if value.is_complex: + p = path.new_subpath(key) + self._unrendered[p] = True + container[key] = value + if value.has_inv_query: + self._inv_queries.append((p, value)) + if value.needs_all_envs(): + self._needs_all_envs = True + return else: + value = value.merge() + if isinstance(value, Value) and value.is_container(): + value = value.contents + if isinstance(value, dict): + container[key] = self._render_simple_dict(value, path.new_subpath(key)) + elif isinstance(value, list): + container[key] = self._render_simple_list(value, path.new_subpath(key)) + elif isinstance(value, Value): + if value.is_complex: + p = path.new_subpath(key) + self._unrendered[p] = True container[key] = value + if value.has_inv_query: + self._inv_queries.append((p, value)) + if value.needs_all_envs(): + self._needs_all_envs = True + else: + container[key] = value.render(None, None) + else: + container[key] = value def _render_simple_dict(self, dictionary, path): new_dict = {} @@ -311,7 +316,8 @@ def _initialise_interpolate(self): self._inv_queries = [] self._needs_all_envs = False self._resolve_errors = ResolveErrorList() - self._base = self._render_simple_dict(self._base, DictPath(self._settings.delimiter)) + self._base = self._render_simple_dict(self._base, + DictPath(self._settings.delimiter)) def _interpolate_inner(self, path, inventory): value = path.get_value(self._base) @@ -370,7 +376,7 @@ def _interpolate_references(self, path, value, inventory): ancestor = ancestor.new_subpath(k) if ancestor in self._unrendered: self._interpolate_inner(ancestor, inventory) - if value.allRefs(): + if value.allRefs: all_refs = True else: # not all references in the value could be calculated previously so diff --git a/reclass/datatypes/tests/test_exports.py b/reclass/datatypes/tests/test_exports.py index 8ccd6dfd..21845177 100644 --- a/reclass/datatypes/tests/test_exports.py +++ b/reclass/datatypes/tests/test_exports.py @@ -89,11 +89,16 @@ def test_list_if_expr_invquery_with_and(self): self.assertEqual(p.as_dict(), r) def test_list_if_expr_invquery_with_and_missing(self): - e = {'node1': {'a': 1, 'b': 2, 'c': 
'green'}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}} - p = Parameters({'exp': '$[ if exports:b == 2 and exports:c == green ]'}, SETTINGS, '') - r = {'exp': ['node1']} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = {'node1': {'a': 1, 'b': 2, 'c': 'green'}, + 'node2': {'a': 3, 'b': 3}, + 'node3': {'a': 3, 'b': 2}} + mapping = {'exp': '$[ if exports:b == 2 and exports:c == green ]'} + expected = {'exp': ['node1']} + + pars = Parameters(mapping, SETTINGS, '') + pars.interpolate(inventory) + + self.assertEqual(pars.as_dict(), expected) def test_list_if_expr_invquery_with_and(self): e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 4}} diff --git a/reclass/errors.py b/reclass/errors.py index 0c9d48f0..330ad4cc 100644 --- a/reclass/errors.py +++ b/reclass/errors.py @@ -359,3 +359,10 @@ def _get_message(self): "definition in '{3}'. Nodes can only be defined once " \ "per inventory." return msg.format(self._storage, self._name, self._uris[1], self._uris[0]) + + +class MissingModuleError(ReclassException): + + def __init__(self, modname): + msg = "Module %s is missing" % modname + super(MissingModuleError, self).__init__(rc=posix.EX_DATAERR, msg=msg) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index c26ef776..38de092e 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -17,7 +17,13 @@ import warnings with warnings.catch_warnings(): warnings.simplefilter('ignore') - import pygit2 + try: + # NOTE: in some distros pygit2 could require special effort to acquire. + # It is not a problem per se, but it breaks tests for no real reason. + # This try block is for keeping tests sane. + import pygit2 + except ImportError: + pygit2 = None from six import iteritems @@ -70,6 +76,8 @@ def __repr__(self): class GitRepo(object): def __init__(self, uri): + if pygit2 is None: + raise errors.MissingModuleError('pygit2') self.transport, _, self.url = uri.repo.partition('://') self.name = self.url.replace('/', '_') self.credentials = None diff --git a/reclass/utils/dictpath.py b/reclass/utils/dictpath.py index 32831cf3..6bf152a2 100644 --- a/reclass/utils/dictpath.py +++ b/reclass/utils/dictpath.py @@ -93,9 +93,9 @@ def __ne__(self, other): def __hash__(self): return hash(str(self)) - def _get_path(self): + @property + def path(self): return self._parts - path = property(_get_path) def _get_key(self): if len(self._parts) == 0: @@ -114,17 +114,8 @@ def _get_innermost_container(self, base): def _split_string(self, string): return re.split(r'(? 1 - def key_parts(self): - if self.has_ancestors(): - return self._parts[:-1] - else: - return [] + return self._parts[:-1] def new_subpath(self, key): return DictPath(self._delim, self._parts + [key]) diff --git a/reclass/values/__init__.py b/reclass/values/__init__.py index 9aaaf25a..ec0f8822 100644 --- a/reclass/values/__init__.py +++ b/reclass/values/__init__.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 +# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function diff --git a/reclass/values/compitem.py b/reclass/values/compitem.py index 183bc43d..704ac692 100644 --- a/reclass/values/compitem.py +++ b/reclass/values/compitem.py @@ -17,26 +17,29 @@ def __init__(self, items, settings): self.type = Item.COMPOSITE self._items = items self._settings = settings - self._refs = [] - self._allRefs = False self.assembleRefs() + # TODO: possibility of confusion. 
Looks like 'assemble' should be either + # 'gather' or 'extract'. def assembleRefs(self, context={}): self._refs = [] self._allRefs = True for item in self._items: - if item.has_references(): + if item.has_references: item.assembleRefs(context) self._refs.extend(item.get_references()) - if item.allRefs() is False: + if item.allRefs is False: self._allRefs = False + @property def contents(self): return self._items + @property def allRefs(self): return self._allRefs + @property def has_references(self): return len(self._refs) > 0 diff --git a/reclass/values/dictitem.py b/reclass/values/dictitem.py index d5272b98..b96875fe 100644 --- a/reclass/values/dictitem.py +++ b/reclass/values/dictitem.py @@ -18,6 +18,7 @@ def __init__(self, item, settings): self._dict = item self._settings = settings + @property def contents(self): return self._dict diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py index 37a35cf5..15b66c0c 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -9,9 +9,11 @@ from __future__ import unicode_literals import copy +import functools import pyparsing as pp -from six import iteritems, string_types +from six import iteritems +from six import string_types from .item import Item from reclass.settings import Settings @@ -35,15 +37,68 @@ _IGNORE_ERRORS = '+IgnoreErrors' _ALL_ENVS = '+AllEnvs' + +def _get_parser(): + def tag_with(tag, transform=lambda x:x): + def inner(tag, string, location, tokens): + token = transform(tokens[0]) + tokens[0] = (tag, token) + return functools.partial(inner, tag) + + _object = tag_with(_OBJ) + _option = tag_with(_OPTION) + _expr_list_test = tag_with(_LIST_TEST) + _test = tag_with(_TEST) + _logical = tag_with(_LOGICAL) + _if = tag_with(_IF) + _expr_var = tag_with(_VALUE) + _expr_test = tag_with(_TEST) + _integer = tag_with(_OBJ, int) + _number = tag_with(_OBJ, float) + + end = pp.StringEnd() + ignore_errors = pp.CaselessLiteral(_IGNORE_ERRORS) + all_envs = pp.CaselessLiteral(_ALL_ENVS) + option = (ignore_errors | all_envs).setParseAction(_option) + options = pp.Group(pp.ZeroOrMore(option)) + operator_test = (pp.Literal(_EQUAL) | + pp.Literal(_NOT_EQUAL)).setParseAction(_test) + operator_logical = (pp.CaselessLiteral(_AND) | + pp.CaselessLiteral(_OR)).setParseAction(_logical) + begin_if = pp.CaselessLiteral(_IF).setParseAction(_if) + obj = pp.Word(pp.printables).setParseAction(_object) + sign = pp.Optional(pp.Literal('-')) + number = pp.Word(pp.nums) + dpoint = pp.Literal('.') + integer = pp.Combine(sign + number + pp.WordEnd()).setParseAction(_integer) + real = pp.Combine(sign + + ((number + dpoint + number) | + (dpoint + number) | + (number + dpoint)) + ).setParseAction(_number) + item = integer | real | obj + + single_test = item + operator_test + item + additional_test = operator_logical + single_test + expr_var = pp.Group(obj + end).setParseAction(_expr_var) + expr_test = pp.Group(obj + begin_if + single_test + + pp.ZeroOrMore(additional_test) + + end).setParseAction(_expr_test) + expr_list_test = pp.Group(begin_if + single_test + + pp.ZeroOrMore(additional_test) + + end).setParseAction(_expr_list_test) + expr = expr_test | expr_var | expr_list_test + line = options + expr + end + return line + + class Element(object): def __init__(self, expression, delimiter): self._delimiter = delimiter - self._export_path = None - self._parameter_path = None - self._parameter_value = None - self._export_path, self._parameter_path, self._parameter_value = self._get_vars(expression[0][1], self._export_path, self._parameter_path, 
self._parameter_value) - self._export_path, self._parameter_path, self._parameter_value = self._get_vars(expression[2][1], self._export_path, self._parameter_path, self._parameter_value) + # TODO: this double sommersault must be cleaned + _ = self._get_vars(expression[2][1], *self._get_vars(expression[0][1])) + self._export_path, self._parameter_path, self._parameter_value = _ try: self._export_path.drop_first() @@ -82,7 +137,8 @@ def value(self, context, items): if export_value != self._parameter_value: result = True else: - raise ExpressionError('Unknown test {0}'.format(self._test), tbFlag=False) + raise ExpressionError('Unknown test {0}'.format(self._test), + tbFlag=False) return result else: return False @@ -93,7 +149,7 @@ def _resolve(self, path, dictionary): except KeyError as e: raise ResolveError(str(path)) - def _get_vars(self, var, export, parameter, value): + def _get_vars(self, var, export=None, parameter=None, value=None): if isinstance(var, string_types): path = DictPath(self._delimiter, var) if path.path[0].lower() == 'exports': @@ -150,81 +206,14 @@ def value(self, context, items): elif self._operators[i] == _OR: result = result or next_result else: - raise ExpressionError('Unknown operator {0} {1}'.format(self._operators[i], self.elements), tbFlag=False) + emsg = 'Unknown operator {0} {1}'.format( + self._operators[i], self.elements) + raise ExpressionError(emsg, tbFlag=False) return result class InvItem(Item): - def _get_parser(): - - def _object(string, location, tokens): - token = tokens[0] - tokens[0] = (_OBJ, token) - - def _integer(string, location, tokens): - try: - token = int(tokens[0]) - except ValueError: - token = tokens[0] - tokens[0] = (_OBJ, token) - - def _number(string, location, tokens): - try: - token = float(tokens[0]) - except ValueError: - token = tokens[0] - tokens[0] = (_OBJ, token) - - def _option(string, location, tokens): - token = tokens[0] - tokens[0] = (_OPTION, token) - - def _test(string, location, tokens): - token = tokens[0] - tokens[0] = (_TEST, token) - - def _logical(string, location, tokens): - token = tokens[0] - tokens[0] = (_LOGICAL, token) - - def _if(string, location, tokens): - token = tokens[0] - tokens[0] = (_IF, token) - - def _expr_var(string, location, tokens): - token = tokens[0] - tokens[0] = (_VALUE, token) - - def _expr_test(string, location, tokens): - token = tokens[0] - tokens[0] = (_TEST, token) - - def _expr_list_test(string, location, tokens): - token = tokens[0] - tokens[0] = (_LIST_TEST, token) - - white_space = pp.White().suppress() - end = pp.StringEnd() - ignore_errors = pp.CaselessLiteral(_IGNORE_ERRORS) - all_envs = pp.CaselessLiteral(_ALL_ENVS) - option = (ignore_errors | all_envs).setParseAction(_option) - options = pp.Group(pp.ZeroOrMore(option + white_space)) - operator_test = (pp.Literal(_EQUAL) | pp.Literal(_NOT_EQUAL)).setParseAction(_test) - operator_logical = (pp.CaselessLiteral(_AND) | pp.CaselessLiteral(_OR)).setParseAction(_logical) - begin_if = pp.CaselessLiteral(_IF, ).setParseAction(_if) - obj = pp.Word(pp.printables).setParseAction(_object) - integer = pp.Word('0123456789-').setParseAction(_integer) - number = pp.Word('0123456789-.').setParseAction(_number) - item = integer | number | obj - single_test = white_space + item + white_space + operator_test + white_space + item - additional_test = white_space + operator_logical + single_test - expr_var = pp.Group(obj + pp.Optional(white_space) + end).setParseAction(_expr_var) - expr_test = pp.Group(obj + white_space + begin_if + single_test + 
pp.ZeroOrMore(additional_test) + end).setParseAction(_expr_test) - expr_list_test = pp.Group(begin_if + single_test + pp.ZeroOrMore(additional_test) + end).setParseAction(_expr_list_test) - expr = (expr_test | expr_var | expr_list_test) - line = options + expr + end - return line _parser = _get_parser() @@ -238,7 +227,7 @@ def __init__(self, item, settings): def _parse_expression(self, expr): try: - tokens = InvItem._parser.parseString(expr).asList() + tokens = self._parser.parseString(expr).asList() except pp.ParseException as e: raise ParseError(e.msg, e.line, e.col, e.lineno) @@ -278,12 +267,15 @@ def _parse_expression(self, expr): def assembleRefs(self, context): return + @property def contents(self): return self._expr_text + @property def has_inv_query(self): return True + @property def has_references(self): return len(self._question.refs()) > 0 diff --git a/reclass/values/item.py b/reclass/values/item.py index cad36849..4ab3f68e 100644 --- a/reclass/values/item.py +++ b/reclass/values/item.py @@ -12,6 +12,8 @@ class Item(object): + # TODO: use enum.Enum + # TODO: consider DotMap COMPOSITE = 1 DICTIONARY = 2 INV_QUERY = 3 @@ -26,18 +28,22 @@ class Item(object): def allRefs(self): return True + @property def has_references(self): return False + @property def has_inv_query(self): return False def is_container(self): return False + @property def is_complex(self): - return (self.has_references() | self.has_inv_query()) + return (self.has_references | self.has_inv_query) + @property def contents(self): msg = "Item class {0} does not implement contents()" raise NotImplementedError(msg.format(self.__class__.__name__)) diff --git a/reclass/values/listitem.py b/reclass/values/listitem.py index 41c02ddb..0f0ee603 100644 --- a/reclass/values/listitem.py +++ b/reclass/values/listitem.py @@ -18,6 +18,7 @@ def __init__(self, item, settings): self._list = item self._settings = settings + @property def contents(self): return self._list diff --git a/reclass/values/refitem.py b/reclass/values/refitem.py index d24eeee5..5713346c 100644 --- a/reclass/values/refitem.py +++ b/reclass/values/refitem.py @@ -21,18 +21,16 @@ def __init__(self, items, settings): self.type = Item.REFERENCE self._settings = settings self._items = items - self._refs = [] - self._allRefs = False self.assembleRefs() def assembleRefs(self, context={}): self._refs = [] self._allRefs = True for item in self._items: - if item.has_references(): + if item.has_references: item.assembleRefs(context) self._refs.extend(item.get_references()) - if item.allRefs() == False: + if item.allRefs == False: self._allRefs = False try: strings = [ str(i.render(context, None)) for i in self._items ] @@ -41,12 +39,15 @@ def assembleRefs(self, context={}): except ResolveError as e: self._allRefs = False + @property def contents(self): return self._items + @property def allRefs(self): return self._allRefs + @property def has_references(self): return len(self._refs) > 0 diff --git a/reclass/values/scaitem.py b/reclass/values/scaitem.py index c16ab453..c65f3020 100644 --- a/reclass/values/scaitem.py +++ b/reclass/values/scaitem.py @@ -18,6 +18,7 @@ def __init__(self, value, settings): self._value = value self._settings = settings + @property def contents(self): return self._value diff --git a/reclass/values/tests/__init__.py b/reclass/values/tests/__init__.py index 16d1248d..e69de29b 100644 --- a/reclass/values/tests/__init__.py +++ b/reclass/values/tests/__init__.py @@ -1,7 +0,0 @@ -# -# -*- coding: utf-8 -# -from __future__ import absolute_import -from 
__future__ import division -from __future__ import print_function -from __future__ import unicode_literals diff --git a/reclass/values/tests/test_compitem.py b/reclass/values/tests/test_compitem.py new file mode 100644 index 00000000..3d63d3b5 --- /dev/null +++ b/reclass/values/tests/test_compitem.py @@ -0,0 +1,184 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +import unittest + +SETTINGS = Settings() + +class TestCompItem(unittest.TestCase): + + def test_assembleRefs_no_items(self): + composite = CompItem([], SETTINGS) + + self.assertFalse(composite.has_references) + + def test_assembleRefs_one_item_without_refs(self): + val1 = Value('foo', SETTINGS, '') + + composite = CompItem([val1], SETTINGS) + + self.assertFalse(composite.has_references) + + def test_assembleRefs_one_item_with_one_ref(self): + val1 = Value('${foo}', SETTINGS, '') + expected_refs = ['foo'] + + composite = CompItem([val1], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_one_item_with_two_refs(self): + val1 = Value('${foo}${bar}', SETTINGS, '') + expected_refs = ['foo', 'bar'] + + composite = CompItem([val1], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_two_items_one_with_one_ref_one_without(self): + val1 = Value('${foo}bar', SETTINGS, '') + val2 = Value('baz', SETTINGS, '') + expected_refs = ['foo'] + + composite = CompItem([val1, val2], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_two_items_both_with_one_ref(self): + val1 = Value('${foo}', SETTINGS, '') + val2 = Value('${bar}', SETTINGS, '') + expected_refs = ['foo', 'bar'] + + composite = CompItem([val1, val2], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_assembleRefs_two_items_with_two_refs(self): + val1 = Value('${foo}${baz}', SETTINGS, '') + val2 = Value('${bar}${meep}', SETTINGS, '') + expected_refs = ['foo', 'baz', 'bar', 'meep'] + + composite = CompItem([val1, val2], SETTINGS) + + self.assertTrue(composite.has_references) + self.assertEquals(composite.get_references(), expected_refs) + + def test_render_single_item(self): + val1 = Value('${foo}', SETTINGS, '') + + composite = CompItem([val1], SETTINGS) + + self.assertEquals(1, composite.render({'foo': 1}, None)) + + + def test_render_multiple_items(self): + val1 = Value('${foo}', SETTINGS, '') + val2 = Value('${bar}', SETTINGS, '') + + composite = CompItem([val1, val2], SETTINGS) + + self.assertEquals('12', composite.render({'foo': 1, 'bar': 2}, None)) + + def test_merge_over_merge_scalar(self): + val1 = Value(None, SETTINGS, '') + scalar = ScaItem(1, SETTINGS) + composite = CompItem([val1], SETTINGS) + + result = composite.merge_over(scalar) + + self.assertEquals(result, composite) + + + def test_merge_over_merge_composite(self): + val1 = Value(None, SETTINGS, '') + val2 = Value(None, SETTINGS, '') + composite1 = CompItem([val1], SETTINGS) + composite2 = CompItem([val2], SETTINGS) + + result = composite2.merge_over(composite1) + + self.assertEquals(result, composite2) + + 
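A minimal usage sketch of the CompItem behaviour these tests pin down (it only restates the assertions in this test module and is not part of the patch itself):

    from reclass.settings import Settings
    from reclass.values.value import Value
    from reclass.values.compitem import CompItem

    settings = Settings()

    # A composite holding a single item keeps that item's rendered type ...
    single = CompItem([Value('${foo}', settings, '')], settings)
    assert single.render({'foo': 1}, None) == 1

    # ... while multiple items are rendered and joined as strings, and their
    # references are pooled by assembleRefs() (has_references is a property now).
    pair = CompItem([Value('${foo}', settings, ''),
                     Value('${bar}', settings, '')], settings)
    assert pair.has_references
    assert pair.get_references() == ['foo', 'bar']
    assert pair.render({'foo': 1, 'bar': 2}, None) == '12'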
@unittest.skip("self._value bug") + def test_merge_over_merge_list_scalar_allowed(self): + # This nice bunch of lines below breaks merge because fuck you that's + # why. Seriously so, merger_over simply is not working for Composites + sets = Settings() + sets.allow_scalar_override = True + val1 = Value(None, SETTINGS, '') + listitem = ListItem([1], SETTINGS) + composite = CompItem([val1], sets) + + result = composite.merge_over(listitem) + + self.assertEquals(result, composite2) + + @unittest.skip("self._value bug") + def test_merge_over_merge_list_override_allowed(self): + sets = Settings() + sets.allow_none_override = True + val1 = Value(None, SETTINGS, '') + listitem = ListItem([1], SETTINGS) + composite = CompItem([val1], sets) + + result = composite.merge_over(listitem) + + self.assertEquals(result, composite2) + + def test_merge_over_merge_list_not_allowed(self): + val1 = Value(None, SETTINGS, '') + listitem = ListItem([1], SETTINGS) + composite = CompItem([val1], SETTINGS) + + self.assertRaises(RuntimeError, composite.merge_over, listitem) + + + @unittest.skip("self._value bug") + def test_merge_dict_scalar_allowed(self): + sets = Settings() + sets.allow_scalar_override = True + val1 = Value(None, SETTINGS, '') + dictitem = DictItem({'foo': 'bar'}, SETTINGS) + composite = CompItem([val1], sets) + + result = composite.merge_over(dictitem) + + self.assertEquals(result, composite) + + @unittest.skip("self._value bug") + def test_merge_dict_override_allowed(self): + sets = Settings() + sets.allow_none_override = True + val1 = Value(None, SETTINGS, '') + dictitem = DictItem([1], SETTINGS) + composite = CompItem([val1], sets) + + result = composite.merge_over(dictitem) + + self.assertEquals(result, composite) + + def test_merge_dict_dict_not_allowed(self): + val1 = Value(None, SETTINGS, '') + dictitem = DictItem({'foo': 'bar'}, SETTINGS) + composite = CompItem([val1], SETTINGS) + + self.assertRaises(RuntimeError, composite.merge_over, dictitem) + + def test_merge_other_types_not_allowed(self): + other = type('Other', (object,), {'type': 34}) + val1 = Value(None, SETTINGS, '') + composite = CompItem([val1], SETTINGS) + + self.assertRaises(RuntimeError, composite.merge_over, other) + + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_value.py b/reclass/values/tests/test_value.py index 48533408..a06d2207 100644 --- a/reclass/values/tests/test_value.py +++ b/reclass/values/tests/test_value.py @@ -11,8 +11,6 @@ from __future__ import print_function from __future__ import unicode_literals -import pyparsing as pp - from reclass.settings import Settings from reclass.values.value import Value from reclass.errors import ResolveError, ParseError @@ -42,14 +40,14 @@ class TestValue(unittest.TestCase): def test_simple_string(self): s = 'my cat likes to hide in boxes' tv = Value(s, SETTINGS, '') - self.assertFalse(tv.has_references()) + self.assertFalse(tv.has_references) self.assertEquals(tv.render(CONTEXT, None), s) def _test_solo_ref(self, key): s = _var(key) tv = Value(s, SETTINGS, '') res = tv.render(CONTEXT, None) - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) self.assertEqual(res, CONTEXT[key]) def test_solo_ref_string(self): @@ -70,7 +68,7 @@ def test_solo_ref_bool(self): def test_single_subst_bothends(self): s = 'I like ' + _var('favcolour') + ' and I like it' tv = Value(s, SETTINGS, '') - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 
'favcolour', CONTEXT['favcolour'])) @@ -78,7 +76,7 @@ def test_single_subst_bothends(self): def test_single_subst_start(self): s = _var('favcolour') + ' is my favourite colour' tv = Value(s, SETTINGS, '') - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) @@ -86,7 +84,7 @@ def test_single_subst_start(self): def test_single_subst_end(self): s = 'I like ' + _var('favcolour') tv = Value(s, SETTINGS, '') - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) @@ -95,7 +93,7 @@ def test_deep_subst_solo(self): motd = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(motd) tv = Value(s, SETTINGS, '') - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, motd, CONTEXT['motd']['greeting'])) @@ -104,7 +102,7 @@ def test_multiple_subst(self): greet = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(greet) + ' I like ' + _var('favcolour') + '!' tv = Value(s, SETTINGS, '') - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting']) want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour']) self.assertEqual(tv.render(CONTEXT, None), want) @@ -113,7 +111,7 @@ def test_multiple_subst_flush(self): greet = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(greet) + ' I like ' + _var('favcolour') tv = Value(s, SETTINGS, '') - self.assertTrue(tv.has_references()) + self.assertTrue(tv.has_references) want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting']) want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour']) self.assertEqual(tv.render(CONTEXT, None), want) diff --git a/reclass/values/value.py b/reclass/values/value.py index 4e86274c..613d5537 100644 --- a/reclass/values/value.py +++ b/reclass/values/value.py @@ -70,17 +70,20 @@ def item_type_str(self): def is_container(self): return self._item.is_container() + @property def allRefs(self): - return self._item.allRefs() + return self._item.allRefs + @property def has_references(self): - return self._item.has_references() + return self._item.has_references + @property def has_inv_query(self): - return self._item.has_inv_query() + return self._item.has_inv_query def needs_all_envs(self): - if self._item.has_inv_query(): + if self._item.has_inv_query: return self._item.needs_all_envs() else: return False @@ -88,8 +91,9 @@ def needs_all_envs(self): def ignore_failed_render(self): return self._item.ignore_failed_render() + @property def is_complex(self): - return self._item.is_complex() + return self._item.is_complex def get_references(self): return self._item.get_references() @@ -98,7 +102,7 @@ def get_inv_references(self): return self._item.get_inv_references() def assembleRefs(self, context): - if self._item.has_references(): + if self._item.has_references: self._item.assembleRefs(context) def render(self, context, inventory): @@ -108,8 +112,9 @@ def render(self, context, inventory): e.uri = self._uri raise + @property def contents(self): - return self._item.contents() + return self._item.contents def merge_over(self, value): self._item = self._item.merge_over(value._item) diff --git a/reclass/values/valuelist.py b/reclass/values/valuelist.py index 9c1e1fad..b4a089da 100644 --- 
a/reclass/values/valuelist.py +++ b/reclass/values/valuelist.py @@ -48,24 +48,28 @@ def _update(self): self._is_complex = False item_type = self._values[0].item_type() for v in self._values: - if v.is_complex() or v.constant or v.overwrite or v.item_type() != item_type: + if v.is_complex or v.constant or v.overwrite or v.item_type() != item_type: self._is_complex = True + @property def has_references(self): return len(self._refs) > 0 + @property def has_inv_query(self): return self._has_inv_query def get_inv_references(self): return self._inv_refs + @property def is_complex(self): return self._is_complex def get_references(self): return self._refs + @property def allRefs(self): return self._allRefs @@ -76,7 +80,7 @@ def _check_for_inv_query(self): self._has_inv_query = False self._ignore_failed_render = True for value in self._values: - if value.has_inv_query(): + if value.has_inv_query: self._inv_refs.extend(value.get_inv_references) self._has_inv_query = True if vale.ignore_failed_render() is False: @@ -89,9 +93,9 @@ def assembleRefs(self, context={}): self._allRefs = True for value in self._values: value.assembleRefs(context) - if value.has_references(): + if value.has_references: self._refs.extend(value.get_references()) - if value.allRefs() is False: + if value.allRefs is False: self._allRefs = False def merge(self): From aeea6882f27f2ac1df39ac8030efaf4749e64581 Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Thu, 19 Jul 2018 16:03:34 +0400 Subject: [PATCH 02/63] Refactoring pt. 2 More boilerplate removed. --- .travis.yml | 5 +- Pipfile | 1 + reclass/datatypes/applications.py | 4 +- reclass/datatypes/classes.py | 9 +- reclass/datatypes/entity.py | 54 ++--- reclass/datatypes/parameters.py | 36 +-- reclass/utils/dictpath.py | 18 +- reclass/values/compitem.py | 58 +---- reclass/values/dictitem.py | 26 +- reclass/values/invitem.py | 329 +++++++++----------------- reclass/values/item.py | 66 ++++-- reclass/values/listitem.py | 36 +-- reclass/values/parser_funcs.py | 98 ++++++-- reclass/values/refitem.py | 59 +---- reclass/values/scaitem.py | 28 +-- reclass/values/tests/test_compitem.py | 52 ---- reclass/values/value.py | 19 +- reclass/values/valuelist.py | 44 ++-- requirements.txt | 2 +- setup.py | 2 +- 20 files changed, 362 insertions(+), 584 deletions(-) diff --git a/.travis.yml b/.travis.yml index b060639f..559ef9ae 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,8 +25,9 @@ env: - PACKAGENAME="reclass" install: &pyinst -- pip install pyparsing -- pip install PyYAML +- pip install -r requirements.txt +#- pip install pyparsing +#- pip install PyYAML # To test example models with kitchen: - | test -e Gemfile || cat < Gemfile diff --git a/Pipfile b/Pipfile index 525e7ccf..fc2022b5 100644 --- a/Pipfile +++ b/Pipfile @@ -10,6 +10,7 @@ pyparsing = "*" PyYAML = "*" six = "*" pyyaml = "*" +enum34 = "*" # FIXME, issues with compile phase #"pygit2" = "*" diff --git a/reclass/datatypes/applications.py b/reclass/datatypes/applications.py index 8c6ed151..90ae54c5 100644 --- a/reclass/datatypes/applications.py +++ b/reclass/datatypes/applications.py @@ -33,9 +33,9 @@ def __init__(self, iterable=None, self._negations = [] super(Applications, self).__init__(iterable) - def _get_negation_prefix(self): + @property + def negation_prefix(self): return self._negation_prefix - negation_prefix = property(_get_negation_prefix) def append_if_new(self, item): self._assert_is_string(item) diff --git a/reclass/datatypes/classes.py b/reclass/datatypes/classes.py index 33d9b933..5270e280 100644 --- 
a/reclass/datatypes/classes.py +++ b/reclass/datatypes/classes.py @@ -11,11 +11,6 @@ from __future__ import print_function from __future__ import unicode_literals -#try: -# from types import StringTypes -#except ImportError: -# StringTypes = (str, ) - import six import os from reclass.errors import InvalidClassnameError @@ -61,7 +56,6 @@ def merge_unique(self, iterable): self.append_if_new(i) def _assert_is_string(self, item): - #if not isinstance(item, StringTypes): if not isinstance(item, six.string_types): raise TypeError('%s instances can only contain strings, '\ 'not %s' % (self.__class__.__name__, type(item))) @@ -81,5 +75,4 @@ def append_if_new(self, item): self._append_if_new(item) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, - self._items) + return '%s(%r)' % (self.__class__.__name__, self._items) diff --git a/reclass/datatypes/entity.py b/reclass/datatypes/entity.py index 3c927c3d..8133de53 100644 --- a/reclass/datatypes/entity.py +++ b/reclass/datatypes/entity.py @@ -22,18 +22,16 @@ class Entity(object): for merging. The name and uri of an Entity will be updated to the name and uri of the Entity that is being merged. ''' - def __init__(self, settings, classes=None, applications=None, parameters=None, - exports=None, uri=None, name=None, environment=None): - self._uri = uri or '' - self._name = name or '' - if classes is None: classes = Classes() - self._set_classes(classes) - if applications is None: applications = Applications() - self._set_applications(applications) - if parameters is None: parameters = Parameters(None, settings, uri) - if exports is None: exports = Exports(None, settings, uri) - self._set_parameters(parameters) - self._set_exports(exports) + def __init__(self, settings, classes=None, applications=None, + parameters=None, exports=None, uri=None, name=None, + environment=None): + self._uri = '' if uri is None else uri + self._name = '' if name is None else name + self._classes = self._set_field(classes, Classes) + self._applications = self._set_field(applications, Applications) + pars = [None, settings, uri] + self._parameters = self._set_field(parameters, Parameters, pars) + self._exports = self._set_field(exports, Exports, pars) self._environment = environment name = property(lambda s: s._name) @@ -52,29 +50,15 @@ def environment(self): def environment(self, value): self._environment = value - def _set_classes(self, classes): - if not isinstance(classes, Classes): - raise TypeError('Entity.classes cannot be set to '\ - 'instance of type %s' % type(classes)) - self._classes = classes - - def _set_applications(self, applications): - if not isinstance(applications, Applications): - raise TypeError('Entity.applications cannot be set to '\ - 'instance of type %s' % type(applications)) - self._applications = applications - - def _set_parameters(self, parameters): - if not isinstance(parameters, Parameters): - raise TypeError('Entity.parameters cannot be set to '\ - 'instance of type %s' % type(parameters)) - self._parameters = parameters - - def _set_exports(self, exports): - if not isinstance(exports, Exports): - raise TypeError('Entity.exports cannot be set to '\ - 'instance of type %s' % type(exports)) - self._exports = exports + def _set_field(self, received_value, expected_type, parameters=None): + if parameters is None: + parameters = [] + if received_value is None: + return expected_type(*parameters) + if not isinstance(received_value, expected_type): + raise TypeError('Entity.%s cannot be set to instance of type %s' % + 
(type(expected_type), type(received_value))) + return received_value def merge(self, other): self._classes.merge_unique(other._classes) diff --git a/reclass/datatypes/parameters.py b/reclass/datatypes/parameters.py index 4bac31a8..1db35eb5 100644 --- a/reclass/datatypes/parameters.py +++ b/reclass/datatypes/parameters.py @@ -12,11 +12,6 @@ from __future__ import print_function from __future__ import unicode_literals -#try: -# from types import StringTypes -#except ImportError: -# StringTypes = (str, ) - import copy import sys import types @@ -33,6 +28,7 @@ from reclass.errors import ResolveErrorList, InterpolationError, ParseError from reclass.errors import BadReferencesError + class Parameters(object): ''' A class to hold nested dictionaries with the following specialities: @@ -62,10 +58,9 @@ def __init__(self, mapping, settings, uri, parse_strings=True): self._uri = uri self._base = ParameterDict(uri=self._uri) self._unrendered = None - self._escapes_handled = {} self._inv_queries = [] - self._resolve_errors = ResolveErrorList() - self._needs_all_envs = False + self.resolve_errors = ResolveErrorList() + self.needs_all_envs = False self._parse_strings = parse_strings if mapping is not None: # initialise by merging @@ -92,13 +87,6 @@ def has_inv_query(self): def get_inv_queries(self): return self._inv_queries - @property - def needs_all_envs(self): - return self._needs_all_envs - - def resolve_errors(self): - return self._resolve_errors - def as_dict(self): return self._base.copy() @@ -258,8 +246,8 @@ def _render_simple_container(self, container, key, value, path): container[key] = value if value.has_inv_query: self._inv_queries.append((p, value)) - if value.needs_all_envs(): - self._needs_all_envs = True + if value.needs_all_envs: + self.needs_all_envs = True return else: value = value.merge() @@ -276,8 +264,8 @@ def _render_simple_container(self, container, key, value, path): container[key] = value if value.has_inv_query: self._inv_queries.append((p, value)) - if value.needs_all_envs(): - self._needs_all_envs = True + if value.needs_all_envs: + self.needs_all_envs = True else: container[key] = value.render(None, None) else: @@ -303,8 +291,8 @@ def interpolate(self, inventory=None): # processing them, so we cannot just iterate the dict path, v = next(iteritems(self._unrendered)) self._interpolate_inner(path, inventory) - if self._resolve_errors.have_errors(): - raise self._resolve_errors + if self.resolve_errors.have_errors(): + raise self.resolve_errors def initialise_interpolation(self): self._unrendered = None @@ -314,8 +302,8 @@ def _initialise_interpolate(self): if self._unrendered is None: self._unrendered = {} self._inv_queries = [] - self._needs_all_envs = False - self._resolve_errors = ResolveErrorList() + self.needs_all_envs = False + self.resolve_errors = ResolveErrorList() self._base = self._render_simple_dict(self._base, DictPath(self._settings.delimiter)) @@ -339,7 +327,7 @@ def _interpolate_render_value(self, path, value, inventory): except ResolveError as e: e.context = path if self._settings.group_errors: - self._resolve_errors.add(e) + self.resolve_errors.add(e) new = None else: raise diff --git a/reclass/utils/dictpath.py b/reclass/utils/dictpath.py index 6bf152a2..70c7bb51 100644 --- a/reclass/utils/dictpath.py +++ b/reclass/utils/dictpath.py @@ -81,11 +81,12 @@ def __str__(self): return self._delim.join(str(i) for i in self._parts) def __eq__(self, other): + if not (isinstance(other, six.string_types) or + isinstance(other, self.__class__)): + return False if 
isinstance(other, six.string_types): other = DictPath(self._delim, other) - - return self._parts == other._parts \ - and self._delim == other._delim + return self._parts == other._parts and self._delim == other._delim def __ne__(self, other): return not self.__eq__(other) @@ -152,18 +153,17 @@ def is_ancestor_of(self, other): def exists_in(self, container): item = container - for i in self._parts: + for part in self._parts: if isinstance(item, (dict, list)): - if i in item: + if part in item: if isinstance(item, dict): - item = item[i] + item = item[part] elif isinstance(container, list): - item = item[int(i)] + item = item[int(part)] else: return False else: if item == self._parts[-1]: return True - else: - return False + return False return True diff --git a/reclass/values/compitem.py b/reclass/values/compitem.py index 704ac692..c262f277 100644 --- a/reclass/values/compitem.py +++ b/reclass/values/compitem.py @@ -3,64 +3,28 @@ # # This file is part of reclass # -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals from reclass.settings import Settings -from .item import Item +from reclass.values import item -class CompItem(Item): - def __init__(self, items, settings): - self.type = Item.COMPOSITE - self._items = items - self._settings = settings - self.assembleRefs() +class CompItem(item.ItemWithReferences): - # TODO: possibility of confusion. Looks like 'assemble' should be either - # 'gather' or 'extract'. - def assembleRefs(self, context={}): - self._refs = [] - self._allRefs = True - for item in self._items: - if item.has_references: - item.assembleRefs(context) - self._refs.extend(item.get_references()) - if item.allRefs is False: - self._allRefs = False + type = item.ItemTypes.COMPOSITE - @property - def contents(self): - return self._items - - @property - def allRefs(self): - return self._allRefs - - @property - def has_references(self): - return len(self._refs) > 0 - - def get_references(self): - return self._refs - - def merge_over(self, item): - if item.type == Item.SCALAR or item.type == Item.COMPOSITE: + def merge_over(self, other): + if (other.type == item.ItemTypes.SCALAR or + other.type == item.ItemTypes.COMPOSITE): return self - raise RuntimeError('Trying to merge %s over %s' % (repr(self), repr(item))) + raise RuntimeError('Failed to merge %s over %s' % (self, other)) def render(self, context, inventory): # Preserve type if only one item - if len(self._items) == 1: - return self._items[0].render(context, inventory) + if len(self.contents) == 1: + return self.contents[0].render(context, inventory) # Multiple items - strings = [ str(i.render(context, inventory)) for i in self._items ] + strings = [str(i.render(context, inventory)) for i in self.contents] return "".join(strings) - def __repr__(self): - return 'CompItem(%r)' % self._items - def __str__(self): - return ''.join([ str(i) for i in self._items ]) + return ''.join([str(i) for i in self.contents]) diff --git a/reclass/values/dictitem.py b/reclass/values/dictitem.py index b96875fe..0648a39e 100644 --- a/reclass/values/dictitem.py +++ b/reclass/values/dictitem.py @@ -3,30 +3,10 @@ # # This file is part of reclass # -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from reclass.settings import Settings -from .item import Item +from reclass.values import item -class DictItem(Item): - def __init__(self, item, settings): 
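For the DictPath changes above, a small illustrative sketch (hypothetical values, assuming the ':' delimiter reclass uses by default):

    from reclass.utils.dictpath import DictPath

    p = DictPath(':', 'exports:storage:size')
    p.path                                                # ['exports', 'storage', 'size'], now a property
    p.key_parts()                                         # ['exports', 'storage']
    p.exists_in({'exports': {'storage': {'size': 10}}})   # True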
- self.type = Item.DICTIONARY - self._dict = item - self._settings = settings +class DictItem(item.ContainerItem): - @property - def contents(self): - return self._dict - - def is_container(self): - return True - - def render(self, context, inventory): - return self._dict - - def __repr__(self): - return 'DictItem(%r)' % self._dict + type = item.ItemTypes.DICTIONARY diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py index 15b66c0c..0179f4f3 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -9,139 +9,72 @@ from __future__ import unicode_literals import copy -import functools +import itertools as it +import operator import pyparsing as pp from six import iteritems from six import string_types -from .item import Item +from reclass.values import item +from reclass.values import parser_funcs from reclass.settings import Settings from reclass.utils.dictpath import DictPath from reclass.errors import ExpressionError, ParseError, ResolveError -_OBJ = 'OBJ' -_TEST = 'TEST' -_LIST_TEST = 'LIST_TEST' -_LOGICAL = 'LOGICAL' -_OPTION = 'OPTION' - -_VALUE = 'VALUE' -_IF = 'IF' -_AND = 'AND' -_OR = 'OR' - -_EQUAL = '==' -_NOT_EQUAL = '!=' - -_IGNORE_ERRORS = '+IgnoreErrors' -_ALL_ENVS = '+AllEnvs' - - -def _get_parser(): - def tag_with(tag, transform=lambda x:x): - def inner(tag, string, location, tokens): - token = transform(tokens[0]) - tokens[0] = (tag, token) - return functools.partial(inner, tag) - - _object = tag_with(_OBJ) - _option = tag_with(_OPTION) - _expr_list_test = tag_with(_LIST_TEST) - _test = tag_with(_TEST) - _logical = tag_with(_LOGICAL) - _if = tag_with(_IF) - _expr_var = tag_with(_VALUE) - _expr_test = tag_with(_TEST) - _integer = tag_with(_OBJ, int) - _number = tag_with(_OBJ, float) - - end = pp.StringEnd() - ignore_errors = pp.CaselessLiteral(_IGNORE_ERRORS) - all_envs = pp.CaselessLiteral(_ALL_ENVS) - option = (ignore_errors | all_envs).setParseAction(_option) - options = pp.Group(pp.ZeroOrMore(option)) - operator_test = (pp.Literal(_EQUAL) | - pp.Literal(_NOT_EQUAL)).setParseAction(_test) - operator_logical = (pp.CaselessLiteral(_AND) | - pp.CaselessLiteral(_OR)).setParseAction(_logical) - begin_if = pp.CaselessLiteral(_IF).setParseAction(_if) - obj = pp.Word(pp.printables).setParseAction(_object) - sign = pp.Optional(pp.Literal('-')) - number = pp.Word(pp.nums) - dpoint = pp.Literal('.') - integer = pp.Combine(sign + number + pp.WordEnd()).setParseAction(_integer) - real = pp.Combine(sign + - ((number + dpoint + number) | - (dpoint + number) | - (number + dpoint)) - ).setParseAction(_number) - item = integer | real | obj - - single_test = item + operator_test + item - additional_test = operator_logical + single_test - expr_var = pp.Group(obj + end).setParseAction(_expr_var) - expr_test = pp.Group(obj + begin_if + single_test + - pp.ZeroOrMore(additional_test) + - end).setParseAction(_expr_test) - expr_list_test = pp.Group(begin_if + single_test + - pp.ZeroOrMore(additional_test) + - end).setParseAction(_expr_list_test) - expr = expr_test | expr_var | expr_list_test - line = options + expr + end - return line - - -class Element(object): - def __init__(self, expression, delimiter): +# TODO: generalize expression handling. 
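To make the tuple layout concrete before the classes below: the grammar in parser_funcs tags every token as a (TAG, value) pair, so for a hypothetical query fragment `exports:b == self:name` EqualityTest receives a slice roughly like this (illustrative values only):

    expression = [('OBJ', 'exports:b'), ('TEST', '=='), ('OBJ', 'self:name')]
    # expression[0][1] -> left-hand export path
    # expression[1][1] -> comparison operator, looked up in known_operators
    # expression[2][1] -> right-hand parameter path or literal value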
+class BaseTestExpression(object): + + known_operators = {} + def __init__(self, delimiter): self._delimiter = delimiter + self.refs = [] + self.inv_refs = [] + + +class EqualityTest(BaseTestExpression): + + known_operators = { parser_funcs.EQUAL: operator.eq, + parser_funcs.NOT_EQUAL: operator.ne} + + def __init__(self, expression, delimiter): + # expression is a list of at least three tuples, of which first element + # is a string tag, second is subelement value; other tuples apparently + # are not used. + # expression[0][1] effectively contains export path and apparently must + # be treated as such, also left hand operand in comparison + # expression[1][1] appa holds commparison operator == or != + # expression[2][1] is the righhand operand + super(EqualityTest, self).__init__(delimiter) # TODO: this double sommersault must be cleaned _ = self._get_vars(expression[2][1], *self._get_vars(expression[0][1])) self._export_path, self._parameter_path, self._parameter_value = _ - try: self._export_path.drop_first() except AttributeError: raise ExpressionError('No export') - - self._inv_refs = [ self._export_path ] - self._test = expression[1][1] - + try: + self._compare = self.known_operators[expression[1][1]] + except KeyError as e: + msg = 'Unknown test {0}'.format(expression[1][1]) + raise ExpressionError(msg, tbFlag=False) + self.inv_refs = [self._export_path] if self._parameter_path is not None: self._parameter_path.drop_first() - self._refs = [ str(self._parameter_path) ] - else: - self._refs = [] - - def refs(self): - return self._refs - - def inv_refs(self): - return self._inv_refs + self.refs = [str(self._parameter_path)] def value(self, context, items): if self._parameter_path is not None: - self._parameter_value = self._resolve(self._parameter_path, context) - - if self._parameter_value is None or self._test is None: - raise ExpressionError('Failed to render %s' % str(self), tbFlag=False) - + self._parameter_value = self._resolve(self._parameter_path, + context) + if self._parameter_value is None: + raise ExpressionError('Failed to render %s' % str(self), + tbFlag=False) if self._export_path.exists_in(items): - result = False export_value = self._resolve(self._export_path, items) - if self._test == _EQUAL: - if export_value == self._parameter_value: - result = True - elif self._test == _NOT_EQUAL: - if export_value != self._parameter_value: - result = True - else: - raise ExpressionError('Unknown test {0}'.format(self._test), - tbFlag=False) - return result - else: - return False + return self._compare(export_value, self._parameter_value) + return False def _resolve(self, path, dictionary): try: @@ -167,109 +100,82 @@ def _get_vars(self, var, export=None, parameter=None, value=None): return export, parameter, value -class Question(object): +class LogicTest(BaseTestExpression): - def __init__(self, expression, delimiter): - self._elements = [] - self._operators = [] - self._delimiter = delimiter - self._refs = [] - self._inv_refs = [] - i = 0 - while i < len(expression): - e = Element(expression[i:], self._delimiter) - self._elements.append(e) - self._refs.extend(e.refs()) - self._inv_refs.extend(e.inv_refs()) - i += 3 - if i < len(expression): - self._operators.append(expression[i][1]) - i += 1 - - def refs(self): - return self._refs - - def inv_refs(self): - return self._inv_refs + known_operators = { parser_funcs.AND: operator.and_, + parser_funcs.OR: operator.or_} + + def __init__(self, expr, delimiter): + super(LogicTest, self).__init__(delimiter) + subtests = 
list(it.compress(expr, it.cycle([1, 1, 1, 0]))) + self._els = [EqualityTest(subtests[j:j+3], self._delimiter) + for j in range(0, len(subtests), 3)] + self.refs = [x.refs for x in self._els] + self.inv_refs = [x.inv_refs for x in self._els] + try: + self._ops = [self.known_operators[x[1]] for x in expr[3::4]] + except KeyError as e: + msg = 'Unknown operator {0} {1}'.format(e.messsage, self._els) + raise ExpressionError(msg, tbFlag=False) def value(self, context, items): - if len(self._elements) == 0: + if len(self._els) == 0: # NOTE: possible logic error return True - elif len(self._elements) == 1: - return self._elements[0].value(context, items) - else: - result = self._elements[0].value(context, items) - for i in range(0, len(self._elements)-1): - next_result = self._elements[i+1].value(context, items) - if self._operators[i] == _AND: - result = result and next_result - elif self._operators[i] == _OR: - result = result or next_result - else: - emsg = 'Unknown operator {0} {1}'.format( - self._operators[i], self.elements) - raise ExpressionError(emsg, tbFlag=False) - return result + result = self._els[0].value(context, items) + for op, next_el in zip(self._ops, self._els[1:]): + result = op(result, next_el.value(context, items)) + return result -class InvItem(Item): +class InvItem(item.Item): + type = item.ItemTypes.INV_QUERY - _parser = _get_parser() - - def __init__(self, item, settings): - self.type = Item.INV_QUERY - self._settings = settings - self._needs_all_envs = False - self._ignore_failed_render = self._settings.inventory_ignore_failed_render - self._expr_text = item.render(None, None) - self._parse_expression(self._expr_text) + def __init__(self, newitem, settings): + super(InvItem, self).__init__(newitem.render(None, None), settings) + self.needs_all_envs = False + self.ignore_failed_render = ( + self._settings.inventory_ignore_failed_render) + self._parse_expression(self.contents) def _parse_expression(self, expr): + parser = parser_funcs.get_expression_parser() try: - tokens = self._parser.parseString(expr).asList() + tokens = parser.parseString(expr).asList() except pp.ParseException as e: raise ParseError(e.msg, e.line, e.col, e.lineno) - if len(tokens) == 1: - self._expr_type = tokens[0][0] - self._expr = list(tokens[0][1]) - elif len(tokens) == 2: - for opt in tokens[0]: - if opt[1] == _IGNORE_ERRORS: - self._ignore_failed_render = True - elif opt[1] == _ALL_ENVS: - self._needs_all_envs = True - self._expr_type = tokens[1][0] - self._expr = list(tokens[1][1]) - else: - raise ExpressionError('Failed to parse %s' % str(tokens), tbFlag=False) - - if self._expr_type == _VALUE: - self._value_path = DictPath(self._settings.delimiter, self._expr[0][1]).drop_first() - self._question = Question([], self._settings.delimiter) - self._refs = [] - self._inv_refs = [ self._value_path ] - elif self._expr_type == _TEST: - self._value_path = DictPath(self._settings.delimiter, self._expr[0][1]).drop_first() - self._question = Question(self._expr[2:], self._settings.delimiter) - self._refs = self._question.refs() - self._inv_refs = self._question.inv_refs() - self._inv_refs.append(self._value_path) - elif self._expr_type == _LIST_TEST: + if len(tokens) == 2: # options are set + passed_opts = [x[1] for x in tokens.pop(0)] + self.ignore_failed_render = parser_funcs.IGNORE_ERRORS in passed_opts + self.needs_all_envs = parser_funcs.ALL_ENVS in passed_opts + elif len(tokens) > 2: + raise ExpressionError('Failed to parse %s' % str(tokens), + tbFlag=False) + self._expr_type = tokens[0][0] + 
self._expr = list(tokens[0][1]) + + if self._expr_type == parser_funcs.VALUE: + self._value_path = DictPath(self._settings.delimiter, + self._expr[0][1]).drop_first() + self._question = LogicTest([], self._settings.delimiter) + self.refs = [] + self.inv_refs = [self._value_path] + elif self._expr_type == parser_funcs.TEST: + self._value_path = DictPath(self._settings.delimiter, + self._expr[0][1]).drop_first() + self._question = LogicTest(self._expr[2:], self._settings.delimiter) + self.refs = self._question.refs + self.inv_refs = self._question.inv_refs + self.inv_refs.append(self._value_path) + elif self._expr_type == parser_funcs.LIST_TEST: self._value_path = None - self._question = Question(self._expr[1:], self._settings.delimiter) - self._refs = self._question.refs() - self._inv_refs = self._question.inv_refs() + self._question = LogicTest(self._expr[1:], self._settings.delimiter) + self.refs = self._question.refs + self.inv_refs = self._question.inv_refs else: - raise ExpressionError('Unknown expression type: %s' % self._expr_type, tbFlag=False) - - def assembleRefs(self, context): - return - - @property - def contents(self): - return self._expr_text + msg = 'Unknown expression type: %s' + raise ExpressionError(msg % self._expr_type, tbFlag=False) @property def has_inv_query(self): @@ -277,19 +183,13 @@ def has_inv_query(self): @property def has_references(self): - return len(self._question.refs()) > 0 + return len(self._question.refs) > 0 def get_references(self): - return self._question.refs() + return self._question.refs def get_inv_references(self): - return self._inv_refs - - def needs_all_envs(self): - return self._needs_all_envs - - def ignore_failed_render(self): - return self._ignore_failed_render + return self.inv_refs def _resolve(self, path, dictionary): try: @@ -301,17 +201,21 @@ def _value_expression(self, inventory): results = {} for (node, items) in iteritems(inventory): if self._value_path.exists_in(items): - results[node] = copy.deepcopy(self._resolve(self._value_path, items)) + results[node] = copy.deepcopy(self._resolve(self._value_path, + items)) return results def _test_expression(self, context, inventory): if self._value_path is None: - ExpressionError('Failed to render %s' % str(self), tbFlag=False) + msg = 'Failed to render %s' + raise ExpressionError(msg % str(self), tbFlag=False) results = {} - for (node, items) in iteritems(inventory): - if self._question.value(context, items) and self._value_path.exists_in(items): - results[node] = copy.deepcopy(self._resolve(self._value_path, items)) + for node, items in iteritems(inventory): + if (self._question.value(context, items) and + self._value_path.exists_in(items)): + results[node] = copy.deepcopy( + self._resolve(self._value_path, items)) return results def _list_test_expression(self, context, inventory): @@ -322,11 +226,11 @@ def _list_test_expression(self, context, inventory): return results def render(self, context, inventory): - if self._expr_type == _VALUE: + if self._expr_type == parser_funcs.VALUE: return self._value_expression(inventory) - elif self._expr_type == _TEST: + elif self._expr_type == parser_funcs.TEST: return self._test_expression(context, inventory) - elif self._expr_type == _LIST_TEST: + elif self._expr_type == parser_funcs.LIST_TEST: return self._list_test_expression(context, inventory) raise ExpressionError('Failed to render %s' % str(self), tbFlag=False) @@ -334,4 +238,5 @@ def __str__(self): return ' '.join(str(j) for i,j in self._expr) def __repr__(self): + # had to leave it here for 
now as the behaviour differs from basic return 'InvItem(%r)' % self._expr diff --git a/reclass/values/item.py b/reclass/values/item.py index 4ab3f68e..ee469958 100644 --- a/reclass/values/item.py +++ b/reclass/values/item.py @@ -8,22 +8,20 @@ from __future__ import print_function from __future__ import unicode_literals +from enum import Enum + from reclass.utils.dictpath import DictPath -class Item(object): +ItemTypes = Enum('ItemTypes', + ['COMPOSITE', 'DICTIONARY', 'INV_QUERY', 'LIST', + 'REFERENCE', 'SCALAR']) + - # TODO: use enum.Enum - # TODO: consider DotMap - COMPOSITE = 1 - DICTIONARY = 2 - INV_QUERY = 3 - LIST = 4 - REFERENCE = 5 - SCALAR = 6 +class Item(object): - TYPE_STR = { COMPOSITE: 'composite', DICTIONARY: 'dictionary', - INV_QUERY: 'invventory query', LIST: 'list', - REFERENCE: 'reference', SCALAR: 'scalar' } + def __init__(self, item, settings): + self._settings = settings + self.contents = item def allRefs(self): return True @@ -43,11 +41,6 @@ def is_container(self): def is_complex(self): return (self.has_references | self.has_inv_query) - @property - def contents(self): - msg = "Item class {0} does not implement contents()" - raise NotImplementedError(msg.format(self.__class__.__name__)) - def merge_over(self, item): msg = "Item class {0} does not implement merge_over()" raise NotImplementedError(msg.format(self.__class__.__name__)) @@ -57,4 +50,41 @@ def render(self, context, exports): raise NotImplementedError(msg.format(self.__class__.__name__)) def type_str(self): - return self.TYPE_STR[self.type] + return self.type.name.lower() + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.contents) + + +class ItemWithReferences(Item): + + def __init__(self, items, settings): + super(ItemWithReferences, self).__init__(items, settings) + self.assembleRefs() + + @property + def has_references(self): + return len(self._refs) > 0 + + def get_references(self): + return self._refs + + # NOTE: possibility of confusion. Looks like 'assemble' should be either + # 'gather' or 'extract'. 
+ def assembleRefs(self, context={}): + self._refs = [] + self.allRefs = True + for item in self.contents: + if item.has_references: + item.assembleRefs(context) + self._refs.extend(item.get_references()) + if item.allRefs is False: + self.allRefs = False + +class ContainerItem(Item): + + def is_container(self): + return True + + def render(self, context, inventory): + return self.contents diff --git a/reclass/values/listitem.py b/reclass/values/listitem.py index 0f0ee603..24bece1d 100644 --- a/reclass/values/listitem.py +++ b/reclass/values/listitem.py @@ -3,36 +3,16 @@ # # This file is part of reclass # -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from .item import Item -from reclass.settings import Settings +from reclass.values import item -class ListItem(Item): - def __init__(self, item, settings): - self.type = Item.LIST - self._list = item - self._settings = settings +class ListItem(item.ContainerItem): - @property - def contents(self): - return self._list + type = item.ItemTypes.LIST - def is_container(self): - return True - - def render(self, context, inventory): - return self._list - - def merge_over(self, item): - if item.type == Item.LIST: - item._list.extend(self._list) - return item - raise RuntimeError('Trying to merge %s over %s' % (repr(self), repr(item))) - - def __repr__(self): - return 'ListItem(%r)' % (self._list) + def merge_over(self, other): + if other.type == item.ItemTypes.LIST: + other.contents.extend(self.contents) + return other + raise RuntimeError('Failed to merge %s over %s' % (self, other)) diff --git a/reclass/values/parser_funcs.py b/reclass/values/parser_funcs.py index 46db7cc1..50babd0e 100644 --- a/reclass/values/parser_funcs.py +++ b/reclass/values/parser_funcs.py @@ -9,22 +9,76 @@ from __future__ import unicode_literals import pyparsing as pp +import functools STR = 1 REF = 2 INV = 3 -def _string(string, location, tokens): - token = tokens[0] - tokens[0] = (STR, token) - -def _reference(string, location, tokens): - token = list(tokens[0]) - tokens[0] = (REF, token) - -def _invquery(string, location, tokens): - token = list(tokens[0]) - tokens[0] = (INV, token) +_OBJ = 'OBJ' +_LOGICAL = 'LOGICAL' +_OPTION = 'OPTION' +_IF = 'IF' + +TEST = 'TEST' +LIST_TEST = 'LIST_TEST' + +VALUE = 'VALUE' +AND = 'AND' +OR = 'OR' + +EQUAL = '==' +NOT_EQUAL = '!=' + +IGNORE_ERRORS = '+IgnoreErrors' +ALL_ENVS = '+AllEnvs' + + +def _tag_with(tag, transform=lambda x:x): + def inner(tag, string, location, tokens): + token = transform(tokens[0]) + tokens[0] = (tag, token) + return functools.partial(inner, tag) + +def get_expression_parser(): + + s_end = pp.StringEnd() + sign = pp.Optional(pp.Literal('-')) + number = pp.Word(pp.nums) + dpoint = pp.Literal('.') + ignore_errors = pp.CaselessLiteral(IGNORE_ERRORS) + all_envs = pp.CaselessLiteral(ALL_ENVS) + eq, neq = pp.Literal(EQUAL), pp.Literal(NOT_EQUAL) + eand, eor = pp.CaselessLiteral(AND), pp.CaselessLiteral(OR) + + option = (ignore_errors | all_envs).setParseAction(_tag_with(_OPTION)) + options = pp.Group(pp.ZeroOrMore(option)) + operator_test = (eq | neq).setParseAction(_tag_with(TEST)) + operator_logical = (eand | eor).setParseAction(_tag_with(_LOGICAL)) + begin_if = pp.CaselessLiteral(_IF).setParseAction(_tag_with(_IF)) + obj = pp.Word(pp.printables).setParseAction(_tag_with(_OBJ)) + + integer = pp.Combine(sign + number + pp.WordEnd()).setParseAction( + _tag_with(_OBJ, int)) + real = pp.Combine(sign + + ((number + 
dpoint + number) | + (dpoint + number) | + (number + dpoint)) + ).setParseAction(_tag_with(_OBJ, float)) + expritem = integer | real | obj + single_test = expritem + operator_test + expritem + additional_test = operator_logical + single_test + + expr_var = pp.Group(obj + s_end).setParseAction(_tag_with(VALUE)) + expr_test = pp.Group(obj + begin_if + single_test + + pp.ZeroOrMore(additional_test) + + s_end).setParseAction(_tag_with(TEST)) + expr_list_test = pp.Group(begin_if + single_test + + pp.ZeroOrMore(additional_test) + + s_end).setParseAction(_tag_with(LIST_TEST)) + expr = expr_test | expr_var | expr_list_test + line = options + expr + s_end + return line def get_ref_parser(escape_character, reference_sentinels, export_sentinels): _ESCAPE = escape_character @@ -50,8 +104,12 @@ def get_ref_parser(escape_character, reference_sentinels, export_sentinels): _EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + _INV_OPEN + _INV_CLOSE - double_escape = pp.Combine(pp.Literal(_DOUBLE_ESCAPE) + pp.MatchFirst([pp.FollowedBy(_REF_OPEN), pp.FollowedBy(_REF_CLOSE), - pp.FollowedBy(_INV_OPEN), pp.FollowedBy(_INV_CLOSE)])).setParseAction(pp.replaceWith(_ESCAPE)) + double_escape = pp.Combine(pp.Literal(_DOUBLE_ESCAPE) + + pp.MatchFirst([pp.FollowedBy(_REF_OPEN), + pp.FollowedBy(_REF_CLOSE), + pp.FollowedBy(_INV_OPEN), + pp.FollowedBy(_INV_CLOSE)])).setParseAction( + pp.replaceWith(_ESCAPE)) ref_open = pp.Literal(_REF_OPEN).suppress() ref_close = pp.Literal(_REF_CLOSE).suppress() @@ -61,10 +119,10 @@ def get_ref_parser(escape_character, reference_sentinels, export_sentinels): ref_escape_close = pp.Literal(_REF_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_REF_CLOSE)) ref_text = pp.CharsNotIn(_REF_EXCLUDES) | pp.CharsNotIn(_REF_CLOSE_FIRST, exact=1) ref_content = pp.Combine(pp.OneOrMore(ref_not_open + ref_not_close + ref_text)) - ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_string) + ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_tag_with(STR)) ref_item = pp.Forward() ref_items = pp.OneOrMore(ref_item) - reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_reference) + reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_tag_with(REF)) ref_item << (reference | ref_string) inv_open = pp.Literal(_INV_OPEN).suppress() @@ -75,13 +133,13 @@ def get_ref_parser(escape_character, reference_sentinels, export_sentinels): inv_escape_close = pp.Literal(_INV_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_INV_CLOSE)) inv_text = pp.CharsNotIn(_INV_CLOSE_FIRST) inv_content = pp.Combine(pp.OneOrMore(inv_not_close + inv_text)) - inv_string = pp.MatchFirst([double_escape, inv_escape_open, inv_escape_close, inv_content]).setParseAction(_string) + inv_string = pp.MatchFirst([double_escape, inv_escape_open, inv_escape_close, inv_content]).setParseAction(_tag_with(STR)) inv_items = pp.OneOrMore(inv_string) - export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_invquery) + export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_tag_with(INV)) text = pp.CharsNotIn(_EXCLUDES) | pp.CharsNotIn('', exact=1) content = pp.Combine(pp.OneOrMore(ref_not_open + inv_not_open + text)) - string = pp.MatchFirst([double_escape, ref_escape_open, inv_escape_open, content]).setParseAction(_string) + string = pp.MatchFirst([double_escape, ref_escape_open, inv_escape_open, content]).setParseAction(_tag_with(STR)) item = reference | export | string line = 
pp.OneOrMore(item) + pp.StringEnd() @@ -95,9 +153,9 @@ def get_simple_ref_parser(escape_character, reference_sentinels, export_sentinel _INV_CLOSE = export_sentinels[1] _EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + _INV_OPEN + _INV_CLOSE - string = pp.CharsNotIn(_EXCLUDES).setParseAction(_string) + string = pp.CharsNotIn(_EXCLUDES).setParseAction(_tag_with(STR)) ref_open = pp.Literal(_REF_OPEN).suppress() ref_close = pp.Literal(_REF_CLOSE).suppress() - reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_reference) + reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(REF)) line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + pp.StringEnd() return line diff --git a/reclass/values/refitem.py b/reclass/values/refitem.py index 5713346c..df713e1a 100644 --- a/reclass/values/refitem.py +++ b/reclass/values/refitem.py @@ -3,56 +3,24 @@ # # This file is part of reclass # -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from .item import Item -from reclass.defaults import REFERENCE_SENTINELS -from reclass.settings import Settings +from reclass.values import item from reclass.utils.dictpath import DictPath from reclass.errors import ResolveError -class RefItem(Item): +class RefItem(item.ItemWithReferences): - def __init__(self, items, settings): - self.type = Item.REFERENCE - self._settings = settings - self._items = items - self.assembleRefs() + type = item.ItemTypes.REFERENCE def assembleRefs(self, context={}): - self._refs = [] - self._allRefs = True - for item in self._items: - if item.has_references: - item.assembleRefs(context) - self._refs.extend(item.get_references()) - if item.allRefs == False: - self._allRefs = False + super(RefItem, self).assembleRefs(context) try: - strings = [ str(i.render(context, None)) for i in self._items ] + strings = [str(i.render(context, None)) for i in self.contents] value = "".join(strings) self._refs.append(value) except ResolveError as e: - self._allRefs = False - - @property - def contents(self): - return self._items - - @property - def allRefs(self): - return self._allRefs - - @property - def has_references(self): - return len(self._refs) > 0 - - def get_references(self): - return self._refs + self.allRefs = False def _resolve(self, ref, context): path = DictPath(self._settings.delimiter, ref) @@ -62,14 +30,13 @@ def _resolve(self, ref, context): raise ResolveError(ref) def render(self, context, inventory): - if len(self._items) == 1: - return self._resolve(self._items[0].render(context, inventory), context) - strings = [ str(i.render(context, inventory)) for i in self._items ] + if len(self.contents) == 1: + return self._resolve(self.contents[0].render(context, inventory), + context) + strings = [str(i.render(context, inventory)) for i in self.contents] return self._resolve("".join(strings), context) - def __repr__(self): - return 'RefItem(%r)' % self._items - def __str__(self): - strings = [ str(i) for i in self._items ] - return '{0}{1}{2}'.format(REFERENCE_SENTINELS[0], ''.join(strings), REFERENCE_SENTINELS[1]) + strings = [str(i) for i in self.contents] + rs = self._settings.reference_sentinels + return '{0}{1}{2}'.format(rs[0], ''.join(strings), rs[1]) diff --git a/reclass/values/scaitem.py b/reclass/values/scaitem.py index c65f3020..1bcbd2c9 100644 --- a/reclass/values/scaitem.py +++ b/reclass/values/scaitem.py @@ -9,29 +9,23 @@ from __future__ import unicode_literals from 
reclass.settings import Settings -from .item import Item +from reclass.values import item -class ScaItem(Item): - def __init__(self, value, settings): - self.type = Item.SCALAR - self._value = value - self._settings = settings +class ScaItem(item.Item): + + type = item.ItemTypes.SCALAR - @property - def contents(self): - return self._value + def __init__(self, value, settings): + super(ScaItem, self).__init__(value, settings) - def merge_over(self, item): - if item.type == Item.SCALAR or item.type == Item.COMPOSITE: + def merge_over(self, other): + if other.type in [item.ItemTypes.SCALAR, item.ItemTypes.COMPOSITE]: return self - raise RuntimeError('Trying to merge %s over %s' % (repr(self), repr(item))) + raise RuntimeError('Failed to merge %s over %s' % (self, other)) def render(self, context, inventory): - return self._value - - def __repr__(self): - return 'ScaItem({0!r})'.format(self._value) + return self.contents def __str__(self): - return str(self._value) + return str(self.contents) diff --git a/reclass/values/tests/test_compitem.py b/reclass/values/tests/test_compitem.py index 3d63d3b5..71a6f0e9 100644 --- a/reclass/values/tests/test_compitem.py +++ b/reclass/values/tests/test_compitem.py @@ -96,7 +96,6 @@ def test_merge_over_merge_scalar(self): self.assertEquals(result, composite) - def test_merge_over_merge_composite(self): val1 = Value(None, SETTINGS, '') val2 = Value(None, SETTINGS, '') @@ -107,32 +106,6 @@ def test_merge_over_merge_composite(self): self.assertEquals(result, composite2) - @unittest.skip("self._value bug") - def test_merge_over_merge_list_scalar_allowed(self): - # This nice bunch of lines below breaks merge because fuck you that's - # why. Seriously so, merger_over simply is not working for Composites - sets = Settings() - sets.allow_scalar_override = True - val1 = Value(None, SETTINGS, '') - listitem = ListItem([1], SETTINGS) - composite = CompItem([val1], sets) - - result = composite.merge_over(listitem) - - self.assertEquals(result, composite2) - - @unittest.skip("self._value bug") - def test_merge_over_merge_list_override_allowed(self): - sets = Settings() - sets.allow_none_override = True - val1 = Value(None, SETTINGS, '') - listitem = ListItem([1], SETTINGS) - composite = CompItem([val1], sets) - - result = composite.merge_over(listitem) - - self.assertEquals(result, composite2) - def test_merge_over_merge_list_not_allowed(self): val1 = Value(None, SETTINGS, '') listitem = ListItem([1], SETTINGS) @@ -140,31 +113,6 @@ def test_merge_over_merge_list_not_allowed(self): self.assertRaises(RuntimeError, composite.merge_over, listitem) - - @unittest.skip("self._value bug") - def test_merge_dict_scalar_allowed(self): - sets = Settings() - sets.allow_scalar_override = True - val1 = Value(None, SETTINGS, '') - dictitem = DictItem({'foo': 'bar'}, SETTINGS) - composite = CompItem([val1], sets) - - result = composite.merge_over(dictitem) - - self.assertEquals(result, composite) - - @unittest.skip("self._value bug") - def test_merge_dict_override_allowed(self): - sets = Settings() - sets.allow_none_override = True - val1 = Value(None, SETTINGS, '') - dictitem = DictItem([1], SETTINGS) - composite = CompItem([val1], sets) - - result = composite.merge_over(dictitem) - - self.assertEquals(result, composite) - def test_merge_dict_dict_not_allowed(self): val1 = Value(None, SETTINGS, '') dictitem = DictItem({'foo': 'bar'}, SETTINGS) diff --git a/reclass/values/value.py b/reclass/values/value.py index 613d5537..affd9441 100644 --- a/reclass/values/value.py +++ 
b/reclass/values/value.py @@ -23,7 +23,7 @@ class Value(object): def __init__(self, value, settings, uri, parse_string=True): self._settings = settings self._uri = uri - self._overwrite = False + self.overwrite = False self._constant = False if isinstance(value, string_types): if parse_string: @@ -42,12 +42,8 @@ def __init__(self, value, settings, uri, parse_string=True): self._item = ScaItem(value, self._settings) @property - def overwrite(self): - return self._overwrite - - @overwrite.setter - def overwrite(self, overwrite): - self._overwrite = overwrite + def uri(self): + return self._uri @property def constant(self): @@ -57,10 +53,6 @@ def constant(self): def constant(self, constant): self._constant = constant - @property - def uri(self): - return self._uri - def item_type(self): return self._item.type @@ -82,14 +74,15 @@ def has_references(self): def has_inv_query(self): return self._item.has_inv_query + @property def needs_all_envs(self): if self._item.has_inv_query: - return self._item.needs_all_envs() + return self._item.needs_all_envs else: return False def ignore_failed_render(self): - return self._item.ignore_failed_render() + return self._item.ignore_failed_render @property def is_complex(self): diff --git a/reclass/values/valuelist.py b/reclass/values/valuelist.py index b4a089da..a56395be 100644 --- a/reclass/values/valuelist.py +++ b/reclass/values/valuelist.py @@ -8,31 +8,28 @@ from __future__ import print_function from __future__ import unicode_literals -from __future__ import print_function - import copy import sys from reclass.errors import ChangedConstantError, ResolveError, TypeMergeError - class ValueList(object): def __init__(self, value, settings): self._settings = settings self._refs = [] - self._allRefs = True - self._values = [ value ] + self.allRefs = True + self._values = [value] self._inv_refs = [] self._has_inv_query = False - self._ignore_failed_render = False + self.ignore_failed_render = False self._is_complex = False self._update() @property def uri(self): - return '; '.join([ str(x.uri) for x in self._values ]) + return '; '.join([str(x.uri) for x in self._values]) def append(self, value): self._values.append(value) @@ -69,34 +66,27 @@ def is_complex(self): def get_references(self): return self._refs - @property - def allRefs(self): - return self._allRefs - - def ignore_failed_render(self): - return self._ignore_failed_render - def _check_for_inv_query(self): self._has_inv_query = False - self._ignore_failed_render = True + self.ignore_failed_render = True for value in self._values: if value.has_inv_query: self._inv_refs.extend(value.get_inv_references) self._has_inv_query = True if vale.ignore_failed_render() is False: - self._ignore_failed_render = False + self.ignore_failed_render = False if self._has_inv_query is False: - self._ignore_failed_render = False + self.ignore_failed_render = False def assembleRefs(self, context={}): self._refs = [] - self._allRefs = True + self.allRefs = True for value in self._values: value.assembleRefs(context) if value.has_references: self._refs.extend(value.get_references()) if value.allRefs is False: - self._allRefs = False + self.allRefs = False def merge(self): output = None @@ -118,12 +108,17 @@ def render(self, context, inventory): try: new = value.render(context, inventory) except ResolveError as e: - # only ignore failed renders if ignore_overwritten_missing_references is set and we are dealing with a scalar value - # and it's not the last item in the values list - if 
self._settings.ignore_overwritten_missing_references and not isinstance(output, (dict, list)) and n != (len(self._values)-1): + # only ignore failed renders if + # ignore_overwritten_missing_references is set and we are + # dealing with a scalar value and it's not the last item in the + # values list + if (self._settings.ignore_overwritten_missing_references + and not isinstance(output, (dict, list)) + and n != (len(self._values)-1)): new = None last_error = e - print("[WARNING] Reference '%s' undefined" % str(value), file=sys.stderr) + print("[WARNING] Reference '%s' undefined" % str(value), + file=sys.stderr) else: raise e @@ -190,6 +185,3 @@ def render(self, context, inventory): raise last_error return output - - def __repr__(self): - return 'ValueList(%r)' % self._values diff --git a/requirements.txt b/requirements.txt index 66f0f4b9..5b3aadd1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ pyparsing pyyaml -pygit2 six +enum34 diff --git a/setup.py b/setup.py index 789b0fdd..884be880 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ url = URL, packages = find_packages(exclude=['*tests']), #FIXME validate this entry_points = { 'console_scripts': console_scripts }, - install_requires = ['pyparsing', 'pyyaml', 'six'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) + install_requires = ['pyparsing', 'pyyaml', 'six', 'enum34'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) classifiers=[ 'Development Status :: 4 - Beta', From c04e1b04ad8bbd0a564572c23ee8a314b16637ea Mon Sep 17 00:00:00 2001 From: Luis Buriola Date: Tue, 17 Jul 2018 16:57:35 +0100 Subject: [PATCH 03/63] Add option add_subdir_to_node This allows files in different subfolders to have the same name More information on README-extentions.rst --- README-extentions.rst | 31 ++++++++++++++++++++++++++++- reclass/__init__.py | 4 ++-- reclass/adapters/ansible.py | 5 ++++- reclass/adapters/salt.py | 8 +++++--- reclass/cli.py | 5 ++++- reclass/config.py | 3 +++ reclass/defaults.py | 1 + reclass/settings.py | 4 +++- reclass/storage/yaml_fs/__init__.py | 8 ++++++-- reclass/tests/test_core.py | 2 +- 10 files changed, 59 insertions(+), 12 deletions(-) diff --git a/README-extentions.rst b/README-extentions.rst index ec10e489..f57b7604 100644 --- a/README-extentions.rst +++ b/README-extentions.rst @@ -228,7 +228,7 @@ Given the following classes: parameters: y: 1 - + The parameter ``a`` only depends on the parameter ``y`` through the reference set in class2. The fact that the parameter ``x`` referenced in class1 is not defined does not affect the final value of the parameter ``a``. For such overwritten missing references by default a warning is printed but no error is raised, providing the final value of the parameter being evaluated is a scalar. If the final value is a dictionary or list @@ -507,3 +507,32 @@ In practise the exports:cluster key can be set using a parameter reference: The above exports and parameter definitions could be put into a separate class and then included by nodes which require access to the database and included by the database server as well. + + +Add subfolders to node name +--------------------------- + +Nodes can be defined in subdirectories. However, node names (filename) must be unique across all subdirectories. + +For example, the following file structure is invalid: + +.. code-block:: yaml + + inventory/nodes/prod/mysql.yml + inventory/nodes/staging/mysql.yml + +With setting: + +.. 
code-block:: yaml + + add_subdir_to_node: True # default False + +This adds the subfolder to the node name and the structure above can then be used. It generates the following reclass objects: + +.. code-block:: yaml + + nodes: + prod.mysql: + ... + staging.mysql: + ... diff --git a/reclass/__init__.py b/reclass/__init__.py index 3739b5e1..82fa0ab4 100644 --- a/reclass/__init__.py +++ b/reclass/__init__.py @@ -15,9 +15,9 @@ from .storage.loader import StorageBackendLoader from .storage.memcache_proxy import MemcacheProxy -def get_storage(storage_type, nodes_uri, classes_uri, **kwargs): +def get_storage(storage_type, nodes_uri, classes_uri, add_subdir_to_node, **kwargs): storage_class = StorageBackendLoader(storage_type).load() - return MemcacheProxy(storage_class(nodes_uri, classes_uri, **kwargs)) + return MemcacheProxy(storage_class(nodes_uri, classes_uri, add_subdir_to_node, **kwargs)) def get_path_mangler(storage_type,**kwargs): return StorageBackendLoader(storage_type).path_mangler() diff --git a/reclass/adapters/ansible.py b/reclass/adapters/ansible.py index abf7df29..3ebe5752 100755 --- a/reclass/adapters/ansible.py +++ b/reclass/adapters/ansible.py @@ -66,7 +66,10 @@ def add_ansible_options_group(parser, defaults): add_options_cb=add_ansible_options_group, defaults=defaults) - storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri) + storage = get_storage(options.storage_type, + options.nodes_uri, + options.classes_uri, + options.add_subdir_to_node) class_mappings = defaults.get('class_mappings') defaults.update(vars(options)) settings = Settings(defaults) diff --git a/reclass/adapters/salt.py b/reclass/adapters/salt.py index ce4e7925..10a6a43c 100755 --- a/reclass/adapters/salt.py +++ b/reclass/adapters/salt.py @@ -31,11 +31,12 @@ def ext_pillar(minion_id, pillar, classes_uri=OPT_CLASSES_URI, class_mappings=None, propagate_pillar_data_to_reclass=False, + add_subdir_to_node=OPT_ADD_SUBDIR_TO_NODE, **kwargs): path_mangler = get_path_mangler(storage_type) nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) - storage = get_storage(storage_type, nodes_uri, classes_uri) + storage = get_storage(storage_type, nodes_uri, classes_uri, add_subdir_to_node) input_data = None if propagate_pillar_data_to_reclass: input_data = pillar @@ -54,11 +55,12 @@ def ext_pillar(minion_id, pillar, def top(minion_id, storage_type=OPT_STORAGE_TYPE, inventory_base_uri=OPT_INVENTORY_BASE_URI, nodes_uri=OPT_NODES_URI, - classes_uri=OPT_CLASSES_URI, class_mappings=None, **kwargs): + classes_uri=OPT_CLASSES_URI, class_mappings=None, add_subdir_to_node=OPT_ADD_SUBDIR_TO_NODE, + **kwargs): path_mangler = get_path_mangler(storage_type) nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) - storage = get_storage(storage_type, nodes_uri, classes_uri) + storage = get_storage(storage_type, nodes_uri, classes_uri, add_subdir_to_node) settings = Settings(kwargs) reclass = Core(storage, class_mappings, settings, input_data=None) diff --git a/reclass/cli.py b/reclass/cli.py index 44694c5a..89f3a8b2 100644 --- a/reclass/cli.py +++ b/reclass/cli.py @@ -31,7 +31,10 @@ def main(): defaults.update(find_and_read_configfile()) options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, defaults=defaults) - storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri) + storage = get_storage(options.storage_type, + options.nodes_uri, + options.classes_uri, + options.add_subdir_to_node) class_mappings = 
defaults.get('class_mappings') defaults.update(vars(options)) settings = Settings(defaults) diff --git a/reclass/config.py b/reclass/config.py index 1a6ba81d..cc3c35eb 100644 --- a/reclass/config.py +++ b/reclass/config.py @@ -36,6 +36,9 @@ def make_db_options_group(parser, defaults={}): ret.add_option('-z', '--ignore-class-notfound', dest='ignore_class_notfound', default=defaults.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND), help='decision for not found classes [%default]') + ret.add_option('-a', '--add-subdir-to-node', dest='add_subdir_to_node', action="store_true", + default=defaults.get('add_subdir_to_node', OPT_ADD_SUBDIR_TO_NODE), + help='Add subdir when generating node names. [%default]') ret.add_option('-x', '--ignore-class-notfound-regexp', dest='ignore_class_notfound_regexp', default=defaults.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP), help='regexp for not found classes [%default]') diff --git a/reclass/defaults.py b/reclass/defaults.py index 1e50c0e7..095ff367 100644 --- a/reclass/defaults.py +++ b/reclass/defaults.py @@ -21,6 +21,7 @@ OPT_CLASSES_URI = 'classes' OPT_PRETTY_PRINT = True OPT_GROUP_ERRORS = True +OPT_ADD_SUBDIR_TO_NODE = False OPT_NO_REFS = False OPT_OUTPUT = 'yaml' diff --git a/reclass/settings.py b/reclass/settings.py index 51c518f1..f2a6e1ba 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -32,6 +32,7 @@ def __init__(self, options={}): self.reference_sentinels = options.get('reference_sentinels', REFERENCE_SENTINELS) self.ignore_class_notfound = options.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND) self.strict_constant_parameters = options.get('strict_constant_parameters', OPT_STRICT_CONSTANT_PARAMETERS) + self.add_subdir_to_node = options.get('add_subdir_to_node', OPT_ADD_SUBDIR_TO_NODE) self.ignore_class_notfound_regexp = options.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP) if isinstance(self.ignore_class_notfound_regexp, string_types): @@ -65,7 +66,8 @@ def __eq__(self, other): and self.ignore_class_notfound == other.ignore_class_notfound \ and self.ignore_class_notfound_regexp == other.ignore_class_notfound_regexp \ and self.ignore_class_notfound_warning == other.ignore_class_notfound_warning \ - and self.strict_constant_parameters == other.strict_constant_parameters + and self.strict_constant_parameters == other.strict_constant_parameters \ + and self.add_subdir_to_node == other.add_subdir_to_node def __copy__(self): cls = self.__class__ diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index a102f31e..e511d447 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -55,12 +55,16 @@ def _path_mangler_inner(path): class ExternalNodeStorage(NodeStorageBase): - def __init__(self, nodes_uri, classes_uri): + def __init__(self, nodes_uri, classes_uri, add_subdir_to_node): super(ExternalNodeStorage, self).__init__(STORAGE_NAME) if nodes_uri is not None: self._nodes_uri = nodes_uri - self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.nodes) + if add_subdir_to_node: + self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.classes) + else: + self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.nodes) + if classes_uri is not None: self._classes_uri = classes_uri diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index 679d6ca0..c79630a3 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -27,8 +27,8 @@ def _core(self, dataset, opts={}): 
inventory_uri = os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset path_mangler = get_path_mangler('yaml_fs') nodes_uri, classes_uri = path_mangler(inventory_uri, 'nodes', 'classes') - storage = get_storage('yaml_fs', nodes_uri, classes_uri) settings = Settings(opts) + storage = get_storage('yaml_fs', nodes_uri, classes_uri, settings.add_subdir_to_node) return Core(storage, None, settings) def test_type_conversion(self): From 294cc43644bc53b621be4d62b3026e13d3c5dcea Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Mon, 23 Jul 2018 07:04:31 +0200 Subject: [PATCH 04/63] Update third.yml Add mumeric keys to test model --- test/model/default/classes/third.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test/model/default/classes/third.yml b/test/model/default/classes/third.yml index 135acd43..81fd9790 100644 --- a/test/model/default/classes/third.yml +++ b/test/model/default/classes/third.yml @@ -10,6 +10,12 @@ parameters: fail: at: tree: ${_param:notfound} + 1: + an_numeric_key: true + as_a_dict: 1 + 2: + - as_a_list + 3: value three: ${two} empty: list: [] From 4ae9ef7d2bebd14e020f67ca06e83eb4064f36eb Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Mon, 23 Jul 2018 07:39:55 +0200 Subject: [PATCH 05/63] Fix numeric keys at key prefix check --- reclass/datatypes/parameters.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/reclass/datatypes/parameters.py b/reclass/datatypes/parameters.py index 1db35eb5..ee404ce1 100644 --- a/reclass/datatypes/parameters.py +++ b/reclass/datatypes/parameters.py @@ -170,6 +170,8 @@ def _merge_dict(self, cur, new): """ for (key, value) in iteritems(new): + # check key for "control" preffixes (~,=,...) + key = str(key) if key[0] in self._settings.dict_key_prefixes: newkey = key[1:] if not isinstance(value, Value): From 8de37a3ee2dfd70d81219273a1ecc1699098bc85 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Mon, 23 Jul 2018 10:31:10 +0200 Subject: [PATCH 06/63] update changelog --- doc/source/changelog.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index d7aa7b2b..fb8cbc0f 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,6 +5,17 @@ ChangeLog ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== +1.5.5 2018-07 * Add immutable (constant) parameters + * Fixes +1.5.4 2018-05 * Add support for salt 2018.3 + * Add support for python 2.7/3.x + * Extend tests coverage +1.5.3 2018 * Add new features + fixes + - last 'known' full compatible release with original reclass + - release shipped as well as .deb package at mirror.mirantis.com +1.5.x 2017 * Project forked under salt-formulas/reclass + - based on @andrewpickford fork and community fixes + - features against original are in README-extensions.rst 1.4.1 2014-10-28 * Revert debug logging, which wasn't fault-free and so it needs more time to mature. 
1.4 2014-10-25 * Add rudimentary debug logging From 96d7ace715144445ece2ec40554bf4badae7a339 Mon Sep 17 00:00:00 2001 From: Luis Buriola Date: Sun, 22 Jul 2018 22:00:02 +0100 Subject: [PATCH 07/63] Rename option add_subdir_to_node to compose_node_name --- README-extentions.rst | 4 ++-- reclass/__init__.py | 4 ++-- reclass/adapters/ansible.py | 2 +- reclass/adapters/salt.py | 8 ++++---- reclass/cli.py | 2 +- reclass/config.py | 4 ++-- reclass/defaults.py | 2 +- reclass/settings.py | 4 ++-- reclass/storage/yaml_fs/__init__.py | 4 ++-- reclass/tests/test_core.py | 2 +- 10 files changed, 18 insertions(+), 18 deletions(-) diff --git a/README-extentions.rst b/README-extentions.rst index f57b7604..2053a2d5 100644 --- a/README-extentions.rst +++ b/README-extentions.rst @@ -509,7 +509,7 @@ The above exports and parameter definitions could be put into a separate class a access to the database and included by the database server as well. -Add subfolders to node name +Compose node name --------------------------- Nodes can be defined in subdirectories. However, node names (filename) must be unique across all subdirectories. @@ -525,7 +525,7 @@ With setting: .. code-block:: yaml - add_subdir_to_node: True # default False + compose_node_name: True # default False This adds the subfolder to the node name and the structure above can then be used. It generates the following reclass objects: diff --git a/reclass/__init__.py b/reclass/__init__.py index 82fa0ab4..2167a303 100644 --- a/reclass/__init__.py +++ b/reclass/__init__.py @@ -15,9 +15,9 @@ from .storage.loader import StorageBackendLoader from .storage.memcache_proxy import MemcacheProxy -def get_storage(storage_type, nodes_uri, classes_uri, add_subdir_to_node, **kwargs): +def get_storage(storage_type, nodes_uri, classes_uri, compose_node_name, **kwargs): storage_class = StorageBackendLoader(storage_type).load() - return MemcacheProxy(storage_class(nodes_uri, classes_uri, add_subdir_to_node, **kwargs)) + return MemcacheProxy(storage_class(nodes_uri, classes_uri, compose_node_name, **kwargs)) def get_path_mangler(storage_type,**kwargs): return StorageBackendLoader(storage_type).path_mangler() diff --git a/reclass/adapters/ansible.py b/reclass/adapters/ansible.py index 3ebe5752..be671985 100755 --- a/reclass/adapters/ansible.py +++ b/reclass/adapters/ansible.py @@ -69,7 +69,7 @@ def add_ansible_options_group(parser, defaults): storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri, - options.add_subdir_to_node) + options.compose_node_name) class_mappings = defaults.get('class_mappings') defaults.update(vars(options)) settings = Settings(defaults) diff --git a/reclass/adapters/salt.py b/reclass/adapters/salt.py index 10a6a43c..523b0c46 100755 --- a/reclass/adapters/salt.py +++ b/reclass/adapters/salt.py @@ -31,12 +31,12 @@ def ext_pillar(minion_id, pillar, classes_uri=OPT_CLASSES_URI, class_mappings=None, propagate_pillar_data_to_reclass=False, - add_subdir_to_node=OPT_ADD_SUBDIR_TO_NODE, + compose_node_name=OPT_COMPOSE_NODE_NAME, **kwargs): path_mangler = get_path_mangler(storage_type) nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) - storage = get_storage(storage_type, nodes_uri, classes_uri, add_subdir_to_node) + storage = get_storage(storage_type, nodes_uri, classes_uri, compose_node_name) input_data = None if propagate_pillar_data_to_reclass: input_data = pillar @@ -55,12 +55,12 @@ def ext_pillar(minion_id, pillar, def top(minion_id, storage_type=OPT_STORAGE_TYPE, 
inventory_base_uri=OPT_INVENTORY_BASE_URI, nodes_uri=OPT_NODES_URI, - classes_uri=OPT_CLASSES_URI, class_mappings=None, add_subdir_to_node=OPT_ADD_SUBDIR_TO_NODE, + classes_uri=OPT_CLASSES_URI, class_mappings=None, compose_node_name=OPT_COMPOSE_NODE_NAME, **kwargs): path_mangler = get_path_mangler(storage_type) nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) - storage = get_storage(storage_type, nodes_uri, classes_uri, add_subdir_to_node) + storage = get_storage(storage_type, nodes_uri, classes_uri, compose_node_name) settings = Settings(kwargs) reclass = Core(storage, class_mappings, settings, input_data=None) diff --git a/reclass/cli.py b/reclass/cli.py index 89f3a8b2..38bd5fc4 100644 --- a/reclass/cli.py +++ b/reclass/cli.py @@ -34,7 +34,7 @@ def main(): storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri, - options.add_subdir_to_node) + options.compose_node_name) class_mappings = defaults.get('class_mappings') defaults.update(vars(options)) settings = Settings(defaults) diff --git a/reclass/config.py b/reclass/config.py index cc3c35eb..d24f7fd3 100644 --- a/reclass/config.py +++ b/reclass/config.py @@ -36,8 +36,8 @@ def make_db_options_group(parser, defaults={}): ret.add_option('-z', '--ignore-class-notfound', dest='ignore_class_notfound', default=defaults.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND), help='decision for not found classes [%default]') - ret.add_option('-a', '--add-subdir-to-node', dest='add_subdir_to_node', action="store_true", - default=defaults.get('add_subdir_to_node', OPT_ADD_SUBDIR_TO_NODE), + ret.add_option('-a', '--compose-node-name', dest='compose_node_name', action="store_true", + default=defaults.get('compose_node_name', OPT_COMPOSE_NODE_NAME), help='Add subdir when generating node names. 
[%default]') ret.add_option('-x', '--ignore-class-notfound-regexp', dest='ignore_class_notfound_regexp', default=defaults.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP), diff --git a/reclass/defaults.py b/reclass/defaults.py index 095ff367..f240f3f9 100644 --- a/reclass/defaults.py +++ b/reclass/defaults.py @@ -21,7 +21,7 @@ OPT_CLASSES_URI = 'classes' OPT_PRETTY_PRINT = True OPT_GROUP_ERRORS = True -OPT_ADD_SUBDIR_TO_NODE = False +OPT_COMPOSE_NODE_NAME = False OPT_NO_REFS = False OPT_OUTPUT = 'yaml' diff --git a/reclass/settings.py b/reclass/settings.py index f2a6e1ba..3e223cc1 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -32,7 +32,7 @@ def __init__(self, options={}): self.reference_sentinels = options.get('reference_sentinels', REFERENCE_SENTINELS) self.ignore_class_notfound = options.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND) self.strict_constant_parameters = options.get('strict_constant_parameters', OPT_STRICT_CONSTANT_PARAMETERS) - self.add_subdir_to_node = options.get('add_subdir_to_node', OPT_ADD_SUBDIR_TO_NODE) + self.compose_node_name = options.get('compose_node_name', OPT_COMPOSE_NODE_NAME) self.ignore_class_notfound_regexp = options.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP) if isinstance(self.ignore_class_notfound_regexp, string_types): @@ -67,7 +67,7 @@ def __eq__(self, other): and self.ignore_class_notfound_regexp == other.ignore_class_notfound_regexp \ and self.ignore_class_notfound_warning == other.ignore_class_notfound_warning \ and self.strict_constant_parameters == other.strict_constant_parameters \ - and self.add_subdir_to_node == other.add_subdir_to_node + and self.compose_node_name == other.compose_node_name def __copy__(self): cls = self.__class__ diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index e511d447..88f0ec43 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -55,12 +55,12 @@ def _path_mangler_inner(path): class ExternalNodeStorage(NodeStorageBase): - def __init__(self, nodes_uri, classes_uri, add_subdir_to_node): + def __init__(self, nodes_uri, classes_uri, compose_node_name): super(ExternalNodeStorage, self).__init__(STORAGE_NAME) if nodes_uri is not None: self._nodes_uri = nodes_uri - if add_subdir_to_node: + if compose_node_name: self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.classes) else: self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.nodes) diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index c79630a3..047bf24b 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -28,7 +28,7 @@ def _core(self, dataset, opts={}): path_mangler = get_path_mangler('yaml_fs') nodes_uri, classes_uri = path_mangler(inventory_uri, 'nodes', 'classes') settings = Settings(opts) - storage = get_storage('yaml_fs', nodes_uri, classes_uri, settings.add_subdir_to_node) + storage = get_storage('yaml_fs', nodes_uri, classes_uri, settings.compose_node_name) return Core(storage, None, settings) def test_type_conversion(self): From a7bfd74a1067a667888e3bf01aff95ea56fdd516 Mon Sep 17 00:00:00 2001 From: Luis Buriola Date: Mon, 23 Jul 2018 10:43:46 +0100 Subject: [PATCH 08/63] Ignore path starting with _ on compose_node_name https://github.com/salt-formulas/reclass/pull/48#issuecomment-406823623 --- README-extentions.rst | 2 ++ reclass/storage/common.py | 11 +++++++++++ reclass/storage/yaml_fs/__init__.py | 2 +- 3 files changed, 14 insertions(+), 1 
deletion(-) diff --git a/README-extentions.rst b/README-extentions.rst index 2053a2d5..2bc4816e 100644 --- a/README-extentions.rst +++ b/README-extentions.rst @@ -536,3 +536,5 @@ This adds the subfolder to the node name and the structure above can then be use ... staging.mysql: ... + +If the subfolder path starts with the underscore character ``_``, then the subfolder path is NOT added to the node name. diff --git a/reclass/storage/common.py b/reclass/storage/common.py index 7de71d00..13db7ec8 100644 --- a/reclass/storage/common.py +++ b/reclass/storage/common.py @@ -13,6 +13,17 @@ def nodes(relpath, name): # no mangling required return relpath, name + @staticmethod + def composed_nodes(relpath, name): + if relpath == '.' or relpath == '': + # './' is converted to None + return None, name + parts = relpath.split(os.path.sep) + if parts[0].startswith("_"): + return relpath, name + parts.append(name) + return relpath, '.'.join(parts) + @staticmethod def classes(relpath, name): if relpath == '.' or relpath == '': diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 88f0ec43..7ed3fe46 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -61,7 +61,7 @@ def __init__(self, nodes_uri, classes_uri, compose_node_name): if nodes_uri is not None: self._nodes_uri = nodes_uri if compose_node_name: - self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.classes) + self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.composed_nodes) else: self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.nodes) From 7102e87e13c2dfd3f2988776ccbac7a50f8da185 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Wed, 1 Aug 2018 13:27:36 +0200 Subject: [PATCH 09/63] bump 1.5.6 version --- doc/source/changelog.rst | 3 +++ reclass/version.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index fb8cbc0f..d29e8377 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,6 +5,9 @@ ChangeLog ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== +1.5.6 2018-07-30 * Fix, usage of integers as pillar keys + * Refactoring python codebase by @a-ovchinkonv + * New feature, "compose node name" from node subdirectory structure (by @gburiola) 1.5.5 2018-07 * Add immutable (constant) parameters * Fixes 1.5.4 2018-05 * Add support for salt 2018.3 diff --git a/reclass/version.py b/reclass/version.py index ee7098f7..6d7d7eb3 100644 --- a/reclass/version.py +++ b/reclass/version.py @@ -13,7 +13,7 @@ RECLASS_NAME = 'reclass' DESCRIPTION = 'merge data by recursive descent down an ancestry hierarchy (forked extended version)' -VERSION = '1.5.5' +VERSION = '1.5.6' AUTHOR = 'martin f. krafft / Andrew Pickford / salt-formulas community' AUTHOR_EMAIL = 'salt-formulas@freelists.org' MAINTAINER = 'salt-formulas community' From d15bffcc766d386c040d2891174f1b003e1bfe6e Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Sat, 4 Aug 2018 16:22:40 +0200 Subject: [PATCH 10/63] Fix, class name references in 2nd level Classes processed deeper in the "descent" had merge_base cleared (note, merge happens later, on line 143); then references that actually worked on 1st step failed on the another descend. 
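
A minimal sketch of the affected layout (all file and class names here are hypothetical, picked for illustration only and not taken from the repository's test models; a classes/cluster/production.yml is assumed to exist): a class name built from a reference resolves on the first level of the descent, but before this change the same reference could fail once the referencing class was pulled in through another class.

    # nodes/mynode.yml (hypothetical)
    classes:
    - config
    - wrapper

    # classes/config.yml (hypothetical), provides the parameter used below
    parameters:
      _param:
        cluster_name: production

    # classes/wrapper.yml (hypothetical), pulls the referencing class in one
    # level deeper
    classes:
    - app

    # classes/app.yml (hypothetical): the reference resolves to
    # cluster.production when this class is listed directly on the node,
    # but failed when included via wrapper, for the reason described above
    classes:
    - cluster.${_param:cluster_name}

With the accumulated context passed to the recursive call, such class name references keep resolving on deeper levels as well.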
--- reclass/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/core.py b/reclass/core.py index 2facfbee..bc897387 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -136,7 +136,7 @@ def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, node e.uri = entity.uri raise - descent = self._recurse_entity(class_entity, context=merge_base, seen=seen, + descent = self._recurse_entity(class_entity, context=context, seen=seen, nodename=nodename, environment=environment) # on every iteration, we merge the result of the recursive # descent into what we have so far… From 7ce295c67f20d434890882308f6f163fc8d9ab31 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Fri, 24 Aug 2018 13:15:06 +0200 Subject: [PATCH 11/63] Fix references bug introduced in refactoring BaseTestExpression objects return lists of references so LogicTest objects should extend and not append their master reference list with the lists from BaseTestExpression objects --- reclass/values/invitem.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py index 0179f4f3..3874f397 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -110,8 +110,9 @@ def __init__(self, expr, delimiter): subtests = list(it.compress(expr, it.cycle([1, 1, 1, 0]))) self._els = [EqualityTest(subtests[j:j+3], self._delimiter) for j in range(0, len(subtests), 3)] - self.refs = [x.refs for x in self._els] - self.inv_refs = [x.inv_refs for x in self._els] + for x in self._els: + self.refs.extend(x.refs) + self.inv_refs.extend(x.inv_refs) try: self._ops = [self.known_operators[x[1]] for x in expr[3::4]] except KeyError as e: From 97b87cbf789067b69654f1829431832e170bda68 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Tue, 28 Aug 2018 13:25:58 +0200 Subject: [PATCH 12/63] Update mixed and yaml_git for node name changes introduced with compose_node_name option This fixes method signatures in mixed and yaml_git to match the changes in yaml_fs. The mixed storage type just passes on the new compose_node_name option which will work correctly. However for the yaml_git storage type the compose_node_name=true option has not been tested only the old functionality which corresponds to compose_node_name=false has been tested. --- reclass/storage/__init__.py | 12 ++++++++++++ reclass/storage/mixed/__init__.py | 14 +++++++------- reclass/storage/yaml_fs/__init__.py | 15 +++++---------- reclass/storage/yaml_git/__init__.py | 21 +++++++++++++-------- 4 files changed, 37 insertions(+), 25 deletions(-) diff --git a/reclass/storage/__init__.py b/reclass/storage/__init__.py index 3b46a2af..fe873e31 100644 --- a/reclass/storage/__init__.py +++ b/reclass/storage/__init__.py @@ -11,6 +11,7 @@ from __future__ import print_function from __future__ import unicode_literals +from reclass.storage.common import NameMangler class NodeStorageBase(object): @@ -34,3 +35,14 @@ def enumerate_nodes(self): def path_mangler(self): msg = "Storage class '{0}' does not implement path_mangler." 
raise NotImplementedError(msg.format(self.name)) + + +class ExternalNodeStorageBase(NodeStorageBase): + + def __init__(self, name, compose_node_name): + super(ExternalNodeStorageBase, self).__init__(name) + self.class_name_mangler = NameMangler.classes + if compose_node_name: + self.node_name_mangler = NameMangler.composed_nodes + else: + self.node_name_mangler = NameMangler.nodes diff --git a/reclass/storage/mixed/__init__.py b/reclass/storage/mixed/__init__.py index 6324c747..45262cca 100644 --- a/reclass/storage/mixed/__init__.py +++ b/reclass/storage/mixed/__init__.py @@ -14,7 +14,7 @@ import reclass.errors from reclass import get_storage -from reclass.storage import NodeStorageBase +from reclass.storage import ExternalNodeStorageBase def path_mangler(inventory_base_uri, nodes_uri, classes_uri): if nodes_uri == classes_uri: @@ -23,17 +23,17 @@ def path_mangler(inventory_base_uri, nodes_uri, classes_uri): STORAGE_NAME = 'mixed' -class ExternalNodeStorage(NodeStorageBase): +class ExternalNodeStorage(ExternalNodeStorageBase): MixedUri = collections.namedtuple('MixedURI', 'storage_type options') - def __init__(self, nodes_uri, classes_uri): - super(ExternalNodeStorage, self).__init__(STORAGE_NAME) + def __init__(self, nodes_uri, classes_uri, compose_node_name): + super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) self._nodes_uri = self._uri(nodes_uri) - self._nodes_storage = get_storage(self._nodes_uri.storage_type, self._nodes_uri.options, None) + self._nodes_storage = get_storage(self._nodes_uri.storage_type, self._nodes_uri.options, None, compose_node_name) self._classes_default_uri = self._uri(classes_uri) - self._classes_default_storage = get_storage(self._classes_default_uri.storage_type, None, self._classes_default_uri.options) + self._classes_default_storage = get_storage(self._classes_default_uri.storage_type, None, self._classes_default_uri.options, compose_node_name) self._classes_storage = dict() if 'env_overrides' in classes_uri: @@ -42,7 +42,7 @@ def __init__(self, nodes_uri, classes_uri): uri = copy.deepcopy(classes_uri) uri.update(options) uri = self._uri(uri) - self._classes_storage[env] = get_storage(uri.storage_type, None, uri.options) + self._classes_storage[env] = get_storage(uri.storage_type, None, uri.options, compose_node_name) def _uri(self, uri): ret = copy.deepcopy(uri) diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 7ed3fe46..0ea0b05f 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -15,8 +15,7 @@ import fnmatch import yaml from reclass.output.yaml_outputter import ExplicitDumper -from reclass.storage import NodeStorageBase -from reclass.storage.common import NameMangler +from reclass.storage import ExternalNodeStorageBase from reclass.storage.yamldata import YamlData from .directory import Directory from reclass.datatypes import Entity @@ -53,22 +52,18 @@ def _path_mangler_inner(path): return n, c -class ExternalNodeStorage(NodeStorageBase): +class ExternalNodeStorage(ExternalNodeStorageBase): def __init__(self, nodes_uri, classes_uri, compose_node_name): - super(ExternalNodeStorage, self).__init__(STORAGE_NAME) + super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) if nodes_uri is not None: self._nodes_uri = nodes_uri - if compose_node_name: - self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.composed_nodes) - else: - self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.nodes) - + self._nodes = 
self._enumerate_inventory(nodes_uri, self.node_name_mangler) if classes_uri is not None: self._classes_uri = classes_uri - self._classes = self._enumerate_inventory(classes_uri, NameMangler.classes) + self._classes = self._enumerate_inventory(classes_uri, self.class_name_mangler) nodes_uri = property(lambda self: self._nodes_uri) classes_uri = property(lambda self: self._classes_uri) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 38de092e..45cb6c00 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -28,8 +28,7 @@ from six import iteritems import reclass.errors -from reclass.storage import NodeStorageBase -from reclass.storage.common import NameMangler +from reclass.storage import ExternalNodeStorageBase from reclass.storage.yamldata import YamlData FILE_EXTENSION = '.yml' @@ -75,7 +74,7 @@ def __repr__(self): class GitRepo(object): - def __init__(self, uri): + def __init__(self, uri, node_name_mangler, class_name_mangler): if pygit2 is None: raise errors.MissingModuleError('pygit2') self.transport, _, self.url = uri.repo.partition('://') @@ -87,6 +86,8 @@ def __init__(self, uri): else: self.cache_dir = '{0}/{1}'.format(uri.cache_dir, self.name) + self._node_name_mangler = node_name_mangler + self._class_name_mangler = class_name_mangler self._init_repo(uri) self._fetch() self.branches = self.repo.listall_branches() @@ -184,7 +185,8 @@ def files_in_repo(self): if fnmatch.fnmatch(file.name, '*{0}'.format(FILE_EXTENSION)): name = os.path.splitext(file.name)[0] relpath = os.path.dirname(file.path) - relpath, name = NameMangler.classes(relpath, name) + if callable(self._class_name_mangler): + relpath, name = self._class_name_mangler(relpath, name) if name in ret: raise reclass.errors.DuplicateNodeNameError(self.name + ' - ' + bname, name, ret[name], path) else: @@ -197,16 +199,19 @@ def nodes(self, branch, subdir): for (name, file) in iteritems(self.files[branch]): if subdir is None or name.startswith(subdir): node_name = os.path.splitext(file.name)[0] + relpath = os.path.dirname(file.path) + if callable(self._node_name_mangler): + relpath, node_name = self._node_name_mangler(relpath, node_name) if node_name in ret: raise reclass.errors.DuplicateNodeNameError(self.name, name, files[name], path) else: ret[node_name] = file return ret -class ExternalNodeStorage(NodeStorageBase): +class ExternalNodeStorage(ExternalNodeStorageBase): - def __init__(self, nodes_uri, classes_uri): - super(ExternalNodeStorage, self).__init__(STORAGE_NAME) + def __init__(self, nodes_uri, classes_uri, compose_node_name): + super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) self._repos = dict() if nodes_uri is not None: @@ -261,7 +266,7 @@ def enumerate_nodes(self): def _load_repo(self, uri): if uri.repo not in self._repos: - self._repos[uri.repo] = GitRepo(uri) + self._repos[uri.repo] = GitRepo(uri, self.node_name_mangler, self.class_name_mangler) def _env_to_uri(self, environment): ret = None From 30abae27e3ceb0d229e2d9a73cfbd81dd8efd622 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Fri, 24 Aug 2018 13:15:06 +0200 Subject: [PATCH 13/63] Fix references bug introduced in refactoring BaseTestExpression objects return lists of references so LogicTest objects should extend and not append their master reference list with the lists from BaseTestExpression objects --- reclass/values/invitem.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/reclass/values/invitem.py 
b/reclass/values/invitem.py index 0179f4f3..3874f397 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -110,8 +110,9 @@ def __init__(self, expr, delimiter): subtests = list(it.compress(expr, it.cycle([1, 1, 1, 0]))) self._els = [EqualityTest(subtests[j:j+3], self._delimiter) for j in range(0, len(subtests), 3)] - self.refs = [x.refs for x in self._els] - self.inv_refs = [x.inv_refs for x in self._els] + for x in self._els: + self.refs.extend(x.refs) + self.inv_refs.extend(x.inv_refs) try: self._ops = [self.known_operators[x[1]] for x in expr[3::4]] except KeyError as e: From d2762b0c447c83a5a03b5d58b8bff4434d274265 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Wed, 5 Sep 2018 14:01:52 +0200 Subject: [PATCH 14/63] Feature, classes, use relative reference Change-Id: I1942580e78d3c9e83fdad4927532186441fe3298 --- README-extentions.rst | 31 +++++++++++++++++++ README.rst | 2 +- reclass/core.py | 5 +++ reclass/storage/yaml_fs/__init__.py | 8 ++++- reclass/storage/yaml_git/__init__.py | 10 +++++- reclass/storage/yamldata.py | 5 ++- test/model/extensions/classes/defaults.yml | 4 +++ .../extensions/classes/relative/init.yml | 3 ++ .../classes/relative/nested/common.yml | 5 +++ .../classes/relative/nested/deep/common.yml | 5 +++ .../classes/relative/nested/deep/init.yml | 9 ++++++ .../classes/relative/nested/dive/session.yml | 5 +++ .../classes/relative/nested/init.yml | 10 ++++++ test/model/extensions/classes/second.yml | 1 + test/model/extensions/classes/third.yml | 1 + test/model/extensions/nodes/reclass.yml | 2 +- 16 files changed, 101 insertions(+), 5 deletions(-) create mode 100644 test/model/extensions/classes/defaults.yml create mode 100644 test/model/extensions/classes/relative/init.yml create mode 100644 test/model/extensions/classes/relative/nested/common.yml create mode 100644 test/model/extensions/classes/relative/nested/deep/common.yml create mode 100644 test/model/extensions/classes/relative/nested/deep/init.yml create mode 100644 test/model/extensions/classes/relative/nested/dive/session.yml create mode 100644 test/model/extensions/classes/relative/nested/init.yml diff --git a/README-extentions.rst b/README-extentions.rst index 2bc4816e..66932563 100644 --- a/README-extentions.rst +++ b/README-extentions.rst @@ -336,6 +336,37 @@ Reclass --nodeinfo then returns: ... +Load classes with relative names +-------------------------------- + +Load referenced class from a relative location to the current class. +To load class from relative location start the class uri with "." char. +The only supported reference is to nested tree structure below the current class. + +You are allowed to use syntax for relative uri to required class on any place on your model (first class loaded, init.yml, regular class .yml). + +The feature is expected to improve flexibility while sharing classes between your models. + +It's a new feature use it with care and mind that using "relative syntax" lower traceability of +your pillar composition. + +Example usage of relative class name: + +.. code-block:: yaml + + #/etc/reclass/classes/component/defaults.yml + classes: + component: + config: + a: b + +.. code-block:: yaml + + #/etc/reclass/classes/component/init.yml + classes: + - .defaults + + Inventory Queries ----------------- diff --git a/README.rst b/README.rst index b865e4ff..7461ea72 100644 --- a/README.rst +++ b/README.rst @@ -23,7 +23,7 @@ Documentation ============= Documentation covering the original version is in the doc directory. 
-See the README-extensions.rst file for documentation on the extentions. +See the `README-extensions.rst` file for documentation on the extentions. diff --git a/reclass/core.py b/reclass/core.py index bc897387..66c74f5a 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -113,6 +113,7 @@ def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, node context = Entity(self._settings, name='empty (@{0})'.format(nodename)) for klass in entity.classes.as_list(): + # class name contain reference if klass.count('$') > 0: try: klass = str(self._parser.parse(klass, self._settings).render(merge_base.parameters.as_dict(), {})) @@ -122,6 +123,10 @@ def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, node except ResolveError as e: raise ClassNameResolveError(klass, nodename, entity.uri) + # for convenience, first level classes_uri/class.yml can have un-interpolated "." + if klass.startswith('.'): + klass = klass[1:] + if klass not in seen: try: class_entity = self._storage.get_class(klass, environment, self._settings) diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 7ed3fe46..5b7b7f58 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -113,7 +113,13 @@ def get_class(self, name, environment, settings): path = os.path.join(self.classes_uri, self._classes[name]) except KeyError as e: raise reclass.errors.ClassNotFound(self.name, name, self.classes_uri) - entity = YamlData.from_file(path).get_entity(name, settings) + + if path.endswith('init{}'.format(FILE_EXTENSION)): + parent_class=name + else: + # for regular class yml file, strip its name + parent_class='.'.join(name.split('.')[:-1]) + entity = YamlData.from_file(path).get_entity(name, settings, parent_class) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 38de092e..c429c67f 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -253,7 +253,15 @@ def get_class(self, name, environment, settings): raise reclass.errors.NotFoundError("File " + name + " missing from " + uri.repo + " branch " + uri.branch) file = self._repos[uri.repo].files[uri.branch][name] blob = self._repos[uri.repo].get(file.id) - entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, file.path)).get_entity(name, settings) + + if file.name.endswith('init{}'.format(FILE_EXTENSION)): + parent_class=name + else: + # for regular class yml file, strip its name + parent_class='.'.join(name.split('.')[:-1]) + + entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, + file.path)).get_entity(name, settings, parent_class) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py index a8611548..52547ac4 100644 --- a/reclass/storage/yamldata.py +++ b/reclass/storage/yamldata.py @@ -53,13 +53,16 @@ def __init__(self, uri): def get_data(self): return self._data - def get_entity(self, name, settings): + def get_entity(self, name, settings, parent_class=None): #if name is None: # name = self._uri classes = self._data.get('classes') if classes is None: classes = [] + if parent_class: + classes = \ + [parent_class + c for c in classes if c.startswith('.')] classes = datatypes.Classes(classes) applications = self._data.get('applications') diff --git a/test/model/extensions/classes/defaults.yml 
b/test/model/extensions/classes/defaults.yml new file mode 100644 index 00000000..5d17c2be --- /dev/null +++ b/test/model/extensions/classes/defaults.yml @@ -0,0 +1,4 @@ + +parameters: + config: + defaults: True diff --git a/test/model/extensions/classes/relative/init.yml b/test/model/extensions/classes/relative/init.yml new file mode 100644 index 00000000..117e4fad --- /dev/null +++ b/test/model/extensions/classes/relative/init.yml @@ -0,0 +1,3 @@ + +classes: + - .nested diff --git a/test/model/extensions/classes/relative/nested/common.yml b/test/model/extensions/classes/relative/nested/common.yml new file mode 100644 index 00000000..28cc0b2b --- /dev/null +++ b/test/model/extensions/classes/relative/nested/common.yml @@ -0,0 +1,5 @@ + +parameters: + nested: + deep: + common: to be overriden diff --git a/test/model/extensions/classes/relative/nested/deep/common.yml b/test/model/extensions/classes/relative/nested/deep/common.yml new file mode 100644 index 00000000..b77a24c4 --- /dev/null +++ b/test/model/extensions/classes/relative/nested/deep/common.yml @@ -0,0 +1,5 @@ + +parameters: + nested: + deep: + common: False diff --git a/test/model/extensions/classes/relative/nested/deep/init.yml b/test/model/extensions/classes/relative/nested/deep/init.yml new file mode 100644 index 00000000..cd12d103 --- /dev/null +++ b/test/model/extensions/classes/relative/nested/deep/init.yml @@ -0,0 +1,9 @@ + +classes: + - .common + +parameters: + nested: + deep: + init: True + common: True diff --git a/test/model/extensions/classes/relative/nested/dive/session.yml b/test/model/extensions/classes/relative/nested/dive/session.yml new file mode 100644 index 00000000..9abd1eea --- /dev/null +++ b/test/model/extensions/classes/relative/nested/dive/session.yml @@ -0,0 +1,5 @@ + +parameters: + nested: + deep: + session: True diff --git a/test/model/extensions/classes/relative/nested/init.yml b/test/model/extensions/classes/relative/nested/init.yml new file mode 100644 index 00000000..9f023830 --- /dev/null +++ b/test/model/extensions/classes/relative/nested/init.yml @@ -0,0 +1,10 @@ + +classes: + - .common + - .deep + - .dive.session + +parameters: + nested: + deep: + init: True diff --git a/test/model/extensions/classes/second.yml b/test/model/extensions/classes/second.yml index a9babd34..929d7465 100644 --- a/test/model/extensions/classes/second.yml +++ b/test/model/extensions/classes/second.yml @@ -1,5 +1,6 @@ classes: - first +- relative parameters: will: diff --git a/test/model/extensions/classes/third.yml b/test/model/extensions/classes/third.yml index 20a937c5..a5157cf6 100644 --- a/test/model/extensions/classes/third.yml +++ b/test/model/extensions/classes/third.yml @@ -1,6 +1,7 @@ classes: - missing.class - second +- .defaults parameters: _param: diff --git a/test/model/extensions/nodes/reclass.yml b/test/model/extensions/nodes/reclass.yml index 94b75197..5d5b3ec4 100644 --- a/test/model/extensions/nodes/reclass.yml +++ b/test/model/extensions/nodes/reclass.yml @@ -1,3 +1,3 @@ classes: -- third +- .third From 23fcc23940fc9affb5f1753ef5e158c89d604f75 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:09:46 +0200 Subject: [PATCH 15/63] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 7461ea72..fef633c8 100644 --- a/README.rst +++ b/README.rst @@ -23,7 +23,7 @@ Documentation ============= Documentation covering the original version is in the doc directory. 
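Assuming the resolution rule introduced in PATCH 14 above (a class reference starting with "." is prefixed with the namespace of the referencing class, and an init.yml carries the name of its directory), the relative entries in the test/model/extensions classes above should expand roughly as follows. This is an illustrative sketch, not generated output.

.. code-block:: python

    # Sketch: expected expansion of the relative class entries defined above.
    expected = {
        'relative':             {'.nested': 'relative.nested'},
        'relative.nested':      {'.common': 'relative.nested.common',
                                 '.deep': 'relative.nested.deep',
                                 '.dive.session': 'relative.nested.dive.session'},
        'relative.nested.deep': {'.common': 'relative.nested.deep.common'},
        'third':                {'.defaults': 'defaults'},  # top level: the dot is simply dropped
    }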
-See the `README-extensions.rst` file for documentation on the extentions. +See the [README-extensions.rst](./README-extensions.rst) file for documentation on the extentions. From 26b20f76b7ceeef48102b450a7255664bb74778a Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:10:59 +0200 Subject: [PATCH 16/63] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index fef633c8..5a78b92a 100644 --- a/README.rst +++ b/README.rst @@ -23,7 +23,7 @@ Documentation ============= Documentation covering the original version is in the doc directory. -See the [README-extensions.rst](./README-extensions.rst) file for documentation on the extentions. +See the [README-extensions.rst] file for documentation on the extentions. From 0344dcc8736725240f238daf40dc267d0e0bb170 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:14:30 +0200 Subject: [PATCH 17/63] rename README-extensions.rst Change-Id: I40e2078fd9bc5e498d22d35abbb849c135ce1c2d --- README-extentions.rst => README-extensions.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename README-extentions.rst => README-extensions.rst (100%) diff --git a/README-extentions.rst b/README-extensions.rst similarity index 100% rename from README-extentions.rst rename to README-extensions.rst From cd8abe9e474cb81e461432a7e24a2483068e2307 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:15:22 +0200 Subject: [PATCH 18/63] Update README.rst --- README.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index 5a78b92a..0e87ea27 100644 --- a/README.rst +++ b/README.rst @@ -16,16 +16,15 @@ List of the core features: * Ignore class notfound/regexp option -.. include:: ./README-extensions.rst - - Documentation ============= Documentation covering the original version is in the doc directory. -See the [README-extensions.rst] file for documentation on the extentions. +See the [README-extensions.rst](README-extensions.rst) file for documentation on the extentions. +.. include:: ./README-extensions.rst + Reclass related projects/tools ============================== From 871c61d20432a0f81b329ec77b490d7a4a284257 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:16:12 +0200 Subject: [PATCH 19/63] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 0e87ea27..4ad355fd 100644 --- a/README.rst +++ b/README.rst @@ -20,7 +20,7 @@ Documentation ============= Documentation covering the original version is in the doc directory. -See the [README-extensions.rst](README-extensions.rst) file for documentation on the extentions. +See the [README-extensions.rst](./README-extensions.rst) file for documentation on the extentions. .. include:: ./README-extensions.rst From eb62d355df7a76cc53a3cdd85f35204548bf045f Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:17:37 +0200 Subject: [PATCH 20/63] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 4ad355fd..4fc25afe 100644 --- a/README.rst +++ b/README.rst @@ -20,7 +20,7 @@ Documentation ============= Documentation covering the original version is in the doc directory. -See the [README-extensions.rst](./README-extensions.rst) file for documentation on the extentions. 
+See the [README-extensions.rst file](https://github.com/salt-formulas/reclass/blob/develop/README-extensions.rst) for documentation on the extentions. .. include:: ./README-extensions.rst From 36bc995d511d4992c8a9cd0106bf59f6b88b3a35 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:19:17 +0200 Subject: [PATCH 21/63] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 4fc25afe..b5e15e4d 100644 --- a/README.rst +++ b/README.rst @@ -20,7 +20,7 @@ Documentation ============= Documentation covering the original version is in the doc directory. -See the [README-extensions.rst file](https://github.com/salt-formulas/reclass/blob/develop/README-extensions.rst) for documentation on the extentions. +See the `README-extensions.rst`_ file for documentation on the extentions. .. include:: ./README-extensions.rst From bf3597a04cbdc16d306fa0532f17620c9e41361f Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 7 Sep 2018 13:22:45 +0200 Subject: [PATCH 22/63] Update README.rst --- README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index b5e15e4d..5dcb5a19 100644 --- a/README.rst +++ b/README.rst @@ -19,8 +19,10 @@ List of the core features: Documentation ============= +.. _README-extensions: README-extensions.rst + Documentation covering the original version is in the doc directory. -See the `README-extensions.rst`_ file for documentation on the extentions. +See the `README-extensions`_ file for documentation on the extentions. .. include:: ./README-extensions.rst From 45a675ad21b5f608c34f38a374cbe509dd23428e Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Tue, 11 Sep 2018 11:05:33 +0200 Subject: [PATCH 23/63] Fix merging inv query results --- reclass/datatypes/tests/test_exports.py | 9 +++++++++ reclass/values/invitem.py | 3 +++ reclass/values/valuelist.py | 11 +++++++++-- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/reclass/datatypes/tests/test_exports.py b/reclass/datatypes/tests/test_exports.py index a0acce77..16a45cb4 100644 --- a/reclass/datatypes/tests/test_exports.py +++ b/reclass/datatypes/tests/test_exports.py @@ -120,5 +120,14 @@ def test_list_if_expr_invquery_with_and(self): p.interpolate(e) self.assertIn(p.as_dict(), [ r1, r2 ]) + def test_merging_inv_queries(self): + e = {'node1': {'a': 1}, 'node2': {'a': 1}, 'node3': {'a': 2}} + p1 = Parameters({'exp': '$[ if exports:a == 1 ]'}, SETTINGS, '') + p2 = Parameters({'exp': '$[ if exports:a == 2 ]'}, SETTINGS, '') + r = { 'exp': [ 'node1', 'node2', 'node3' ] } + p1.merge(p2) + p1.interpolate(e) + self.assertEqual(p1.as_dict(), r) + if __name__ == '__main__': unittest.main() diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py index 3874f397..54616124 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -189,6 +189,9 @@ def has_references(self): def get_references(self): return self._question.refs + def assembleRefs(self, context): + return + def get_inv_references(self): return self.inv_refs diff --git a/reclass/values/valuelist.py b/reclass/values/valuelist.py index a56395be..86563fae 100644 --- a/reclass/values/valuelist.py +++ b/reclass/values/valuelist.py @@ -71,9 +71,9 @@ def _check_for_inv_query(self): self.ignore_failed_render = True for value in self._values: if value.has_inv_query: - self._inv_refs.extend(value.get_inv_references) + self._inv_refs.extend(value.get_inv_references()) self._has_inv_query = True - if 
vale.ignore_failed_render() is False: + if value.ignore_failed_render() is False: self.ignore_failed_render = False if self._has_inv_query is False: self.ignore_failed_render = False @@ -88,6 +88,13 @@ def assembleRefs(self, context={}): if value.allRefs is False: self.allRefs = False + @property + def needs_all_envs(self): + for value in self._values: + if value.needs_all_envs: + return True + return False + def merge(self): output = None for n, value in enumerate(self._values): From 168a03403ccc931fcd5c2deff65a07ed4768cd81 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Fri, 21 Sep 2018 12:14:04 +0200 Subject: [PATCH 24/63] Fix relative class name functionality Remove the old implementation of the relative class name functionality as this has bugs, leading to classes not being loaded.The new implementation catches relative class names when they are first encountered and converts them into absolute class names straight away. Also add some tests for the relative class names. --- reclass/core.py | 4 ---- reclass/storage/yaml_fs/__init__.py | 9 +-------- reclass/storage/yaml_git/__init__.py | 10 +--------- reclass/storage/yamldata.py | 18 ++++++++++++++---- reclass/tests/data/02/classes/one/alpha.yml | 7 +++++++ reclass/tests/data/02/classes/one/beta.yml | 2 ++ reclass/tests/data/02/classes/three.yml | 2 ++ reclass/tests/data/02/classes/two/beta.yml | 2 ++ reclass/tests/data/02/nodes/relative.yml | 2 ++ reclass/tests/data/02/nodes/top_relative.yml | 2 ++ reclass/tests/test_core.py | 12 ++++++++++++ 11 files changed, 45 insertions(+), 25 deletions(-) create mode 100644 reclass/tests/data/02/classes/one/alpha.yml create mode 100644 reclass/tests/data/02/classes/one/beta.yml create mode 100644 reclass/tests/data/02/classes/three.yml create mode 100644 reclass/tests/data/02/classes/two/beta.yml create mode 100644 reclass/tests/data/02/nodes/relative.yml create mode 100644 reclass/tests/data/02/nodes/top_relative.yml diff --git a/reclass/core.py b/reclass/core.py index 66c74f5a..75eea548 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -123,10 +123,6 @@ def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, node except ResolveError as e: raise ClassNameResolveError(klass, nodename, entity.uri) - # for convenience, first level classes_uri/class.yml can have un-interpolated "." 
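For reference, the conversion performed by the new set_absolute_names helper of PATCH 24 (see the yamldata.py hunk below) can be sketched as a standalone function. This version handles only single-dot references; the ".." form is added in a later patch.

.. code-block:: python

    # Standalone sketch of the single-dot resolution added below in
    # reclass/storage/yamldata.py.
    def set_absolute_names(name, names):
        parent = '.'.join(name.split('.')[0:-1])
        new_names = []
        for n in names:
            if n[0] == '.':
                n = n[1:] if parent == '' else parent + n
            new_names.append(n)
        return new_names

    # Mirrors the data/02 test model used by test_relative_class_names:
    assert set_absolute_names('one.alpha', ['.beta', 'two.beta']) == ['one.beta', 'two.beta']
    assert set_absolute_names('three', ['.one.alpha']) == ['one.alpha']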
- if klass.startswith('.'): - klass = klass[1:] - if klass not in seen: try: class_entity = self._storage.get_class(klass, environment, self._settings) diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 7540aa4d..20e8eecb 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -96,7 +96,6 @@ def get_node(self, name, settings): try: relpath = self._nodes[name] path = os.path.join(self.nodes_uri, relpath) - name = os.path.splitext(relpath)[0] except KeyError as e: raise reclass.errors.NodeNotFound(self.name, name, self.nodes_uri) entity = YamlData.from_file(path).get_entity(name, settings) @@ -108,13 +107,7 @@ def get_class(self, name, environment, settings): path = os.path.join(self.classes_uri, self._classes[name]) except KeyError as e: raise reclass.errors.ClassNotFound(self.name, name, self.classes_uri) - - if path.endswith('init{}'.format(FILE_EXTENSION)): - parent_class=name - else: - # for regular class yml file, strip its name - parent_class='.'.join(name.split('.')[:-1]) - entity = YamlData.from_file(path).get_entity(name, settings, parent_class) + entity = YamlData.from_file(path).get_entity(name, settings) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 61c45519..45cb6c00 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -258,15 +258,7 @@ def get_class(self, name, environment, settings): raise reclass.errors.NotFoundError("File " + name + " missing from " + uri.repo + " branch " + uri.branch) file = self._repos[uri.repo].files[uri.branch][name] blob = self._repos[uri.repo].get(file.id) - - if file.name.endswith('init{}'.format(FILE_EXTENSION)): - parent_class=name - else: - # for regular class yml file, strip its name - parent_class='.'.join(name.split('.')[:-1]) - - entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, - file.path)).get_entity(name, settings, parent_class) + entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, file.path)).get_entity(name, settings) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py index 52547ac4..034832df 100644 --- a/reclass/storage/yamldata.py +++ b/reclass/storage/yamldata.py @@ -53,16 +53,26 @@ def __init__(self, uri): def get_data(self): return self._data - def get_entity(self, name, settings, parent_class=None): + def set_absolute_names(self, name, names): + parent = '.'.join(name.split('.')[0:-1]) + new_names = [] + for n in names: + if n[0] == '.': + if parent == '': + n = n[1:] + else: + n = parent + n + new_names.append(n) + return new_names + + def get_entity(self, name, settings): #if name is None: # name = self._uri classes = self._data.get('classes') if classes is None: classes = [] - if parent_class: - classes = \ - [parent_class + c for c in classes if c.startswith('.')] + classes = self.set_absolute_names(name, classes) classes = datatypes.Classes(classes) applications = self._data.get('applications') diff --git a/reclass/tests/data/02/classes/one/alpha.yml b/reclass/tests/data/02/classes/one/alpha.yml new file mode 100644 index 00000000..a13cc5c8 --- /dev/null +++ b/reclass/tests/data/02/classes/one/alpha.yml @@ -0,0 +1,7 @@ +classes: +- .beta +- two.beta + +parameters: + test1: ${one_beta} + test2: ${two_beta} diff --git a/reclass/tests/data/02/classes/one/beta.yml 
b/reclass/tests/data/02/classes/one/beta.yml new file mode 100644 index 00000000..f754252a --- /dev/null +++ b/reclass/tests/data/02/classes/one/beta.yml @@ -0,0 +1,2 @@ +parameters: + one_beta: 1 diff --git a/reclass/tests/data/02/classes/three.yml b/reclass/tests/data/02/classes/three.yml new file mode 100644 index 00000000..987fde06 --- /dev/null +++ b/reclass/tests/data/02/classes/three.yml @@ -0,0 +1,2 @@ +classes: +- .one.alpha diff --git a/reclass/tests/data/02/classes/two/beta.yml b/reclass/tests/data/02/classes/two/beta.yml new file mode 100644 index 00000000..1f578b2f --- /dev/null +++ b/reclass/tests/data/02/classes/two/beta.yml @@ -0,0 +1,2 @@ +parameters: + two_beta: 2 diff --git a/reclass/tests/data/02/nodes/relative.yml b/reclass/tests/data/02/nodes/relative.yml new file mode 100644 index 00000000..1f2bbdc7 --- /dev/null +++ b/reclass/tests/data/02/nodes/relative.yml @@ -0,0 +1,2 @@ +classes: + - one.alpha diff --git a/reclass/tests/data/02/nodes/top_relative.yml b/reclass/tests/data/02/nodes/top_relative.yml new file mode 100644 index 00000000..5dae5beb --- /dev/null +++ b/reclass/tests/data/02/nodes/top_relative.yml @@ -0,0 +1,2 @@ +classes: + - three diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index 047bf24b..b20b268d 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -59,6 +59,18 @@ def test_ignore_class_notfound_with_regexp(self): params = { 'node_test': 'class not found', '_reclass_': { 'environment': 'base', 'name': {'full': 'class_notfound', 'short': 'class_notfound' } } } self.assertEqual(node['parameters'], params) + def test_relative_class_names(self): + reclass = self._core('02') + node = reclass.nodeinfo('relative') + params = { 'test1': 1, 'test2': 2, 'one_beta': 1, 'two_beta': 2, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } + self.assertEqual(node['parameters'], params) + + def test_top_relative_class_names(self): + reclass = self._core('02') + node = reclass.nodeinfo('top_relative') + params = { 'test1': 1, 'test2': 2, 'one_beta': 1, 'two_beta': 2, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } + self.assertEqual(node['parameters'], params) + if __name__ == '__main__': unittest.main() From 0206c9ffa7d57e4fa24c31dc4e0228f360fabba2 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Fri, 21 Sep 2018 12:19:35 +0200 Subject: [PATCH 25/63] Add tests for compose node name functionality --- reclass/tests/data/03/classes/a.yml | 6 ++++++ reclass/tests/data/03/classes/b.yml | 6 ++++++ reclass/tests/data/03/classes/c.yml | 6 ++++++ reclass/tests/data/03/classes/d.yml | 6 ++++++ reclass/tests/data/03/nodes/alpha/one.yml | 3 +++ reclass/tests/data/03/nodes/alpha/two.yml | 3 +++ reclass/tests/data/03/nodes/beta/one.yml | 3 +++ reclass/tests/data/03/nodes/beta/two.yml | 3 +++ reclass/tests/test_core.py | 15 +++++++++++++++ 9 files changed, 51 insertions(+) create mode 100644 reclass/tests/data/03/classes/a.yml create mode 100644 reclass/tests/data/03/classes/b.yml create mode 100644 reclass/tests/data/03/classes/c.yml create mode 100644 reclass/tests/data/03/classes/d.yml create mode 100644 reclass/tests/data/03/nodes/alpha/one.yml create mode 100644 reclass/tests/data/03/nodes/alpha/two.yml create mode 100644 reclass/tests/data/03/nodes/beta/one.yml create mode 100644 reclass/tests/data/03/nodes/beta/two.yml diff --git a/reclass/tests/data/03/classes/a.yml b/reclass/tests/data/03/classes/a.yml new file mode 100644 index 
00000000..748a2974 --- /dev/null +++ b/reclass/tests/data/03/classes/a.yml @@ -0,0 +1,6 @@ +parameters: + a: 1 + alpha: + - ${a} + beta: + a: ${a} diff --git a/reclass/tests/data/03/classes/b.yml b/reclass/tests/data/03/classes/b.yml new file mode 100644 index 00000000..cce2609e --- /dev/null +++ b/reclass/tests/data/03/classes/b.yml @@ -0,0 +1,6 @@ +parameters: + b: 2 + alpha: + - ${b} + beta: + b: ${b} diff --git a/reclass/tests/data/03/classes/c.yml b/reclass/tests/data/03/classes/c.yml new file mode 100644 index 00000000..7441417e --- /dev/null +++ b/reclass/tests/data/03/classes/c.yml @@ -0,0 +1,6 @@ +parameters: + c: 3 + alpha: + - ${c} + beta: + c: ${c} diff --git a/reclass/tests/data/03/classes/d.yml b/reclass/tests/data/03/classes/d.yml new file mode 100644 index 00000000..e61a1ff0 --- /dev/null +++ b/reclass/tests/data/03/classes/d.yml @@ -0,0 +1,6 @@ +parameters: + d: 4 + alpha: + - ${d} + beta: + d: ${d} diff --git a/reclass/tests/data/03/nodes/alpha/one.yml b/reclass/tests/data/03/nodes/alpha/one.yml new file mode 100644 index 00000000..f2b613d0 --- /dev/null +++ b/reclass/tests/data/03/nodes/alpha/one.yml @@ -0,0 +1,3 @@ +classes: +- a +- b diff --git a/reclass/tests/data/03/nodes/alpha/two.yml b/reclass/tests/data/03/nodes/alpha/two.yml new file mode 100644 index 00000000..b020af36 --- /dev/null +++ b/reclass/tests/data/03/nodes/alpha/two.yml @@ -0,0 +1,3 @@ +classes: +- a +- c diff --git a/reclass/tests/data/03/nodes/beta/one.yml b/reclass/tests/data/03/nodes/beta/one.yml new file mode 100644 index 00000000..168a4fb8 --- /dev/null +++ b/reclass/tests/data/03/nodes/beta/one.yml @@ -0,0 +1,3 @@ +classes: +- b +- c diff --git a/reclass/tests/data/03/nodes/beta/two.yml b/reclass/tests/data/03/nodes/beta/two.yml new file mode 100644 index 00000000..56c63433 --- /dev/null +++ b/reclass/tests/data/03/nodes/beta/two.yml @@ -0,0 +1,3 @@ +classes: +- c +- d diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index b20b268d..ced18e84 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -71,6 +71,21 @@ def test_top_relative_class_names(self): params = { 'test1': 1, 'test2': 2, 'one_beta': 1, 'two_beta': 2, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } self.assertEqual(node['parameters'], params) + def test_compose_node_names(self): + reclass = self._core('03', {'compose_node_name': True}) + alpha_one_node = reclass.nodeinfo('alpha.one') + alpha_one_res = {'a': 1, 'alpha': [1, 2], 'beta': {'a': 1, 'b': 2}, 'b': 2, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.one', 'short': 'alpha'}}} + alpha_two_node = reclass.nodeinfo('alpha.two') + alpha_two_res = {'a': 1, 'alpha': [1, 3], 'beta': {'a': 1, 'c': 3}, 'c': 3, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.two', 'short': 'alpha'}}} + beta_one_node = reclass.nodeinfo('beta.one') + beta_one_res = {'alpha': [2, 3], 'beta': {'c': 3, 'b': 2}, 'b': 2, 'c': 3, '_reclass_': {'environment': 'base', 'name': {'full': 'beta.one', 'short': 'beta'}}} + beta_two_node = reclass.nodeinfo('beta.two') + beta_two_res = {'alpha': [3, 4], 'c': 3, 'beta': {'c': 3, 'd': 4}, 'd': 4, '_reclass_': {'environment': u'base', 'name': {'full': u'beta.two', 'short': u'beta'}}} + self.assertEqual(alpha_one_node['parameters'], alpha_one_res) + self.assertEqual(alpha_two_node['parameters'], alpha_two_res) + self.assertEqual(beta_one_node['parameters'], beta_one_res) + self.assertEqual(beta_two_node['parameters'], beta_two_res) + if __name__ == 
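The expectations in test_compose_node_names above boil down to: with compose_node_name enabled, the directory a node file lives in becomes part of its full name. A rough sketch of that composition follows; the helper name and the underscore rule (taken from the yaml_fs documentation) are illustrative and not the actual NameMangler implementation.

.. code-block:: python

    import os

    # Hypothetical sketch of node-name composition as exercised by
    # test_compose_node_names above.
    def compose_node_name(relpath, filename):
        base = os.path.splitext(filename)[0]
        if relpath in ('', '.') or relpath.startswith('_'):
            # underscore-prefixed subfolders are not added to the node name
            return base
        return relpath.replace(os.sep, '.') + '.' + base

    assert compose_node_name('alpha', 'one.yml') == 'alpha.one'
    assert compose_node_name('_generated', 'one.yml') == 'one'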
'__main__': unittest.main() From 424f65118812d79ea14a69ee1401acfae4692c57 Mon Sep 17 00:00:00 2001 From: Ales Komarek Date: Sun, 23 Sep 2018 16:17:39 +0200 Subject: [PATCH 26/63] Defaulted compose_node_name in get_storage to fix backward compatibility issue --- reclass/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/__init__.py b/reclass/__init__.py index 2167a303..fe788109 100644 --- a/reclass/__init__.py +++ b/reclass/__init__.py @@ -15,7 +15,7 @@ from .storage.loader import StorageBackendLoader from .storage.memcache_proxy import MemcacheProxy -def get_storage(storage_type, nodes_uri, classes_uri, compose_node_name, **kwargs): +def get_storage(storage_type, nodes_uri, classes_uri, compose_node_name=False, **kwargs): storage_class = StorageBackendLoader(storage_type).load() return MemcacheProxy(storage_class(nodes_uri, classes_uri, compose_node_name, **kwargs)) From fa2785f226796c8c2790c5e6a4312fe850aab0f1 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Tue, 25 Sep 2018 14:11:02 +0200 Subject: [PATCH 27/63] Add a locking mechanism to yaml_git storage This stops multiple processes from updating the locale checkout of a remote git repo simultaneously --- reclass/storage/yaml_git/__init__.py | 49 +++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 45cb6c00..a28079b6 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -9,8 +9,11 @@ import collections import distutils.version +import errno +import fcntl import fnmatch import os +import time # Squelch warning on centos7 due to upgrading cffi # see https://github.com/saltstack/salt/pull/39871 @@ -50,6 +53,7 @@ def __init__(self, dictionary): self.branch = None self.root = None self.cache_dir = None + self.lock_dir = None self.pubkey = None self.privkey = None self.password = None @@ -59,6 +63,7 @@ def update(self, dictionary): if 'repo' in dictionary: self.repo = dictionary['repo'] if 'branch' in dictionary: self.branch = dictionary['branch'] if 'cache_dir' in dictionary: self.cache_dir = dictionary['cache_dir'] + if 'lock_dir' in dictionary: self.lock_dir = dictionary['lock_dir'] if 'pubkey' in dictionary: self.pubkey = dictionary['pubkey'] if 'privkey' in dictionary: self.privkey = dictionary['privkey'] if 'password' in dictionary: self.password = dictionary['password'] @@ -72,8 +77,31 @@ def __repr__(self): return '<{0}: {1} {2} {3}>'.format(self.__class__.__name__, self.repo, self.branch, self.root) -class GitRepo(object): +class LockFile(): + def __init__(self, file): + self._file = file + + def __enter__(self): + self._fd = open(self._file, 'w+') + start = time.time() + while True: + if (time.time() - start) > 120: + raise IOError('Timeout waiting to lock file: {0}'.format(self._file)) + try: + fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + break + except IOError as e: + # raise on unrelated IOErrors + if e.errno != errno.EAGAIN: + raise + else: + time.sleep(0.1) + + def __exit__(self, type, value, traceback): + self._fd.close() + +class GitRepo(object): def __init__(self, uri, node_name_mangler, class_name_mangler): if pygit2 is None: raise errors.MissingModuleError('pygit2') @@ -85,11 +113,18 @@ def __init__(self, uri, node_name_mangler, class_name_mangler): self.cache_dir = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/git', self.name) else: self.cache_dir = '{0}/{1}'.format(uri.cache_dir, self.name) - + if 
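A minimal usage sketch of the LockFile context manager added above, assuming reclass.storage.yaml_git imports cleanly on the target system: the lock is held for the duration of the with-block, so concurrent reclass processes serialise access to the shared git cache.

.. code-block:: python

    import os
    import tempfile

    from reclass.storage.yaml_git import LockFile  # assumes the module imports without error

    lock_path = os.path.join(tempfile.gettempdir(), 'reclass-example.lock')

    with LockFile(lock_path):
        # Update the cached checkout here. A second process entering this
        # block retries flock() every 0.1s and raises IOError after 120s.
        pass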
uri.lock_dir is None: + self.lock_file = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/lock', self.name) + else: + self.lock_file = '{0}/{1}'.format(uri.lock_dir, self.name) + lock_dir = os.path.dirname(self.lock_file) + if not os.path.exists(lock_dir): + os.makedirs(lock_dir) self._node_name_mangler = node_name_mangler self._class_name_mangler = class_name_mangler - self._init_repo(uri) - self._fetch() + with LockFile(self.lock_file): + self._init_repo(uri) + self._fetch() self.branches = self.repo.listall_branches() self.files = self.files_in_repo() @@ -99,10 +134,7 @@ def _init_repo(self, uri): else: os.makedirs(self.cache_dir) self.repo = pygit2.init_repository(self.cache_dir, bare=True) - - if not self.repo.remotes: self.repo.create_remote('origin', self.url) - if 'ssh' in self.transport: if '@' in self.url: user, _, _ = self.url.partition('@') @@ -130,7 +162,6 @@ def _fetch(self): if self.credentials is not None: origin.credentials = self.credentials fetch_results = origin.fetch(**fetch_kwargs) - remote_branches = self.repo.listall_branches(pygit2.GIT_BRANCH_REMOTE) local_branches = self.repo.listall_branches() for remote_branch_name in remote_branches: @@ -208,8 +239,8 @@ def nodes(self, branch, subdir): ret[node_name] = file return ret -class ExternalNodeStorage(ExternalNodeStorageBase): +class ExternalNodeStorage(ExternalNodeStorageBase): def __init__(self, nodes_uri, classes_uri, compose_node_name): super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) self._repos = dict() From 424769f46a8063d5728bb2ccfe088bdcb4e748f8 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Tue, 25 Sep 2018 14:12:58 +0200 Subject: [PATCH 28/63] Add documentation for yaml_git and mixed storage types --- README-extensions.rst | 114 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) diff --git a/README-extensions.rst b/README-extensions.rst index 66932563..26f3ca88 100644 --- a/README-extensions.rst +++ b/README-extensions.rst @@ -569,3 +569,117 @@ This adds the subfolder to the node name and the structure above can then be use ... If the subfolder path starts with the underscore character ``_``, then the subfolder path is NOT added to the node name. + + +Git storage type +---------------- + +Reclass node and class yaml files can be read from a remote git repository with the yaml_git storage type. Use nodes_uri and +classes_uri to define the git repos to use for nodes and classes. These can be the same repo. + +For salt masters using ssh connections the private and public keys must be readable by the salt daemon, which requires the +private key NOT be password protected. For stand alone reclass using ssh connections if the privkey and pubkey options +are not defined then any in memory key (from ssh-add) will be used. + +Salt master reclass config example: + +.. 
code-block:: yaml + + storage_type:yaml_git + nodes_uri: + # branch to use + branch: master + + # cache directory (default: ~/.reclass/git/cache) + cache_dir: /var/cache/reclass/git + + # lock directory (default: ~/.reclass/git/lock) + lock_dir: /var/cache/reclass/lock + + # private key for ssh connections (no default, but will used keys stored + # by ssh-add in memory if privkey and pubkey are not set) + privkey: /root/salt_rsa + # public key for ssh connections + pubkey: /root/salt_rsa.pub + + repo: git+ssh://gitlab@remote.server:salt/nodes.git + + classes_uri: + # branch to use or __env__ to use the branch matching the node + # environment name + branch: __env__ + + # cache directory (default: ~/.reclass/git/cache) + cache_dir: /var/cache/reclass/git + + # lock directory (default: ~/.reclass/git/lock) + lock_dir: /var/cache/reclass/lock + + # private key for ssh connections (no default, but will used keys stored + # by ssh-add in memory if privkey and pubkey are not set) + privkey: /root/salt_rsa + # public key for ssh connections + pubkey: /root/salt_rsa.pub + + # branch/env overrides for specific branches + env_overrides: + # prod env uses master branch + - prod: + branch: master + # use master branch for nodes with no environment defined + - none: + branch: master + + repo: git+ssh://gitlab@remote.server:salt/site.git + + # root directory of the class hierarcy in git repo + # defaults to root directory of git repo if not given + root: classes + + +Mixed storage type +------------------ + +Use a mixture of storage types. + +Salt master reclass config example, which by default uses yaml_git storage but overrides the location for +classes for the pre-prod environment to use a directory on the local disc: + +.. code-block:: yaml + + storage_type: mixed + nodes_uri: + # storage type to use + storage_type: yaml_git + + # yaml_git storage options + branch: master + cache_dir: /var/cache/reclass/git + lock_dir: /var/cache/reclass/lock + privkey: /root/salt_rsa + pubkey: /root/salt_rsa.pub + repo: git+ssh://gitlab@remote.server:salt/nodes.git + + classes_uri: + # storage type to use + storage_type: yaml_git + + # yaml_git storage options + branch: __env__ + cache_dir: /var/cache/reclass/git + lock_dir: /var/cache/reclass/lock + privkey: /root/salt_rsa + pubkey: /root/salt_rsa.pub + repo: git+ssh://gitlab@remote.server:salt/site.git + root: classes + + env_overrides: + - prod: + branch: master + - none: + branch: master + - pre-prod: + # override storage type for this environment + storage_type: yaml_fs + # options for yaml_fs storage type + uri: /srv/salt/env/pre-prod/classes From 27dea66b7937933855646db087da312c0e11db9e Mon Sep 17 00:00:00 2001 From: Martin Polreich Date: Fri, 5 Oct 2018 13:05:12 +0200 Subject: [PATCH 29/63] Allow to use '..' 
as a reference to higher level in class structure --- reclass/storage/yamldata.py | 15 +++++++++++++-- reclass/tests/data/02/classes/one/alpha.yml | 4 ++++ reclass/tests/data/02/classes/three.yml | 3 +++ reclass/tests/data/02/classes/two/gamma.yml | 2 ++ reclass/tests/test_core.py | 2 +- 5 files changed, 23 insertions(+), 3 deletions(-) create mode 100644 reclass/tests/data/02/classes/two/gamma.yml diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py index 034832df..c549089a 100644 --- a/reclass/storage/yamldata.py +++ b/reclass/storage/yamldata.py @@ -54,12 +54,23 @@ def get_data(self): return self._data def set_absolute_names(self, name, names): - parent = '.'.join(name.split('.')[0:-1]) + structure = name.split('.') + parent = '.'.join(structure[0:-1]) new_names = [] for n in names: if n[0] == '.': - if parent == '': + if len(n) > 1 and n[1] == '.': + grandparent = '.'.join(structure[0:-2]) + if len(n) == 2: + n = grandparent + elif parent == '' or grandparent == '': + n = n[2:] + else: + n = grandparent + n[1:] + elif parent == '': n = n[1:] + elif len(n) == 1: + n = parent else: n = parent + n new_names.append(n) diff --git a/reclass/tests/data/02/classes/one/alpha.yml b/reclass/tests/data/02/classes/one/alpha.yml index a13cc5c8..9eb13f43 100644 --- a/reclass/tests/data/02/classes/one/alpha.yml +++ b/reclass/tests/data/02/classes/one/alpha.yml @@ -1,7 +1,11 @@ classes: - .beta - two.beta +- ..three +- ..two.delta parameters: test1: ${one_beta} test2: ${two_beta} + test3: ${three_alpha} + test4: ${two_gamma} diff --git a/reclass/tests/data/02/classes/three.yml b/reclass/tests/data/02/classes/three.yml index 987fde06..940603c4 100644 --- a/reclass/tests/data/02/classes/three.yml +++ b/reclass/tests/data/02/classes/three.yml @@ -1,2 +1,5 @@ classes: - .one.alpha + +parameters: + three_alpha: 3 diff --git a/reclass/tests/data/02/classes/two/gamma.yml b/reclass/tests/data/02/classes/two/gamma.yml new file mode 100644 index 00000000..a1d71da2 --- /dev/null +++ b/reclass/tests/data/02/classes/two/gamma.yml @@ -0,0 +1,2 @@ +parameters: + two_gamma: 4 diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index ced18e84..94f7ae88 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -68,7 +68,7 @@ def test_relative_class_names(self): def test_top_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('top_relative') - params = { 'test1': 1, 'test2': 2, 'one_beta': 1, 'two_beta': 2, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'three_aplha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } self.assertEqual(node['parameters'], params) def test_compose_node_names(self): From c394ea5744cad60ada891ca91821276c0f539c11 Mon Sep 17 00:00:00 2001 From: Martin Polreich Date: Mon, 8 Oct 2018 10:54:20 +0200 Subject: [PATCH 30/63] Fix tests --- reclass/tests/data/02/classes/four.yml | 2 ++ reclass/tests/data/02/classes/one/alpha.yml | 4 ++-- reclass/tests/data/02/classes/three.yml | 3 --- reclass/tests/test_core.py | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) create mode 100644 reclass/tests/data/02/classes/four.yml diff --git a/reclass/tests/data/02/classes/four.yml b/reclass/tests/data/02/classes/four.yml new file mode 100644 index 00000000..e5aec4c7 --- /dev/null +++ 
b/reclass/tests/data/02/classes/four.yml @@ -0,0 +1,2 @@ +parameters: + four_alpha: 3 \ No newline at end of file diff --git a/reclass/tests/data/02/classes/one/alpha.yml b/reclass/tests/data/02/classes/one/alpha.yml index 9eb13f43..7b298255 100644 --- a/reclass/tests/data/02/classes/one/alpha.yml +++ b/reclass/tests/data/02/classes/one/alpha.yml @@ -1,11 +1,11 @@ classes: - .beta - two.beta -- ..three +- ..four - ..two.delta parameters: test1: ${one_beta} test2: ${two_beta} - test3: ${three_alpha} + test3: ${four_alpha} test4: ${two_gamma} diff --git a/reclass/tests/data/02/classes/three.yml b/reclass/tests/data/02/classes/three.yml index 940603c4..987fde06 100644 --- a/reclass/tests/data/02/classes/three.yml +++ b/reclass/tests/data/02/classes/three.yml @@ -1,5 +1,2 @@ classes: - .one.alpha - -parameters: - three_alpha: 3 diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index 94f7ae88..2489da90 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -68,7 +68,7 @@ def test_relative_class_names(self): def test_top_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('top_relative') - params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'three_aplha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'four_aplha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } self.assertEqual(node['parameters'], params) def test_compose_node_names(self): From 767b772f002634d2f1633728062f66a9ebbbe564 Mon Sep 17 00:00:00 2001 From: Martin Polreich Date: Mon, 8 Oct 2018 10:57:30 +0200 Subject: [PATCH 31/63] Add test case for '..' --- Pipfile | 1 - README-extensions.rst | 10 ++++++++-- reclass/tests/data/02/classes/four.yml | 2 +- reclass/tests/data/02/classes/one/alpha.yml | 2 +- reclass/tests/test_core.py | 6 ++++-- 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/Pipfile b/Pipfile index fc2022b5..e7b12a97 100644 --- a/Pipfile +++ b/Pipfile @@ -9,7 +9,6 @@ name = "pypi" pyparsing = "*" PyYAML = "*" six = "*" -pyyaml = "*" enum34 = "*" # FIXME, issues with compile phase #"pygit2" = "*" diff --git a/README-extensions.rst b/README-extensions.rst index 66932563..22991c47 100644 --- a/README-extensions.rst +++ b/README-extensions.rst @@ -340,7 +340,7 @@ Load classes with relative names -------------------------------- Load referenced class from a relative location to the current class. -To load class from relative location start the class uri with "." char. +To load class from relative location start the class uri with "." or ".." char. The only supported reference is to nested tree structure below the current class. You are allowed to use syntax for relative uri to required class on any place on your model (first class loaded, init.yml, regular class .yml). @@ -350,7 +350,7 @@ The feature is expected to improve flexibility while sharing classes between you It's a new feature use it with care and mind that using "relative syntax" lower traceability of your pillar composition. -Example usage of relative class name: +Example usage of relative class name using '.' and '..': .. code-block:: yaml @@ -366,6 +366,12 @@ Example usage of relative class name: classes: - .defaults +.. 
code-block:: yaml + + #/etc/reclass/classes/component/configuration/init.yml + classes: + - ..defaults + Inventory Queries ----------------- diff --git a/reclass/tests/data/02/classes/four.yml b/reclass/tests/data/02/classes/four.yml index e5aec4c7..1f9873cb 100644 --- a/reclass/tests/data/02/classes/four.yml +++ b/reclass/tests/data/02/classes/four.yml @@ -1,2 +1,2 @@ parameters: - four_alpha: 3 \ No newline at end of file + four_alpha: 3 diff --git a/reclass/tests/data/02/classes/one/alpha.yml b/reclass/tests/data/02/classes/one/alpha.yml index 7b298255..7f7d0a00 100644 --- a/reclass/tests/data/02/classes/one/alpha.yml +++ b/reclass/tests/data/02/classes/one/alpha.yml @@ -2,7 +2,7 @@ classes: - .beta - two.beta - ..four -- ..two.delta +- ..two.gamma parameters: test1: ${one_beta} diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index 2489da90..8f28f7a9 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -62,13 +62,15 @@ def test_ignore_class_notfound_with_regexp(self): def test_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('relative') - params = { 'test1': 1, 'test2': 2, 'one_beta': 1, 'two_beta': 2, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } + self.maxDiff = None self.assertEqual(node['parameters'], params) def test_top_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('top_relative') - params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'four_aplha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } + self.maxDiff = None self.assertEqual(node['parameters'], params) def test_compose_node_names(self): From 3471d5d5eeaa1583b1068ccc3ede3ca04dbf8d8b Mon Sep 17 00:00:00 2001 From: Martin Polreich Date: Mon, 8 Oct 2018 15:01:07 +0200 Subject: [PATCH 32/63] Edit tests --- Pipfile | 1 + reclass/tests/data/02/classes/init.yml | 2 ++ reclass/tests/data/02/classes/one/alpha.yml | 2 ++ reclass/tests/test_core.py | 6 ++---- 4 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 reclass/tests/data/02/classes/init.yml diff --git a/Pipfile b/Pipfile index e7b12a97..fc2022b5 100644 --- a/Pipfile +++ b/Pipfile @@ -9,6 +9,7 @@ name = "pypi" pyparsing = "*" PyYAML = "*" six = "*" +pyyaml = "*" enum34 = "*" # FIXME, issues with compile phase #"pygit2" = "*" diff --git a/reclass/tests/data/02/classes/init.yml b/reclass/tests/data/02/classes/init.yml new file mode 100644 index 00000000..e40b8992 --- /dev/null +++ b/reclass/tests/data/02/classes/init.yml @@ -0,0 +1,2 @@ +parameters: + alpha_init: 5 \ No newline at end of file diff --git a/reclass/tests/data/02/classes/one/alpha.yml b/reclass/tests/data/02/classes/one/alpha.yml index 7f7d0a00..9454cd04 100644 --- a/reclass/tests/data/02/classes/one/alpha.yml +++ b/reclass/tests/data/02/classes/one/alpha.yml @@ -3,9 +3,11 @@ classes: - two.beta - ..four - ..two.gamma +- ..init parameters: test1: ${one_beta} test2: ${two_beta} test3: ${four_alpha} test4: 
${two_gamma} + test5: ${alpha_init} diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index 8f28f7a9..4827177b 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -62,15 +62,13 @@ def test_ignore_class_notfound_with_regexp(self): def test_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('relative') - params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } - self.maxDiff = None + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'test5': 5, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, 'alpha_init': 5, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } self.assertEqual(node['parameters'], params) def test_top_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('top_relative') - params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } - self.maxDiff = None + params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'test5': 5, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, 'alpha_init': 5, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } self.assertEqual(node['parameters'], params) def test_compose_node_names(self): From 5a2f9dabc1cfe54e641329c0e2f95e1ba519f703 Mon Sep 17 00:00:00 2001 From: Martin Polreich Date: Thu, 11 Oct 2018 14:25:56 +0200 Subject: [PATCH 33/63] Split ifs to separated functions --- README-extensions.rst | 2 ++ reclass/storage/yamldata.py | 38 ++++++++++++++++++++++--------------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/README-extensions.rst b/README-extensions.rst index 22991c47..d321fa1f 100644 --- a/README-extensions.rst +++ b/README-extensions.rst @@ -347,6 +347,8 @@ You are allowed to use syntax for relative uri to required class on any place on The feature is expected to improve flexibility while sharing classes between your models. +Please mpte that you can't use '..' without any calss following. If you want simply up in the sctructure, type in '..init'. + It's a new feature use it with care and mind that using "relative syntax" lower traceability of your pillar composition. diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py index c549089a..f1533032 100644 --- a/reclass/storage/yamldata.py +++ b/reclass/storage/yamldata.py @@ -58,24 +58,32 @@ def set_absolute_names(self, name, names): parent = '.'.join(structure[0:-1]) new_names = [] for n in names: - if n[0] == '.': - if len(n) > 1 and n[1] == '.': - grandparent = '.'.join(structure[0:-2]) - if len(n) == 2: - n = grandparent - elif parent == '' or grandparent == '': - n = n[2:] - else: - n = grandparent + n[1:] - elif parent == '': - n = n[1:] - elif len(n) == 1: - n = parent - else: - n = parent + n + if n[0] == '.' 
and len(n) > 1 and n[1] == '.': + grandparent = '.'.join(structure[0:-2]) + n = self.get_grandparent_directory(n, parent, grandparent) + else: + n = self.get_parent_directory(n, parent) new_names.append(n) return new_names + def get_parent_directory(self, name, parent): + if parent == '': + name = name[1:] + elif len(name) == 1: + name = parent + else: + name = parent + name + return name + + def get_grandparent_directory(self, name, parent, grandparent): + if len(name) == 2: + name = grandparent + elif parent == '' or grandparent == '': + name = name[2:] + else: + name = grandparent + name[1:] + return name + def get_entity(self, name, settings): #if name is None: # name = self._uri From 7794b68897faf16ccdef0167f0a38f3ec62b1b54 Mon Sep 17 00:00:00 2001 From: Martin Polreich Date: Thu, 11 Oct 2018 15:44:58 +0200 Subject: [PATCH 34/63] Simplify the absolute name parser --- reclass/storage/yamldata.py | 44 +++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py index f1533032..a38b589a 100644 --- a/reclass/storage/yamldata.py +++ b/reclass/storage/yamldata.py @@ -54,35 +54,31 @@ def get_data(self): return self._data def set_absolute_names(self, name, names): - structure = name.split('.') - parent = '.'.join(structure[0:-1]) new_names = [] for n in names: - if n[0] == '.' and len(n) > 1 and n[1] == '.': - grandparent = '.'.join(structure[0:-2]) - n = self.get_grandparent_directory(n, parent, grandparent) - else: - n = self.get_parent_directory(n, parent) + if n[0] == '.': + dots = self.count_dots(n) + levels_up = (dots * (-1)) + parent = '.'.join(name.split('.')[0:levels_up]) + if parent == '': + n = n[dots:] + else: + n = parent + n[dots - 1:] new_names.append(n) return new_names - def get_parent_directory(self, name, parent): - if parent == '': - name = name[1:] - elif len(name) == 1: - name = parent - else: - name = parent + name - return name - - def get_grandparent_directory(self, name, parent, grandparent): - if len(name) == 2: - name = grandparent - elif parent == '' or grandparent == '': - name = name[2:] - else: - name = grandparent + name[1:] - return name + def yield_dots(self, value): + try: + idx = value.index('.') + except ValueError: + return + if idx == 0: + yield '.' 
+ for dot in self.yield_dots(value[1:]): + yield dot + + def count_dots(self, value): + return len(list(self.yield_dots(value))) def get_entity(self, name, settings): #if name is None: From d159be10f9314b932db2f1d39f8e34f3f5b84784 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Thu, 11 Oct 2018 16:09:38 +0200 Subject: [PATCH 35/63] Fix error reporting for inventory queries Inventory query errors were raising exceptions due to a missed change of a value.contents() method to a class property --- reclass/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/core.py b/reclass/core.py index 75eea548..6dac5c38 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -189,7 +189,7 @@ def _get_inventory(self, all_envs, environment, queries): node.interpolate_single_export(q) except InterpolationError as e: e.nodename = nodename - raise InvQueryError(q.contents(), e, context=p, uri=q.uri) + raise InvQueryError(q.contents, e, context=p, uri=q.uri) inventory[nodename] = node.exports.as_dict() return inventory From 3af1e0deed69f2d564c37e3e21c692fd379a7eb5 Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Fri, 19 Oct 2018 18:09:21 +0400 Subject: [PATCH 36/63] More refactoring Added unit tests, removed some redundant code, removed parser from settings -- it was hardcoded, so no real reason to keep it there, amended logic for parser application: previosly only default sentinels were used in parser selection optimization, now a sentinel is picked from settings. --- reclass/datatypes/parameters.py | 19 +++++---- reclass/settings.py | 3 -- reclass/values/invitem.py | 5 +-- reclass/values/item.py | 10 +++-- reclass/values/parser.py | 40 ++++++++++--------- reclass/values/parser_funcs.py | 4 +- reclass/values/refitem.py | 17 ++++---- reclass/values/tests/test_compitem.py | 22 ++++------- reclass/values/tests/test_item.py | 48 ++++++++++++++++++++++ reclass/values/tests/test_listitem.py | 31 +++++++++++++++ reclass/values/tests/test_refitem.py | 57 +++++++++++++++++++++++++++ reclass/values/tests/test_scaitem.py | 38 ++++++++++++++++++ reclass/values/value.py | 10 ++--- reclass/values/valuelist.py | 22 ++++------- 14 files changed, 241 insertions(+), 85 deletions(-) create mode 100644 reclass/values/tests/test_item.py create mode 100644 reclass/values/tests/test_listitem.py create mode 100644 reclass/values/tests/test_refitem.py create mode 100644 reclass/values/tests/test_scaitem.py diff --git a/reclass/datatypes/parameters.py b/reclass/datatypes/parameters.py index ee404ce1..bab2a281 100644 --- a/reclass/datatypes/parameters.py +++ b/reclass/datatypes/parameters.py @@ -105,24 +105,23 @@ def _wrap_value(self, value): e.context = DictPath(self._settings.delimiter) raise + def _get_wrapped(self, position, value): + try: + return self._wrap_value(value) + except InterpolationError as e: + e.context.add_ancestor(str(position)) + raise + def _wrap_list(self, source): l = ParameterList(uri=self._uri) for (k, v) in enumerate(source): - try: - l.append(self._wrap_value(v)) - except InterpolationError as e: - e.context.add_ancestor(str(k)) - raise + l.append(self._get_wrapped(k, v)) return l def _wrap_dict(self, source): d = ParameterDict(uri=self._uri) for (k, v) in iteritems(source): - try: - d[k] = self._wrap_value(v) - except InterpolationError as e: - e.context.add_ancestor(str(k)) - raise + d[k] = self._get_wrapped(k, v) return d def _update_value(self, cur, new): diff --git a/reclass/settings.py b/reclass/settings.py index 3e223cc1..57602393 100644 --- a/reclass/settings.py 
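Extending the earlier single-dot sketch, the simplified set_absolute_names and count_dots from PATCH 34 above resolve a single leading dot within the current class's namespace and move one level further up for each additional dot. A condensed, standalone approximation:

.. code-block:: python

    # Condensed sketch of the dot-counting resolution from PATCH 34 above;
    # count_dots here counts leading dots, like the yield_dots helper.
    def count_dots(value):
        return len(value) - len(value.lstrip('.'))

    def set_absolute_names(name, names):
        new_names = []
        for n in names:
            if n.startswith('.'):
                dots = count_dots(n)
                parent = '.'.join(name.split('.')[0:-dots])
                n = n[dots:] if parent == '' else parent + n[dots - 1:]
            new_names.append(n)
        return new_names

    # Mirrors the data/02 model used by test_relative_class_names:
    assert set_absolute_names('one.alpha',
                              ['.beta', '..four', '..two.gamma', '..init']) == \
        ['one.beta', 'four', 'two.gamma', 'init']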
+++ b/reclass/settings.py @@ -6,7 +6,6 @@ from __future__ import unicode_literals import copy -import reclass.values.parser_funcs from reclass.defaults import * from six import string_types @@ -43,8 +42,6 @@ def __init__(self, options={}): self.group_errors = options.get('group_errors', OPT_GROUP_ERRORS) - self.ref_parser = reclass.values.parser_funcs.get_ref_parser(self.escape_character, self.reference_sentinels, self.export_sentinels) - self.simple_ref_parser = reclass.values.parser_funcs.get_simple_ref_parser(self.escape_character, self.reference_sentinels, self.export_sentinels) def __eq__(self, other): return isinstance(other, type(self)) \ diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py index 54616124..adb1cb6c 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -135,6 +135,7 @@ class InvItem(item.Item): def __init__(self, newitem, settings): super(InvItem, self).__init__(newitem.render(None, None), settings) self.needs_all_envs = False + self.has_inv_query = True self.ignore_failed_render = ( self._settings.inventory_ignore_failed_render) self._parse_expression(self.contents) @@ -178,10 +179,6 @@ def _parse_expression(self, expr): msg = 'Unknown expression type: %s' raise ExpressionError(msg % self._expr_type, tbFlag=False) - @property - def has_inv_query(self): - return True - @property def has_references(self): return len(self._question.refs) > 0 diff --git a/reclass/values/item.py b/reclass/values/item.py index ee469958..45aeb77b 100644 --- a/reclass/values/item.py +++ b/reclass/values/item.py @@ -22,6 +22,7 @@ class Item(object): def __init__(self, item, settings): self._settings = settings self.contents = item + self.has_inv_query = False def allRefs(self): return True @@ -30,10 +31,6 @@ def allRefs(self): def has_references(self): return False - @property - def has_inv_query(self): - return False - def is_container(self): return False @@ -60,6 +57,10 @@ class ItemWithReferences(Item): def __init__(self, items, settings): super(ItemWithReferences, self).__init__(items, settings) + try: + iter(self.contents) + except TypeError: + self.contents = [self.contents] self.assembleRefs() @property @@ -81,6 +82,7 @@ def assembleRefs(self, context={}): if item.allRefs is False: self.allRefs = False + class ContainerItem(Item): def is_container(self): diff --git a/reclass/values/parser.py b/reclass/values/parser.py index 914e8257..4d4e12ca 100644 --- a/reclass/values/parser.py +++ b/reclass/values/parser.py @@ -17,37 +17,41 @@ from reclass.errors import ParseError from reclass.values.parser_funcs import STR, REF, INV +import reclass.values.parser_funcs as parsers class Parser(object): def parse(self, value, settings): + def full_parse(): + try: + return ref_parser.parseString(value).asList() + except pp.ParseException as e: + raise ParseError(e.msg, e.line, e.col, e.lineno) + self._settings = settings - dollars = value.count('$') - if dollars == 0: - # speed up: only use pyparsing if there is a $ in the string + parser_settings = (settings.escape_character, + settings.reference_sentinels, + settings.export_sentinels) + ref_parser = parsers.get_ref_parser(*parser_settings) + simple_ref_parser = parsers.get_simple_ref_parser(*parser_settings) + + sentinel_count = (value.count(settings.reference_sentinels[0]) + + value.count(settings.export_sentinels[0])) + if sentinel_count == 0: + # speed up: only use pyparsing if there are sentinels in the value return ScaItem(value, self._settings) - elif dollars == 1: - # speed up: try a simple reference + elif 
sentinel_count == 1: # speed up: try a simple reference try: - tokens = self._settings.simple_ref_parser.leaveWhitespace().parseString(value).asList() + tokens = simple_ref_parser.parseString(value).asList() except pp.ParseException: - # fall back on the full parser - try: - tokens = self._settings.ref_parser.leaveWhitespace().parseString(value).asList() - except pp.ParseException as e: - raise ParseError(e.msg, e.line, e.col, e.lineno) + tokens = full_parse() # fall back on the full parser else: - # use the full parser - try: - tokens = self._settings.ref_parser.leaveWhitespace().parseString(value).asList() - except pp.ParseException as e: - raise ParseError(e.msg, e.line, e.col, e.lineno) + tokens = full_parse() # use the full parser items = self._create_items(tokens) if len(items) == 1: return items[0] - else: - return CompItem(items, self._settings) + return CompItem(items, self._settings) _create_dict = { STR: (lambda s, v: ScaItem(v, s._settings)), REF: (lambda s, v: s._create_ref(v)), diff --git a/reclass/values/parser_funcs.py b/reclass/values/parser_funcs.py index 50babd0e..3c24b40d 100644 --- a/reclass/values/parser_funcs.py +++ b/reclass/values/parser_funcs.py @@ -143,7 +143,7 @@ def get_ref_parser(escape_character, reference_sentinels, export_sentinels): item = reference | export | string line = pp.OneOrMore(item) + pp.StringEnd() - return line + return line.leaveWhitespace() def get_simple_ref_parser(escape_character, reference_sentinels, export_sentinels): _ESCAPE = escape_character @@ -158,4 +158,4 @@ def get_simple_ref_parser(escape_character, reference_sentinels, export_sentinel ref_close = pp.Literal(_REF_CLOSE).suppress() reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(REF)) line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + pp.StringEnd() - return line + return line.leaveWhitespace() diff --git a/reclass/values/refitem.py b/reclass/values/refitem.py index df713e1a..64bf4503 100644 --- a/reclass/values/refitem.py +++ b/reclass/values/refitem.py @@ -16,12 +16,14 @@ class RefItem(item.ItemWithReferences): def assembleRefs(self, context={}): super(RefItem, self).assembleRefs(context) try: - strings = [str(i.render(context, None)) for i in self.contents] - value = "".join(strings) - self._refs.append(value) + self._refs.append(self._flatten_contents(context)) except ResolveError as e: self.allRefs = False + def _flatten_contents(self, context, inventory=None): + result = [str(i.render(context, inventory)) for i in self.contents] + return "".join(result) + def _resolve(self, ref, context): path = DictPath(self._settings.delimiter, ref) try: @@ -30,11 +32,10 @@ def _resolve(self, ref, context): raise ResolveError(ref) def render(self, context, inventory): - if len(self.contents) == 1: - return self._resolve(self.contents[0].render(context, inventory), - context) - strings = [str(i.render(context, inventory)) for i in self.contents] - return self._resolve("".join(strings), context) + #strings = [str(i.render(context, inventory)) for i in self.contents] + #return self._resolve("".join(strings), context) + return self._resolve(self._flatten_contents(context, inventory), + context) def __str__(self): strings = [str(i) for i in self.contents] diff --git a/reclass/values/tests/test_compitem.py b/reclass/values/tests/test_compitem.py index 71a6f0e9..c3ee6909 100644 --- a/reclass/values/tests/test_compitem.py +++ b/reclass/values/tests/test_compitem.py @@ -71,6 +71,14 @@ def test_assembleRefs_two_items_with_two_refs(self): 
self.assertTrue(composite.has_references) self.assertEquals(composite.get_references(), expected_refs) + def test_string_representation(self): + composite = CompItem(Value(1, SETTINGS, ''), SETTINGS) + expected = '1' + + result = str(composite) + + self.assertEquals(result, expected) + def test_render_single_item(self): val1 = Value('${foo}', SETTINGS, '') @@ -106,20 +114,6 @@ def test_merge_over_merge_composite(self): self.assertEquals(result, composite2) - def test_merge_over_merge_list_not_allowed(self): - val1 = Value(None, SETTINGS, '') - listitem = ListItem([1], SETTINGS) - composite = CompItem([val1], SETTINGS) - - self.assertRaises(RuntimeError, composite.merge_over, listitem) - - def test_merge_dict_dict_not_allowed(self): - val1 = Value(None, SETTINGS, '') - dictitem = DictItem({'foo': 'bar'}, SETTINGS) - composite = CompItem([val1], SETTINGS) - - self.assertRaises(RuntimeError, composite.merge_over, dictitem) - def test_merge_other_types_not_allowed(self): other = type('Other', (object,), {'type': 34}) val1 = Value(None, SETTINGS, '') diff --git a/reclass/values/tests/test_item.py b/reclass/values/tests/test_item.py new file mode 100644 index 00000000..4b91f6e6 --- /dev/null +++ b/reclass/values/tests/test_item.py @@ -0,0 +1,48 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +from reclass.values.item import ContainerItem +from reclass.values.item import ItemWithReferences +import unittest +from mock import MagicMock + +SETTINGS = Settings() + + +class TestItemWithReferences(unittest.TestCase): + + def test_assembleRef_allrefs(self): + phonyitem = MagicMock() + phonyitem.has_references = True + phonyitem.get_references = lambda *x: [1] + + iwr = ItemWithReferences([phonyitem], {}) + + self.assertEquals(iwr.get_references(), [1]) + self.assertTrue(iwr.allRefs) + + def test_assembleRef_partial(self): + phonyitem = MagicMock() + phonyitem.has_references = True + phonyitem.allRefs = False + phonyitem.get_references = lambda *x: [1] + + iwr = ItemWithReferences([phonyitem], {}) + + self.assertEquals(iwr.get_references(), [1]) + self.assertFalse(iwr.allRefs) + + +class TestContainerItem(unittest.TestCase): + + def test_render(self): + container = ContainerItem('foo', SETTINGS) + + self.assertEquals(container.render(None, None), 'foo') + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_listitem.py b/reclass/values/tests/test_listitem.py new file mode 100644 index 00000000..618b7797 --- /dev/null +++ b/reclass/values/tests/test_listitem.py @@ -0,0 +1,31 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +import unittest + +SETTINGS = Settings() + +class TestListItem(unittest.TestCase): + + def test_merge_over_merge_list(self): + listitem1 = ListItem([1], SETTINGS) + listitem2 = ListItem([2], SETTINGS) + expected = ListItem([1, 2], SETTINGS) + + result = listitem2.merge_over(listitem1) + + self.assertEquals(result.contents, expected.contents) + + def test_merge_other_types_not_allowed(self): + other = type('Other', (object,), {'type': 
34}) + val1 = Value(None, SETTINGS, '') + listitem = ListItem(val1, SETTINGS) + + self.assertRaises(RuntimeError, listitem.merge_over, other) + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_refitem.py b/reclass/values/tests/test_refitem.py new file mode 100644 index 00000000..65814782 --- /dev/null +++ b/reclass/values/tests/test_refitem.py @@ -0,0 +1,57 @@ +from reclass import errors + +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +from reclass.values.refitem import RefItem +import unittest +from mock import MagicMock + +SETTINGS = Settings() + +class TestRefItem(unittest.TestCase): + + def test_assembleRefs_ok(self): + phonyitem = MagicMock() + phonyitem.render = lambda x, k: 'bar' + phonyitem.has_references = True + phonyitem.get_references = lambda *x: ['foo'] + + iwr = RefItem([phonyitem], {}) + + self.assertEquals(iwr.get_references(), ['foo', 'bar']) + self.assertTrue(iwr.allRefs) + + def test_assembleRefs_failedrefs(self): + phonyitem = MagicMock() + phonyitem.render.side_effect = errors.ResolveError('foo') + phonyitem.has_references = True + phonyitem.get_references = lambda *x: ['foo'] + + iwr = RefItem([phonyitem], {}) + + self.assertEquals(iwr.get_references(), ['foo']) + self.assertFalse(iwr.allRefs) + + def test__resolve_ok(self): + reference = RefItem('', Settings({'delimiter': ':'})) + + result = reference._resolve('foo:bar', {'foo':{'bar': 1}}) + + self.assertEquals(result, 1) + + def test__resolve_fails(self): + refitem = RefItem('', Settings({'delimiter': ':'})) + context = {'foo':{'bar': 1}} + reference = 'foo:baz' + + self.assertRaises(errors.ResolveError, refitem._resolve, reference, + context) + + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/tests/test_scaitem.py b/reclass/values/tests/test_scaitem.py new file mode 100644 index 00000000..b6d038de --- /dev/null +++ b/reclass/values/tests/test_scaitem.py @@ -0,0 +1,38 @@ +from reclass.settings import Settings +from reclass.values.value import Value +from reclass.values.compitem import CompItem +from reclass.values.scaitem import ScaItem +from reclass.values.valuelist import ValueList +from reclass.values.listitem import ListItem +from reclass.values.dictitem import DictItem +import unittest + +SETTINGS = Settings() + +class TestScaItem(unittest.TestCase): + + def test_merge_over_merge_scalar(self): + scalar1 = ScaItem([1], SETTINGS) + scalar2 = ScaItem([2], SETTINGS) + + result = scalar2.merge_over(scalar1) + + self.assertEquals(result.contents, scalar2.contents) + + def test_merge_over_merge_composite(self): + scalar1 = CompItem(Value(1, SETTINGS, ''), SETTINGS) + scalar2 = ScaItem([2], SETTINGS) + + result = scalar2.merge_over(scalar1) + + self.assertEquals(result.contents, scalar2.contents) + + def test_merge_other_types_not_allowed(self): + other = type('Other', (object,), {'type': 34}) + val1 = Value(None, SETTINGS, '') + scalar = ScaItem(val1, SETTINGS) + + self.assertRaises(RuntimeError, scalar.merge_over, other) + +if __name__ == '__main__': + unittest.main() diff --git a/reclass/values/value.py b/reclass/values/value.py index affd9441..736e01ab 100644 --- a/reclass/values/value.py +++ b/reclass/values/value.py @@ -22,7 +22,7 @@ class Value(object): def __init__(self, value, settings, 
uri, parse_string=True): self._settings = settings - self._uri = uri + self.uri = uri self.overwrite = False self._constant = False if isinstance(value, string_types): @@ -30,7 +30,7 @@ def __init__(self, value, settings, uri, parse_string=True): try: self._item = self._parser.parse(value, self._settings) except InterpolationError as e: - e.uri = self._uri + e.uri = self.uri raise else: self._item = ScaItem(value, self._settings) @@ -41,10 +41,6 @@ def __init__(self, value, settings, uri, parse_string=True): else: self._item = ScaItem(value, self._settings) - @property - def uri(self): - return self._uri - @property def constant(self): return self._constant @@ -102,7 +98,7 @@ def render(self, context, inventory): try: return self._item.render(context, inventory) except InterpolationError as e: - e.uri = self._uri + e.uri = self.uri raise @property diff --git a/reclass/values/valuelist.py b/reclass/values/valuelist.py index 86563fae..e8c3a0ce 100644 --- a/reclass/values/valuelist.py +++ b/reclass/values/valuelist.py @@ -22,9 +22,9 @@ def __init__(self, value, settings): self.allRefs = True self._values = [value] self._inv_refs = [] - self._has_inv_query = False + self.has_inv_query = False self.ignore_failed_render = False - self._is_complex = False + self.is_complex = False self._update() @property @@ -42,40 +42,32 @@ def extend(self, values): def _update(self): self.assembleRefs() self._check_for_inv_query() - self._is_complex = False + self.is_complex = False item_type = self._values[0].item_type() for v in self._values: if v.is_complex or v.constant or v.overwrite or v.item_type() != item_type: - self._is_complex = True + self.is_complex = True @property def has_references(self): return len(self._refs) > 0 - @property - def has_inv_query(self): - return self._has_inv_query - def get_inv_references(self): return self._inv_refs - @property - def is_complex(self): - return self._is_complex - def get_references(self): return self._refs def _check_for_inv_query(self): - self._has_inv_query = False + self.has_inv_query = False self.ignore_failed_render = True for value in self._values: if value.has_inv_query: self._inv_refs.extend(value.get_inv_references()) - self._has_inv_query = True + self.has_inv_query = True if value.ignore_failed_render() is False: self.ignore_failed_render = False - if self._has_inv_query is False: + if self.has_inv_query is False: self.ignore_failed_render = False def assembleRefs(self, context={}): From c193191c8e097d7e9793b669a864455a9de38f42 Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Fri, 19 Oct 2018 18:14:32 +0400 Subject: [PATCH 37/63] Settings cleanup Settings code deduplicated. 
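The shape of the deduplication is a single table of option names and defaults consumed by one loop, in place of a long run of near-identical assignments and an equally long hand-written __eq__. A minimal sketch of the pattern (placeholder defaults only, not the full option table; the real class is reclass.settings.Settings, shown in the diff below):

    class SettingsSketch(object):
        # Illustrative only -- the real table lists every supported
        # option together with its default from reclass.defaults.
        known_opts = {
            'delimiter': ':',
            'escape_character': '\\',
        }

        def __init__(self, options={}):
            for name, default in self.known_opts.items():
                setattr(self, name, options.get(name, default))

        def __eq__(self, other):
            return (isinstance(other, type(self)) and
                    all(getattr(self, o) == getattr(other, o)
                        for o in self.known_opts))
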
--- reclass/core.py | 3 +- reclass/settings.py | 97 +++++++++++++++++++++------------------------ reclass/version.py | 6 ++- 3 files changed, 52 insertions(+), 54 deletions(-) diff --git a/reclass/core.py b/reclass/core.py index 6dac5c38..d07b232b 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -39,7 +39,8 @@ def __init__(self, storage, class_mappings, settings, input_data=None): self._settings = settings self._input_data = input_data if self._settings.ignore_class_notfound: - self._cnf_r = re.compile('|'.join([x for x in self._settings.ignore_class_notfound_regexp])) + self._cnf_r = re.compile( + '|'.join(self._settings.ignore_class_notfound_regexp)) @staticmethod def _get_timestamp(): diff --git a/reclass/settings.py b/reclass/settings.py index 57602393..b7f52526 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -5,66 +5,61 @@ from __future__ import print_function from __future__ import unicode_literals -import copy -from reclass.defaults import * +import reclass.defaults as defaults + +from six import string_types, iteritems -from six import string_types class Settings(object): + known_opts = { + 'allow_scalar_over_dict': defaults.OPT_ALLOW_SCALAR_OVER_DICT, + 'allow_scalar_over_list': defaults.OPT_ALLOW_SCALAR_OVER_LIST, + 'allow_list_over_scalar': defaults.OPT_ALLOW_LIST_OVER_SCALAR, + 'allow_dict_over_scalar': defaults.OPT_ALLOW_DICT_OVER_SCALAR, + 'allow_none_override': defaults.OPT_ALLOW_NONE_OVERRIDE, + 'automatic_parameters': defaults.AUTOMATIC_RECLASS_PARAMETERS, + 'default_environment': defaults.DEFAULT_ENVIRONMENT, + 'delimiter': defaults.PARAMETER_INTERPOLATION_DELIMITER, + 'dict_key_override_prefix': + defaults.PARAMETER_DICT_KEY_OVERRIDE_PREFIX, + 'dict_key_constant_prefix': + defaults.PARAMETER_DICT_KEY_CONSTANT_PREFIX, + 'escape_character': defaults.ESCAPE_CHARACTER, + 'export_sentinels': defaults.EXPORT_SENTINELS, + 'inventory_ignore_failed_node': + defaults.OPT_INVENTORY_IGNORE_FAILED_NODE, + 'inventory_ignore_failed_render': + defaults.OPT_INVENTORY_IGNORE_FAILED_RENDER, + 'reference_sentinels': defaults.REFERENCE_SENTINELS, + 'ignore_class_notfound': defaults.OPT_IGNORE_CLASS_NOTFOUND, + 'strict_constant_parameters': + defaults.OPT_STRICT_CONSTANT_PARAMETERS, + 'ignore_class_notfound_regexp': + defaults.OPT_IGNORE_CLASS_NOTFOUND_REGEXP, + 'ignore_class_notfound_warning': + defaults.OPT_IGNORE_CLASS_NOTFOUND_WARNING, + 'ignore_overwritten_missing_referencesdefaults.': + defaults.OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES, + 'group_errors': defaults.OPT_GROUP_ERRORS, + 'compose_node_name': defaults.OPT_COMPOSE_NODE_NAME, + } + def __init__(self, options={}): - self.allow_scalar_over_dict = options.get('allow_scalar_over_dict', OPT_ALLOW_SCALAR_OVER_DICT) - self.allow_scalar_over_list = options.get('allow_scalar_over_list', OPT_ALLOW_SCALAR_OVER_LIST) - self.allow_list_over_scalar = options.get('allow_list_over_scalar', OPT_ALLOW_LIST_OVER_SCALAR) - self.allow_dict_over_scalar = options.get('allow_dict_over_scalar', OPT_ALLOW_DICT_OVER_SCALAR) - self.allow_none_override = options.get('allow_none_override', OPT_ALLOW_NONE_OVERRIDE) - self.automatic_parameters = options.get('automatic_parameters', AUTOMATIC_RECLASS_PARAMETERS) - self.default_environment = options.get('default_environment', DEFAULT_ENVIRONMENT) - self.delimiter = options.get('delimiter', PARAMETER_INTERPOLATION_DELIMITER) - self.dict_key_override_prefix = options.get('dict_key_override_prefix', PARAMETER_DICT_KEY_OVERRIDE_PREFIX) - self.dict_key_constant_prefix = 
options.get('dict_key_constant_prefix', PARAMETER_DICT_KEY_CONSTANT_PREFIX) - self.dict_key_prefixes = [ str(self.dict_key_override_prefix), str(self.dict_key_constant_prefix) ] - self.escape_character = options.get('escape_character', ESCAPE_CHARACTER) - self.export_sentinels = options.get('export_sentinels', EXPORT_SENTINELS) - self.inventory_ignore_failed_node = options.get('inventory_ignore_failed_node', OPT_INVENTORY_IGNORE_FAILED_NODE) - self.inventory_ignore_failed_render = options.get('inventory_ignore_failed_render', OPT_INVENTORY_IGNORE_FAILED_RENDER) - self.reference_sentinels = options.get('reference_sentinels', REFERENCE_SENTINELS) - self.ignore_class_notfound = options.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND) - self.strict_constant_parameters = options.get('strict_constant_parameters', OPT_STRICT_CONSTANT_PARAMETERS) - self.compose_node_name = options.get('compose_node_name', OPT_COMPOSE_NODE_NAME) + for opt_name, opt_value in iteritems(self.known_opts): + setattr(self, opt_name, options.get(opt_name, opt_value)) - self.ignore_class_notfound_regexp = options.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP) + self.dict_key_prefixes = [str(self.dict_key_override_prefix), + str(self.dict_key_constant_prefix)] if isinstance(self.ignore_class_notfound_regexp, string_types): - self.ignore_class_notfound_regexp = [ self.ignore_class_notfound_regexp ] - - self.ignore_class_notfound_warning = options.get('ignore_class_notfound_warning', OPT_IGNORE_CLASS_NOTFOUND_WARNING) - self.ignore_overwritten_missing_references = options.get('ignore_overwritten_missing_references', OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES) - - self.group_errors = options.get('group_errors', OPT_GROUP_ERRORS) - + self.ignore_class_notfound_regexp = [ + self.ignore_class_notfound_regexp] def __eq__(self, other): - return isinstance(other, type(self)) \ - and self.allow_scalar_over_dict == other.allow_scalar_over_dict \ - and self.allow_scalar_over_list == other.allow_scalar_over_list \ - and self.allow_list_over_scalar == other.allow_list_over_scalar \ - and self.allow_dict_over_scalar == other.allow_dict_over_scalar \ - and self.allow_none_override == other.allow_none_override \ - and self.automatic_parameters == other.automatic_parameters \ - and self.default_environment == other.default_environment \ - and self.delimiter == other.delimiter \ - and self.dict_key_override_prefix == other.dict_key_override_prefix \ - and self.dict_key_constant_prefix == other.dict_key_constant_prefix \ - and self.escape_character == other.escape_character \ - and self.export_sentinels == other.export_sentinels \ - and self.inventory_ignore_failed_node == other.inventory_ignore_failed_node \ - and self.inventory_ignore_failed_render == other.inventory_ignore_failed_render \ - and self.reference_sentinels == other.reference_sentinels \ - and self.ignore_class_notfound == other.ignore_class_notfound \ - and self.ignore_class_notfound_regexp == other.ignore_class_notfound_regexp \ - and self.ignore_class_notfound_warning == other.ignore_class_notfound_warning \ - and self.strict_constant_parameters == other.strict_constant_parameters \ - and self.compose_node_name == other.compose_node_name + if isinstance(other, type(self)): + return all(getattr(self, opt) == getattr(other, opt) + for opt in self.known_opts) + return False def __copy__(self): cls = self.__class__ diff --git a/reclass/version.py b/reclass/version.py index 6d7d7eb3..63fda269 100644 --- a/reclass/version.py +++ b/reclass/version.py @@ 
-12,12 +12,14 @@ from __future__ import unicode_literals RECLASS_NAME = 'reclass' -DESCRIPTION = 'merge data by recursive descent down an ancestry hierarchy (forked extended version)' +DESCRIPTION = ('merge data by recursive descent down an ancestry hierarchy ' + '(forked extended version)') VERSION = '1.5.6' AUTHOR = 'martin f. krafft / Andrew Pickford / salt-formulas community' AUTHOR_EMAIL = 'salt-formulas@freelists.org' MAINTAINER = 'salt-formulas community' MAINTAINER_EMAIL = 'salt-formulas@freelists.org' -COPYRIGHT = 'Copyright © 2007–14 martin f. krafft, extensions © 2017 Andrew Pickford, extensions © salt-formulas community' +COPYRIGHT = ('Copyright © 2007–14 martin f. krafft, extensions © 2017 Andrew' + ' Pickford, extensions © salt-formulas community') LICENCE = 'Artistic Licence 2.0' URL = 'https://github.com/salt-formulas/reclass' From 2294aef0dd2a3692f287d5f4ab7f3cd4a2a41aaa Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Fri, 19 Oct 2018 18:16:18 +0400 Subject: [PATCH 38/63] Minor changes Mostly minor changes to make code more compliant with proper coding style, also some repetitions are removed. --- reclass/__init__.py | 2 +- reclass/core.py | 21 ++++++++++++++------- reclass/datatypes/applications.py | 12 ++++-------- reclass/datatypes/classes.py | 2 +- reclass/datatypes/entity.py | 21 ++++++++++----------- reclass/datatypes/exports.py | 3 --- reclass/storage/loader.py | 3 ++- reclass/values/value.py | 13 ++----------- 8 files changed, 34 insertions(+), 43 deletions(-) diff --git a/reclass/__init__.py b/reclass/__init__.py index 2167a303..d5f34103 100644 --- a/reclass/__init__.py +++ b/reclass/__init__.py @@ -19,7 +19,7 @@ def get_storage(storage_type, nodes_uri, classes_uri, compose_node_name, **kwarg storage_class = StorageBackendLoader(storage_type).load() return MemcacheProxy(storage_class(nodes_uri, classes_uri, compose_node_name, **kwargs)) -def get_path_mangler(storage_type,**kwargs): +def get_path_mangler(storage_type, **kwargs): return StorageBackendLoader(storage_type).path_mangler() def output(data, fmt, pretty_print=False, no_refs=False): diff --git a/reclass/core.py b/reclass/core.py index d07b232b..e3142af7 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -23,7 +23,6 @@ from six import iteritems from reclass.settings import Settings -from reclass.output.yaml_outputter import ExplicitDumper from reclass.datatypes import Entity, Classes, Parameters, Exports from reclass.errors import MappingFormatError, ClassNameResolveError, ClassNotFound, InvQueryClassNameResolveError, InvQueryClassNotFound, InvQueryError, InterpolationError, ResolveError from reclass.values.parser import Parser @@ -153,8 +152,16 @@ def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, node def _get_automatic_parameters(self, nodename, environment): if self._settings.automatic_parameters: - return Parameters({ '_reclass_': { 'name': { 'full': nodename, 'short': nodename.split('.')[0] }, - 'environment': environment } }, self._settings, '__auto__') + pars = { + '_reclass_': { + 'name': { + 'full': nodename, + 'short': nodename.split('.')[0] + }, + 'environment': environment + } + } + return Parameters(pars, self._settings, '__auto__') else: return Parameters({}, self._settings, '') @@ -163,13 +170,12 @@ def _get_inventory(self, all_envs, environment, queries): for nodename in self._storage.enumerate_nodes(): try: node_base = self._storage.get_node(nodename, self._settings) - if node_base.environment == None: + if node_base.environment is None: 
node_base.environment = self._settings.default_environment except yaml.scanner.ScannerError as e: if self._settings.inventory_ignore_failed_node: continue - else: - raise + raise if all_envs or node_base.environment == environment: try: @@ -221,7 +227,8 @@ def _nodeinfo(self, nodename, inventory): raise def _nodeinfo_as_dict(self, nodename, entity): - ret = {'__reclass__' : {'node': entity.name, 'name': nodename, + ret = {'__reclass__' : {'node': entity.name, + 'name': nodename, 'uri': entity.uri, 'environment': entity.environment, 'timestamp': Core._get_timestamp() diff --git a/reclass/datatypes/applications.py b/reclass/datatypes/applications.py index 90ae54c5..4f6ee10b 100644 --- a/reclass/datatypes/applications.py +++ b/reclass/datatypes/applications.py @@ -28,18 +28,14 @@ class Applications(Classes): def __init__(self, iterable=None, negation_prefix=DEFAULT_NEGATION_PREFIX): - self._negation_prefix = negation_prefix + self.negation_prefix = negation_prefix self._offset = len(negation_prefix) self._negations = [] super(Applications, self).__init__(iterable) - @property - def negation_prefix(self): - return self._negation_prefix - def append_if_new(self, item): self._assert_is_string(item) - if item.startswith(self._negation_prefix): + if item.startswith(self.negation_prefix): item = item[self._offset:] self._negations.append(item) try: @@ -64,6 +60,6 @@ def merge_unique(self, iterable): def __repr__(self): contents = self._items + \ - ['%s%s' % (self._negation_prefix, i) for i in self._negations] + ['%s%s' % (self.negation_prefix, i) for i in self._negations] return "%s(%r, %r)" % (self.__class__.__name__, contents, - str(self._negation_prefix)) + str(self.negation_prefix)) diff --git a/reclass/datatypes/classes.py b/reclass/datatypes/classes.py index 5270e280..fa9cbcfb 100644 --- a/reclass/datatypes/classes.py +++ b/reclass/datatypes/classes.py @@ -57,7 +57,7 @@ def merge_unique(self, iterable): def _assert_is_string(self, item): if not isinstance(item, six.string_types): - raise TypeError('%s instances can only contain strings, '\ + raise TypeError('%s instances can only contain strings, ' 'not %s' % (self.__class__.__name__, type(item))) def _assert_valid_characters(self, item): diff --git a/reclass/datatypes/entity.py b/reclass/datatypes/entity.py index 8133de53..2e0e1e43 100644 --- a/reclass/datatypes/entity.py +++ b/reclass/datatypes/entity.py @@ -35,7 +35,6 @@ def __init__(self, settings, classes=None, applications=None, self._environment = environment name = property(lambda s: s._name) - short_name = property(lambda s: s._short_name) uri = property(lambda s: s._uri) classes = property(lambda s: s._classes) applications = property(lambda s: s._applications) @@ -61,10 +60,10 @@ def _set_field(self, received_value, expected_type, parameters=None): return received_value def merge(self, other): - self._classes.merge_unique(other._classes) - self._applications.merge_unique(other._applications) - self._parameters.merge(other._parameters) - self._exports.merge(other._exports) + self._classes.merge_unique(other.classes) + self._applications.merge_unique(other.applications) + self._parameters.merge(other.parameters) + self._exports.merge(other.exports) self._name = other.name self._uri = other.uri self._parameters._uri = other.uri @@ -91,12 +90,12 @@ def interpolate_single_export(self, references): def __eq__(self, other): return isinstance(other, type(self)) \ - and self._applications == other._applications \ - and self._classes == other._classes \ - and self._parameters == 
other._parameters \ - and self._exports == other._exports \ - and self._name == other._name \ - and self._uri == other._uri + and self._applications == other.applications \ + and self._classes == other.classes \ + and self._parameters == other.parameters \ + and self._exports == other.exports \ + and self._name == other.name \ + and self._uri == other.uri def __ne__(self, other): return not self.__eq__(other) diff --git a/reclass/datatypes/exports.py b/reclass/datatypes/exports.py index 04ab2007..984a15a5 100644 --- a/reclass/datatypes/exports.py +++ b/reclass/datatypes/exports.py @@ -23,9 +23,6 @@ class Exports(Parameters): def __init__(self, mapping, settings, uri): super(Exports, self).__init__(mapping, settings, uri) - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, self._base) - def delete_key(self, key): self._base.pop(key, None) self._unrendered.pop(key, None) diff --git a/reclass/storage/loader.py b/reclass/storage/loader.py index aab554a8..0a66a666 100644 --- a/reclass/storage/loader.py +++ b/reclass/storage/loader.py @@ -32,5 +32,6 @@ def load(self, klassname='ExternalNodeStorage'): def path_mangler(self, name='path_mangler'): function = getattr(self._module, name, None) if function is None: - raise AttributeError('Storage backend class {0} does not export "{1}"'.format(self._name, name)) + raise AttributeError('Storage backend class {0} does not export ' + '"{1}"'.format(self._name, name)) return function diff --git a/reclass/values/value.py b/reclass/values/value.py index 736e01ab..451617ec 100644 --- a/reclass/values/value.py +++ b/reclass/values/value.py @@ -24,7 +24,7 @@ def __init__(self, value, settings, uri, parse_string=True): self._settings = settings self.uri = uri self.overwrite = False - self._constant = False + self.constant = False if isinstance(value, string_types): if parse_string: try: @@ -41,14 +41,6 @@ def __init__(self, value, settings, uri, parse_string=True): else: self._item = ScaItem(value, self._settings) - @property - def constant(self): - return self._constant - - @constant.setter - def constant(self, constant): - self._constant = constant - def item_type(self): return self._item.type @@ -74,8 +66,7 @@ def has_inv_query(self): def needs_all_envs(self): if self._item.has_inv_query: return self._item.needs_all_envs - else: - return False + return False def ignore_failed_render(self): return self._item.ignore_failed_render From bb76a54ff6aa3ad024ff8e8d21185ffe59911d97 Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Fri, 19 Oct 2018 18:21:15 +0400 Subject: [PATCH 39/63] Fix for reference check in core Reference sentinel was hardcoded in core, replaced it with proper counting of actual sentinels that are present in settings. 
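The pre-check that decides whether pyparsing is needed at all now derives its characters from the configured sentinels instead of assuming the default '$'. A sketch of the check (the helper name is hypothetical; the real code inlines this logic in Core._recurse_entity and Parser.parse, as the diff below shows):

    def needs_parsing(value, settings):
        # Count only the opening sentinels; a single occurrence is enough
        # to route the string through the (slower) pyparsing grammar.
        count = (value.count(settings.reference_sentinels[0]) +
                 value.count(settings.export_sentinels[0]))
        return count > 0
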
--- reclass/core.py | 4 +++- reclass/values/parser.py | 11 ++++------ reclass/values/parser_funcs.py | 38 ++++++++++++++++------------------ 3 files changed, 25 insertions(+), 28 deletions(-) diff --git a/reclass/core.py b/reclass/core.py index e3142af7..3e0ab34d 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -114,7 +114,9 @@ def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, node for klass in entity.classes.as_list(): # class name contain reference - if klass.count('$') > 0: + num_references = klass.count(self._settings.reference_sentinels[0]) +\ + klass.count(self._settings.export_sentinels[0]) + if num_references > 0: try: klass = str(self._parser.parse(klass, self._settings).render(merge_base.parameters.as_dict(), {})) except ResolveError as e: diff --git a/reclass/values/parser.py b/reclass/values/parser.py index 4d4e12ca..255e477f 100644 --- a/reclass/values/parser.py +++ b/reclass/values/parser.py @@ -29,14 +29,12 @@ def full_parse(): raise ParseError(e.msg, e.line, e.col, e.lineno) self._settings = settings - parser_settings = (settings.escape_character, - settings.reference_sentinels, - settings.export_sentinels) - ref_parser = parsers.get_ref_parser(*parser_settings) - simple_ref_parser = parsers.get_simple_ref_parser(*parser_settings) + ref_parser = parsers.get_ref_parser(settings) + simple_ref_parser = parsers.get_simple_ref_parser(settings) sentinel_count = (value.count(settings.reference_sentinels[0]) + value.count(settings.export_sentinels[0])) + if sentinel_count == 0: # speed up: only use pyparsing if there are sentinels in the value return ScaItem(value, self._settings) @@ -68,5 +66,4 @@ def _create_inv(self, tokens): items = [ ScaItem(v, self._settings) for t, v in tokens ] if len(items) == 1: return InvItem(items[0], self._settings) - else: - return InvItem(CompItem(items), self._settings) + return InvItem(CompItem(items), self._settings) diff --git a/reclass/values/parser_funcs.py b/reclass/values/parser_funcs.py index 3c24b40d..f7029107 100644 --- a/reclass/values/parser_funcs.py +++ b/reclass/values/parser_funcs.py @@ -34,6 +34,8 @@ ALL_ENVS = '+AllEnvs' +s_end = pp.StringEnd() + def _tag_with(tag, transform=lambda x:x): def inner(tag, string, location, tokens): token = transform(tokens[0]) @@ -41,8 +43,6 @@ def inner(tag, string, location, tokens): return functools.partial(inner, tag) def get_expression_parser(): - - s_end = pp.StringEnd() sign = pp.Optional(pp.Literal('-')) number = pp.Word(pp.nums) dpoint = pp.Literal('.') @@ -80,12 +80,11 @@ def get_expression_parser(): line = options + expr + s_end return line -def get_ref_parser(escape_character, reference_sentinels, export_sentinels): - _ESCAPE = escape_character +def get_ref_parser(settings): + _ESCAPE = settings.escape_character _DOUBLE_ESCAPE = _ESCAPE + _ESCAPE - _REF_OPEN = reference_sentinels[0] - _REF_CLOSE = reference_sentinels[1] + _REF_OPEN, _REF_CLOSE = settings.reference_sentinels _REF_CLOSE_FIRST = _REF_CLOSE[0] _REF_ESCAPE_OPEN = _ESCAPE + _REF_OPEN _REF_ESCAPE_CLOSE = _ESCAPE + _REF_CLOSE @@ -93,8 +92,7 @@ def get_ref_parser(escape_character, reference_sentinels, export_sentinels): _REF_DOUBLE_ESCAPE_CLOSE = _DOUBLE_ESCAPE + _REF_CLOSE _REF_EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE - _INV_OPEN = export_sentinels[0] - _INV_CLOSE = export_sentinels[1] + _INV_OPEN, _INV_CLOSE = settings.export_sentinels _INV_CLOSE_FIRST = _INV_CLOSE[0] _INV_ESCAPE_OPEN = _ESCAPE + _INV_OPEN _INV_ESCAPE_CLOSE = _ESCAPE + _INV_CLOSE @@ -142,20 +140,20 @@ def 
get_ref_parser(escape_character, reference_sentinels, export_sentinels): string = pp.MatchFirst([double_escape, ref_escape_open, inv_escape_open, content]).setParseAction(_tag_with(STR)) item = reference | export | string - line = pp.OneOrMore(item) + pp.StringEnd() + line = pp.OneOrMore(item) + s_end return line.leaveWhitespace() -def get_simple_ref_parser(escape_character, reference_sentinels, export_sentinels): - _ESCAPE = escape_character - _REF_OPEN = reference_sentinels[0] - _REF_CLOSE = reference_sentinels[1] - _INV_OPEN = export_sentinels[0] - _INV_CLOSE = export_sentinels[1] - _EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + _INV_OPEN + _INV_CLOSE - string = pp.CharsNotIn(_EXCLUDES).setParseAction(_tag_with(STR)) - ref_open = pp.Literal(_REF_OPEN).suppress() - ref_close = pp.Literal(_REF_CLOSE).suppress() +def get_simple_ref_parser(settings): + + ESCAPE = settings.escape_character + REF_OPEN, REF_CLOSE = settings.reference_sentinels + INV_OPEN, INV_CLOSE = settings.export_sentinels + EXCLUDES = ESCAPE + REF_OPEN + REF_CLOSE + INV_OPEN + INV_CLOSE + + string = pp.CharsNotIn(EXCLUDES).setParseAction(_tag_with(STR)) + ref_open = pp.Literal(REF_OPEN).suppress() + ref_close = pp.Literal(REF_CLOSE).suppress() reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(REF)) - line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + pp.StringEnd() + line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + s_end return line.leaveWhitespace() From ece09c62d795d4013b8a697d1a41eabbe30130da Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Fri, 26 Oct 2018 17:00:03 +0400 Subject: [PATCH 40/63] Fix for parser reinstantiation bug In the course of refactoring a bug was introduced which manifested itself in excessive constructions of parsers and considerable slowdown of reclass. This patch limits the number of parsers constructed. 
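The fix keeps one grammar per Parser instance and rebuilds it only when the settings actually change, instead of calling get_ref_parser()/get_simple_ref_parser() on every parse. Schematically (a condensed sketch of the caching in the diff below; parsers is reclass.values.parser_funcs, as imported by the module):

    class Parser(object):
        def __init__(self):
            self._ref_parser = None
            self._old_settings = None

        @property
        def ref_parser(self):
            # Rebuild the pyparsing grammar only if the settings differ
            # from the ones the cached grammar was built with.
            if self._ref_parser is None or self._settings != self._old_settings:
                self._ref_parser = parsers.get_ref_parser(self._settings)
                self._old_settings = self._settings
            return self._ref_parser
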
--- reclass/values/parser.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/reclass/values/parser.py b/reclass/values/parser.py index 255e477f..27e6d2da 100644 --- a/reclass/values/parser.py +++ b/reclass/values/parser.py @@ -21,26 +21,41 @@ class Parser(object): + def __init__(self): + self._ref_parser = None + self._simple_parser = None + self._old_settings = None + + @property + def ref_parser(self): + if self._ref_parser is None or self._settings != self._old_settings: + self._ref_parser = parsers.get_ref_parser(self._settings) + self._old_settings = self._settings + return self._ref_parser + + @property + def simple_ref_parser(self): + if self._simple_parser is None or self._settings != self._old_settings: + self._simple_parser = parsers.get_simple_ref_parser(self._settings) + self._old_settings = self._settings + return self._simple_parser + def parse(self, value, settings): def full_parse(): try: - return ref_parser.parseString(value).asList() + return self.ref_parser.parseString(value).asList() except pp.ParseException as e: raise ParseError(e.msg, e.line, e.col, e.lineno) self._settings = settings - ref_parser = parsers.get_ref_parser(settings) - simple_ref_parser = parsers.get_simple_ref_parser(settings) - sentinel_count = (value.count(settings.reference_sentinels[0]) + value.count(settings.export_sentinels[0])) - if sentinel_count == 0: # speed up: only use pyparsing if there are sentinels in the value return ScaItem(value, self._settings) elif sentinel_count == 1: # speed up: try a simple reference try: - tokens = simple_ref_parser.parseString(value).asList() + tokens = self.simple_ref_parser.parseString(value).asList() except pp.ParseException: tokens = full_parse() # fall back on the full parser else: From 8cdce3905943c0e2a90a68d5f7e64409e9f21694 Mon Sep 17 00:00:00 2001 From: Alexey Ovchinnikov Date: Tue, 6 Nov 2018 00:46:23 +0400 Subject: [PATCH 41/63] Tests for parsers are added Tests for full parser and simplified reference parser are added. The new tests simultaneously act as documentation for parsers. Also some mostly cosmetic changes are applied to parser building functions and Parser() class. 
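Each test case pairs an input string with the tagged token structure the parser must produce, so the table doubles as a specification of the grammar. For example, with the default sentinels (assuming the patched modules below are importable):

    from reclass import settings
    from reclass.values import parser_funcs as pf

    parser = pf.get_ref_parser(settings.Settings())
    tokens = pf.listify(parser.parseString('bar${foo}').asList())
    # The literal prefix is tagged STR; the ${foo} reference becomes a
    # nested REF group containing a single STR token.
    assert tokens == [(pf.tags.STR, 'bar'),
                      (pf.tags.REF, [(pf.tags.STR, 'foo')])]
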
--- reclass/values/parser.py | 23 ++-- reclass/values/parser_funcs.py | 41 +++++-- reclass/values/tests/test_parser_functions.py | 116 ++++++++++++++++++ requirements.txt | 1 + setup.py | 2 +- 5 files changed, 162 insertions(+), 21 deletions(-) create mode 100644 reclass/values/tests/test_parser_functions.py diff --git a/reclass/values/parser.py b/reclass/values/parser.py index 27e6d2da..3f7ac1f7 100644 --- a/reclass/values/parser.py +++ b/reclass/values/parser.py @@ -16,9 +16,13 @@ from .scaitem import ScaItem from reclass.errors import ParseError -from reclass.values.parser_funcs import STR, REF, INV +from reclass.values.parser_funcs import tags import reclass.values.parser_funcs as parsers +import collections +import six + + class Parser(object): def __init__(self): @@ -43,7 +47,7 @@ def simple_ref_parser(self): def parse(self, value, settings): def full_parse(): try: - return self.ref_parser.parseString(value).asList() + return self.ref_parser.parseString(value) except pp.ParseException as e: raise ParseError(e.msg, e.line, e.col, e.lineno) @@ -55,30 +59,31 @@ def full_parse(): return ScaItem(value, self._settings) elif sentinel_count == 1: # speed up: try a simple reference try: - tokens = self.simple_ref_parser.parseString(value).asList() + tokens = self.simple_ref_parser.parseString(value) except pp.ParseException: tokens = full_parse() # fall back on the full parser else: tokens = full_parse() # use the full parser + tokens = parsers.listify(tokens) items = self._create_items(tokens) if len(items) == 1: return items[0] return CompItem(items, self._settings) - _create_dict = { STR: (lambda s, v: ScaItem(v, s._settings)), - REF: (lambda s, v: s._create_ref(v)), - INV: (lambda s, v: s._create_inv(v)) } + _item_builders = {tags.STR: (lambda s, v: ScaItem(v, s._settings)), + tags.REF: (lambda s, v: s._create_ref(v)), + tags.INV: (lambda s, v: s._create_inv(v)) } def _create_items(self, tokens): - return [ self._create_dict[t](self, v) for t, v in tokens ] + return [self._item_builders[t](self, v) for t, v in tokens ] def _create_ref(self, tokens): - items = [ self._create_dict[t](self, v) for t, v in tokens ] + items = [ self._item_builders[t](self, v) for t, v in tokens ] return RefItem(items, self._settings) def _create_inv(self, tokens): - items = [ ScaItem(v, self._settings) for t, v in tokens ] + items = [ScaItem(v, self._settings) for t, v in tokens] if len(items) == 1: return InvItem(items[0], self._settings) return InvItem(CompItem(items), self._settings) diff --git a/reclass/values/parser_funcs.py b/reclass/values/parser_funcs.py index f7029107..db34cebc 100644 --- a/reclass/values/parser_funcs.py +++ b/reclass/values/parser_funcs.py @@ -8,12 +8,13 @@ from __future__ import print_function from __future__ import unicode_literals -import pyparsing as pp +import collections +import enum import functools +import pyparsing as pp +import six -STR = 1 -REF = 2 -INV = 3 +tags = enum.Enum('Tags', ['STR', 'REF', 'INV']) _OBJ = 'OBJ' _LOGICAL = 'LOGICAL' @@ -42,6 +43,20 @@ def inner(tag, string, location, tokens): tokens[0] = (tag, token) return functools.partial(inner, tag) +def _asList(x): + if isinstance(x, pp.ParseResults): + return x.asList() + return x + +def listify(w, modifier=_asList): + if (isinstance(w, collections.Iterable) and + not isinstance(w, six.string_types)): + cls = type(w) + if cls == pp.ParseResults: + cls = list + return cls([listify(x) for x in w]) + return modifier(w) + def get_expression_parser(): sign = pp.Optional(pp.Literal('-')) number = pp.Word(pp.nums) @@ 
-117,10 +132,10 @@ def get_ref_parser(settings): ref_escape_close = pp.Literal(_REF_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_REF_CLOSE)) ref_text = pp.CharsNotIn(_REF_EXCLUDES) | pp.CharsNotIn(_REF_CLOSE_FIRST, exact=1) ref_content = pp.Combine(pp.OneOrMore(ref_not_open + ref_not_close + ref_text)) - ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_tag_with(STR)) + ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_tag_with(tags.STR)) ref_item = pp.Forward() ref_items = pp.OneOrMore(ref_item) - reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_tag_with(REF)) + reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_tag_with(tags.REF)) ref_item << (reference | ref_string) inv_open = pp.Literal(_INV_OPEN).suppress() @@ -131,13 +146,17 @@ def get_ref_parser(settings): inv_escape_close = pp.Literal(_INV_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_INV_CLOSE)) inv_text = pp.CharsNotIn(_INV_CLOSE_FIRST) inv_content = pp.Combine(pp.OneOrMore(inv_not_close + inv_text)) - inv_string = pp.MatchFirst([double_escape, inv_escape_open, inv_escape_close, inv_content]).setParseAction(_tag_with(STR)) + inv_string = pp.MatchFirst( + [double_escape, inv_escape_open, inv_escape_close, inv_content] + ).setParseAction(_tag_with(tags.STR)) inv_items = pp.OneOrMore(inv_string) - export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_tag_with(INV)) + export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_tag_with(tags.INV)) text = pp.CharsNotIn(_EXCLUDES) | pp.CharsNotIn('', exact=1) content = pp.Combine(pp.OneOrMore(ref_not_open + inv_not_open + text)) - string = pp.MatchFirst([double_escape, ref_escape_open, inv_escape_open, content]).setParseAction(_tag_with(STR)) + string = pp.MatchFirst( + [double_escape, ref_escape_open, inv_escape_open, content] + ).setParseAction(_tag_with(tags.STR)) item = reference | export | string line = pp.OneOrMore(item) + s_end @@ -151,9 +170,9 @@ def get_simple_ref_parser(settings): INV_OPEN, INV_CLOSE = settings.export_sentinels EXCLUDES = ESCAPE + REF_OPEN + REF_CLOSE + INV_OPEN + INV_CLOSE - string = pp.CharsNotIn(EXCLUDES).setParseAction(_tag_with(STR)) + string = pp.CharsNotIn(EXCLUDES).setParseAction(_tag_with(tags.STR)) ref_open = pp.Literal(REF_OPEN).suppress() ref_close = pp.Literal(REF_CLOSE).suppress() - reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(REF)) + reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(tags.REF)) line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + s_end return line.leaveWhitespace() diff --git a/reclass/values/tests/test_parser_functions.py b/reclass/values/tests/test_parser_functions.py new file mode 100644 index 00000000..a660c76c --- /dev/null +++ b/reclass/values/tests/test_parser_functions.py @@ -0,0 +1,116 @@ +from reclass import settings +from reclass.values import parser_funcs as pf +import unittest +import ddt + + +SETTINGS = settings.Settings() + +# Test cases for parsers. Each test case is a two-tuple of input string and +# expected output. NOTE: default values for sentinels are used here to avoid +# cluttering up the code. +test_pairs_simple = ( + # Basic test cases. + ('${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + # Basic combinations. 
+ ('bar${foo}', [(pf.tags.STR, 'bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')])]), + ('bar${foo}baz', [(pf.tags.STR, 'bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + ('${foo}baz', [(pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + # Whitespace preservation cases. + ('bar ${foo}', [(pf.tags.STR, 'bar '), + (pf.tags.REF, [(pf.tags.STR, 'foo')])]), + ('bar ${foo baz}', [(pf.tags.STR, 'bar '), + (pf.tags.REF, [(pf.tags.STR, 'foo baz')])]), + ('bar${foo} baz', [(pf.tags.STR, 'bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, ' baz')]), + (' bar${foo} baz ', [(pf.tags.STR, ' bar'), + (pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, ' baz ')]), +) + +# Simple parser test cases are also included in this test grouop. +test_pairs_full = ( + # Single elements sanity. + ('foo', [(pf.tags.STR, 'foo')]), + ('$foo', [(pf.tags.STR, '$foo')]), + ('{foo}', [(pf.tags.STR, '{foo}')]), + ('[foo]', [(pf.tags.STR, '[foo]')]), + ('$(foo)', [(pf.tags.STR, '$(foo)')]), + ('$[foo]', [(pf.tags.INV, [(pf.tags.STR, 'foo')])]), + + # Escape sequences. + # NOTE: these sequences apparently are not working as expected. + #(r'\\\\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + #(r'\\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + #(r'\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), + + # Basic combinations. + ('bar$[foo]', [(pf.tags.STR, 'bar'), + (pf.tags.INV, [(pf.tags.STR, 'foo')])]), + ('bar$[foo]baz', [(pf.tags.STR, 'bar'), + (pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + ('$[foo]baz', [(pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, 'baz')]), + + # Whitespace preservation in various positions. + (' foo ', [(pf.tags.STR, ' foo ')]), + ('foo bar', [(pf.tags.STR, 'foo bar')]), + ('bar $[foo baz]', [(pf.tags.STR, 'bar '), + (pf.tags.INV, [(pf.tags.STR, 'foo baz')])]), + ('bar$[foo] baz ', [(pf.tags.STR, 'bar'), + (pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.STR, ' baz ')]), + + # Nested references and inventory items. + ('${foo}${bar}',[(pf.tags.REF, [(pf.tags.STR, 'foo')]), + (pf.tags.REF, [(pf.tags.STR, 'bar')])]), + ('${foo${bar}}',[(pf.tags.REF, [(pf.tags.STR, 'foo'), + (pf.tags.REF, [(pf.tags.STR, 'bar')])])]), + ('$[foo]$[bar]',[(pf.tags.INV, [(pf.tags.STR, 'foo')]), + (pf.tags.INV, [(pf.tags.STR, 'bar')])]), + # NOTE: the cases below do not work as expected, which is probably a bug. + # Any nesting in INV creates a string. + #('${$[foo]}', [(pf.tags.REF, [(pf.tags.INV, [(pf.tags.STR, 'foo')])])]), + #('$[${foo}]', [(pf.tags.INV, [(pf.tags.REF, [(pf.tags.STR, 'foo')])])]), + #('$[foo$[bar]]',[(pf.tags.INV, [(pf.tags.STR, 'foo'), + # (pf.tags.INV, [(pf.tags.STR, 'bar')])])]), + +) + test_pairs_simple + + +@ddt.ddt +class TestRefParser(unittest.TestCase): + + @ddt.data(*test_pairs_full) + def test_standard_reference_parser(self, data): + instring, expected = data + parser = pf.get_ref_parser(SETTINGS) + + result = pf.listify(parser.parseString(instring).asList()) + + self.assertEquals(expected, result) + + +@ddt.ddt +class TestSimpleRefParser(unittest.TestCase): + + @ddt.data(*test_pairs_simple) + def test_standard_reference_parser(self, data): + # NOTE: simple reference parser can parse references only. It fails + # on inventory items. 
+ instring, expected = data + parser = pf.get_simple_ref_parser(SETTINGS) + + result = pf.listify(parser.parseString(instring).asList()) + + self.assertEquals(expected, result) + + +if __name__ == '__main__': + unittest.main() diff --git a/requirements.txt b/requirements.txt index 5b3aadd1..5f6aed18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ pyparsing pyyaml six enum34 +ddt diff --git a/setup.py b/setup.py index 884be880..ab23207f 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ url = URL, packages = find_packages(exclude=['*tests']), #FIXME validate this entry_points = { 'console_scripts': console_scripts }, - install_requires = ['pyparsing', 'pyyaml', 'six', 'enum34'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) + install_requires = ['pyparsing', 'pyyaml', 'six', 'enum34', 'ddt'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) classifiers=[ 'Development Status :: 4 - Beta', From 637a9099dd9344e14b08101223efb6e9ee8718da Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Tue, 6 Nov 2018 11:44:10 +0100 Subject: [PATCH 42/63] bump version 1.7.0 Change-Id: Id8fa33004bbf0e18d7e94c2da9a7d3120bf8a3c4 --- doc/source/changelog.rst | 4 ++++ reclass/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index d29e8377..c3134c5b 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,6 +5,10 @@ ChangeLog ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== +1.7.0 2018-11-06 * Python code and parser refactoring by a-ovchinnikov + * Improvements in yaml_git and mixed setup by Andrew Pickford + * Relative paths in class names by Petr Michalec, Martin Polreich and Andrew Pickford + * Bug Fixes for recently added features 1.5.6 2018-07-30 * Fix, usage of integers as pillar keys * Refactoring python codebase by @a-ovchinkonv * New feature, "compose node name" from node subdirectory structure (by @gburiola) diff --git a/reclass/version.py b/reclass/version.py index 63fda269..5a40c2ed 100644 --- a/reclass/version.py +++ b/reclass/version.py @@ -14,7 +14,7 @@ RECLASS_NAME = 'reclass' DESCRIPTION = ('merge data by recursive descent down an ancestry hierarchy ' '(forked extended version)') -VERSION = '1.5.6' +VERSION = '1.6.0' AUTHOR = 'martin f. 
krafft / Andrew Pickford / salt-formulas community' AUTHOR_EMAIL = 'salt-formulas@freelists.org' MAINTAINER = 'salt-formulas community' From 4ba69256743bc5b5769bee747c76257801dc54f7 Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Tue, 6 Nov 2018 12:20:36 +0100 Subject: [PATCH 43/63] fix version name to 1.6.x Change-Id: Id3e474a8bd77bb474ef1fcbc5df327f7b84db894 --- doc/source/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index c3134c5b..dccf34fe 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,7 +5,7 @@ ChangeLog ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== -1.7.0 2018-11-06 * Python code and parser refactoring by a-ovchinnikov +1.6.0 2018-11-06 * Python code and parser refactoring by a-ovchinnikov * Improvements in yaml_git and mixed setup by Andrew Pickford * Relative paths in class names by Petr Michalec, Martin Polreich and Andrew Pickford * Bug Fixes for recently added features From 8c1979d6a5907773f53b1d27c0ed6de7696cb186 Mon Sep 17 00:00:00 2001 From: Marwan Al Jubeh Date: Thu, 8 Nov 2018 11:31:22 +0000 Subject: [PATCH 44/63] Fix a typo: Canot -> Cannot --- reclass/datatypes/tests/test_parameters.py | 14 +++++++------- reclass/errors.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/reclass/datatypes/tests/test_parameters.py b/reclass/datatypes/tests/test_parameters.py index 79322e69..80fd8de1 100644 --- a/reclass/datatypes/tests/test_parameters.py +++ b/reclass/datatypes/tests/test_parameters.py @@ -194,7 +194,7 @@ def test_merge_list_into_scalar(self): with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge list over scalar, at key, in ; ") + self.assertEqual(e.exception.message, "-> \n Cannot merge list over scalar, at key, in ; ") def test_merge_list_into_scalar_allow(self): settings = Settings({'allow_list_over_scalar': True}) @@ -212,7 +212,7 @@ def test_merge_scalar_over_list(self): with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge scalar over list, at key, in ; ") + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over list, at key, in ; ") def test_merge_scalar_over_list_allow(self): l = ['foo', 1, 2] @@ -231,7 +231,7 @@ def test_merge_none_over_list(self): with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge scalar over list, at key, in ; ") + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over list, at key, in ; ") def test_merge_none_over_list_allow(self): l = ['foo', 1, 2] @@ -249,7 +249,7 @@ def test_merge_dict_over_scalar(self): with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge dictionary over scalar, at a, in ; ") + self.assertEqual(e.exception.message, "-> \n Cannot merge dictionary over scalar, at a, in ; ") def test_merge_dict_over_scalar_allow(self): settings = Settings({'allow_dict_over_scalar': True}) @@ -267,7 +267,7 @@ def test_merge_scalar_over_dict(self): with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge scalar over dictionary, at a, in ; ") + 
self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over dictionary, at a, in ; ") def test_merge_scalar_over_dict_allow(self): d = { 'one': 1, 'two': 2} @@ -284,7 +284,7 @@ def test_merge_none_over_dict(self): with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge scalar over dictionary, at key, in ; ") + self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over dictionary, at key, in ; ") def test_merge_none_over_dict_allow(self): settings = Settings({'allow_none_override': True}) @@ -302,7 +302,7 @@ def test_merge_list_over_dict(self): p1.merge(p2) p1.merge(p3) p1.interpolate() - self.assertEqual(e.exception.message, "-> \n Canot merge list over dictionary, at one:a, in second; third") + self.assertEqual(e.exception.message, "-> \n Cannot merge list over dictionary, at one:a, in second; third") # def test_merge_bare_dict_over_dict(self): # settings = Settings({'allow_bare_override': True}) diff --git a/reclass/errors.py b/reclass/errors.py index 330ad4cc..24bdfaaa 100644 --- a/reclass/errors.py +++ b/reclass/errors.py @@ -291,7 +291,7 @@ def __init__(self, value1, value2, uri): self.type2 = value2.item_type_str() def _get_error_message(self): - msg = [ 'Canot merge {0} over {1}'.format(self.type1, self.type2) + self._add_context_and_uri() ] + msg = [ 'Cannot merge {0} over {1}'.format(self.type1, self.type2) + self._add_context_and_uri() ] return msg From 3f0171d8786acae2591a4793ca9d941b98a5f6c3 Mon Sep 17 00:00:00 2001 From: Alexandru Avadanii Date: Tue, 20 Nov 2018 23:28:17 +0200 Subject: [PATCH 45/63] Fix ignore_overwritten_missing_reference default Fixes: https://github.com/salt-formulas/reclass/issues/77 Signed-off-by: Alexandru Avadanii --- reclass/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/settings.py b/reclass/settings.py index b7f52526..62af976b 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -39,7 +39,7 @@ class Settings(object): defaults.OPT_IGNORE_CLASS_NOTFOUND_REGEXP, 'ignore_class_notfound_warning': defaults.OPT_IGNORE_CLASS_NOTFOUND_WARNING, - 'ignore_overwritten_missing_referencesdefaults.': + 'ignore_overwritten_missing_references': defaults.OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES, 'group_errors': defaults.OPT_GROUP_ERRORS, 'compose_node_name': defaults.OPT_COMPOSE_NODE_NAME, From d00e3ba84b55095f00fb008d82a44e8b6aa3a4ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Hor=C3=A1k?= Date: Wed, 21 Nov 2018 16:25:54 +0100 Subject: [PATCH 46/63] Fixed typo in settings definitions. 
--- reclass/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/settings.py b/reclass/settings.py index b7f52526..62af976b 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -39,7 +39,7 @@ class Settings(object): defaults.OPT_IGNORE_CLASS_NOTFOUND_REGEXP, 'ignore_class_notfound_warning': defaults.OPT_IGNORE_CLASS_NOTFOUND_WARNING, - 'ignore_overwritten_missing_referencesdefaults.': + 'ignore_overwritten_missing_references': defaults.OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES, 'group_errors': defaults.OPT_GROUP_ERRORS, 'compose_node_name': defaults.OPT_COMPOSE_NODE_NAME, From bba9562e4fd5ac7974aeed66562e63d8467380ab Mon Sep 17 00:00:00 2001 From: pranavgupta1234 Date: Thu, 2 May 2019 12:15:05 +0530 Subject: [PATCH 47/63] added support for .yaml along with .yml --- reclass/storage/yaml_fs/__init__.py | 4 ++-- reclass/storage/yaml_fs/directory.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 20e8eecb..e47ea91a 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -21,7 +21,7 @@ from reclass.datatypes import Entity import reclass.errors -FILE_EXTENSION = '.yml' +FILE_EXTENSION = ('.yml', '.yaml') STORAGE_NAME = 'yaml_fs' def vvv(msg): @@ -71,7 +71,7 @@ def __init__(self, nodes_uri, classes_uri, compose_node_name): def _enumerate_inventory(self, basedir, name_mangler): ret = {} def register_fn(dirpath, filenames): - filenames = fnmatch.filter(filenames, '*{0}'.format(FILE_EXTENSION)) + filenames = [f for f in filenames if f.endswith(FILE_EXTENSION)] vvv('REGISTER {0} in path {1}'.format(filenames, dirpath)) for f in filenames: name = os.path.splitext(f)[0] diff --git a/reclass/storage/yaml_fs/directory.py b/reclass/storage/yaml_fs/directory.py index a8916b31..4e11643d 100644 --- a/reclass/storage/yaml_fs/directory.py +++ b/reclass/storage/yaml_fs/directory.py @@ -15,7 +15,7 @@ from reclass.errors import NotFoundError SKIPDIRS = ('CVS', 'SCCS') -FILE_EXTENSION = '.yml' +FILE_EXTENSION = ('.yml', '.yaml') def vvv(msg): #print(msg, file=sys.stderr) From 8d17363ca8ebee6d9a67b333969ae6b3b68d395e Mon Sep 17 00:00:00 2001 From: pranavgupta1234 Date: Thu, 2 May 2019 12:27:03 +0530 Subject: [PATCH 48/63] removed fnmatch import --- reclass/storage/yaml_fs/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index e47ea91a..3577b362 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -12,7 +12,6 @@ from __future__ import unicode_literals import os, sys -import fnmatch import yaml from reclass.output.yaml_outputter import ExplicitDumper from reclass.storage import ExternalNodeStorageBase From 8283bfb1a5efbef250bdeac0d7c63184cbc95802 Mon Sep 17 00:00:00 2001 From: pranavgupta1234 Date: Thu, 2 May 2019 21:44:25 +0530 Subject: [PATCH 49/63] extension support added for yaml_git storage as well --- reclass/storage/yaml_git/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index a28079b6..78eed048 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -34,7 +34,7 @@ from reclass.storage import ExternalNodeStorageBase from reclass.storage.yamldata import YamlData -FILE_EXTENSION = '.yml' +FILE_EXTENSION = ('.yml', '.yaml') STORAGE_NAME = 'yaml_git' def 
path_mangler(inventory_base_uri, nodes_uri, classes_uri): @@ -213,7 +213,7 @@ def files_in_repo(self): branch = {} files = self.files_in_branch(bname) for file in files: - if fnmatch.fnmatch(file.name, '*{0}'.format(FILE_EXTENSION)): + if str(file.name).endswith(FILE_EXTENSION): name = os.path.splitext(file.name)[0] relpath = os.path.dirname(file.path) if callable(self._class_name_mangler): From 961aa7f06e3f75ac5bd776a9d6e98b5cc769ec41 Mon Sep 17 00:00:00 2001 From: pranavgupta1234 Date: Thu, 2 May 2019 21:55:39 +0530 Subject: [PATCH 50/63] remove import fnmatch from yaml_git as well --- reclass/storage/yaml_git/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 78eed048..b212b5ef 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -11,7 +11,6 @@ import distutils.version import errno import fcntl -import fnmatch import os import time From cc7d59dd816e3020f4024492c6f4392199e06e99 Mon Sep 17 00:00:00 2001 From: pranavgupta1234 Date: Fri, 3 May 2019 07:38:31 +0530 Subject: [PATCH 51/63] removed unnecessary typecasting --- reclass/storage/yaml_git/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index b212b5ef..06d839b4 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -212,7 +212,7 @@ def files_in_repo(self): branch = {} files = self.files_in_branch(bname) for file in files: - if str(file.name).endswith(FILE_EXTENSION): + if file.name.endswith(FILE_EXTENSION): name = os.path.splitext(file.name)[0] relpath = os.path.dirname(file.path) if callable(self._class_name_mangler): From 6a15e4ea429840621c85cf2b45eb51447c260b1a Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Wed, 16 Oct 2019 15:31:07 +0200 Subject: [PATCH 52/63] Fix logging for DuplicateNodeNameError in yaml git storage --- reclass/storage/yaml_git/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 06d839b4..fa3f68c4 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -218,7 +218,7 @@ def files_in_repo(self): if callable(self._class_name_mangler): relpath, name = self._class_name_mangler(relpath, name) if name in ret: - raise reclass.errors.DuplicateNodeNameError(self.name + ' - ' + bname, name, ret[name], path) + raise reclass.errors.DuplicateNodeNameError(self.url + ' - ' + bname, name, ret[name], file) else: branch[name] = file ret[bname] = branch @@ -233,7 +233,7 @@ def nodes(self, branch, subdir): if callable(self._node_name_mangler): relpath, node_name = self._node_name_mangler(relpath, node_name) if node_name in ret: - raise reclass.errors.DuplicateNodeNameError(self.name, name, files[name], path) + raise reclass.errors.DuplicateNodeNameError(self.url, name, ret[node_name].path, file.path) else: ret[node_name] = file return ret From 0ff21b826b5f9e5a624ecabb90ee8d6e21addeaa Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Wed, 16 Oct 2019 15:45:27 +0200 Subject: [PATCH 53/63] Change msg param to an optional param for InterpolationError and NameError These are parent classes for other error classes which all pass the msg parameter as an optional parameter. 
--- reclass/errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reclass/errors.py b/reclass/errors.py index 24bdfaaa..df35ef8e 100644 --- a/reclass/errors.py +++ b/reclass/errors.py @@ -117,7 +117,7 @@ def _get_message(self): class InterpolationError(ReclassException): - def __init__(self, msg, rc=posix.EX_DATAERR, nodename='', uri=None, context=None, tbFlag=True): + def __init__(self, msg=None, rc=posix.EX_DATAERR, nodename='', uri=None, context=None, tbFlag=True): super(InterpolationError, self).__init__(rc=rc, msg=msg, tbFlag=tbFlag) self.nodename = nodename self.uri = uri @@ -330,7 +330,7 @@ def __init__(self, msg): class NameError(ReclassException): - def __init__(self, msg, rc=posix.EX_DATAERR): + def __init__(self, msg=None, rc=posix.EX_DATAERR): super(NameError, self).__init__(rc=rc, msg=msg) From e3fc5e8fa1fcd6a1cb1d3440deda8322bce7cd7c Mon Sep 17 00:00:00 2001 From: Bruno Binet Date: Thu, 2 Apr 2020 16:30:28 +0200 Subject: [PATCH 54/63] add Python3.7 compatibility --- reclass/storage/yaml_git/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 06d839b4..4c739988 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -42,7 +42,7 @@ def path_mangler(inventory_base_uri, nodes_uri, classes_uri): return nodes_uri, classes_uri -GitMD = collections.namedtuple('GitMD', ['name', 'path', 'id'], verbose=False, rename=False) +GitMD = collections.namedtuple('GitMD', ['name', 'path', 'id'], rename=False) class GitURI(object): From 5e65f3f8092ce3dd6ed6868922084a206917049d Mon Sep 17 00:00:00 2001 From: Bruno Binet Date: Wed, 6 May 2020 16:47:53 +0200 Subject: [PATCH 55/63] Fix doc for ignore_class_notfound_regexp --- README-extensions.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README-extensions.rst b/README-extensions.rst index e67e441d..da9ce85b 100644 --- a/README-extensions.rst +++ b/README-extensions.rst @@ -33,7 +33,7 @@ To control the feature there are two options available: .. code-block:: yaml ignore_class_notfound: False - ignore_class_regexp: ['.*'] + ignore_class_notfound_regexp: ['.*'] If you set regexp pattern to ``service.*`` all missing classes starting 'service.' will be logged with warning, but will not fail to return rendered reclass. Assuming all parameter interpolation passes. From b803adb533aee3c50aa2fa902e4442d4891e39c0 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Fri, 12 Jun 2020 10:53:32 +0200 Subject: [PATCH 56/63] Allow for class mappings to wildcard match against either the node name or node file path By default a class mapping will match against the name of a node: class_mappings: - test* alpha This will match all node names starting with test. A possibly unintended side effect of the original implementation allowed for matches against the path of the node file. So for a node file 'test/node1.yml' a match: class_mappings: - test/* alpha would work. A reworking of the class mappings implementation meant this matching stopped working. The type of matching done by the class mapping is now controlled by the configuration option class_mappings_match_path. If false (the default) the match is done against the node name and if true the match is done against the path.
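A minimal sketch of the new option (not part of the patch itself), assuming the yaml_fs test inventory added below with a node file nodes/alpha/node1.yml and the classes 'two' and 'three'; the option and mapping syntax follow reclass-config.yml:

```yaml
# reclass-config.yml (illustrative fragment)
class_mappings_match_path: True   # match patterns against the node file path instead of the node name
class_mappings:
  - node* two           # applies when class_mappings_match_path is False and the node name 'node1' matches
  - alpha/node* three   # applies when class_mappings_match_path is True and the node file path 'alpha/node1' matches
```

With class_mappings_match_path left at False the node picks up class 'two'; set to True, the same node picks up class 'three' instead, as exercised by the new tests in reclass/tests/test_core.py.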
--- doc/source/operations.rst | 7 +++++++ reclass/core.py | 14 +++++++++----- reclass/datatypes/entity.py | 8 +++++--- reclass/defaults.py | 2 ++ reclass/settings.py | 1 + reclass/storage/yaml_fs/__init__.py | 6 ++++-- reclass/storage/yaml_git/__init__.py | 8 ++++++-- reclass/storage/yamldata.py | 7 ++----- reclass/tests/data/04/classes/one.yml | 2 ++ reclass/tests/data/04/classes/three.yml | 2 ++ reclass/tests/data/04/classes/two.yml | 2 ++ reclass/tests/data/04/nodes/alpha/node1.yml | 2 ++ reclass/tests/test_core.py | 18 +++++++++++++++--- 13 files changed, 59 insertions(+), 20 deletions(-) create mode 100644 reclass/tests/data/04/classes/one.yml create mode 100644 reclass/tests/data/04/classes/three.yml create mode 100644 reclass/tests/data/04/classes/two.yml create mode 100644 reclass/tests/data/04/nodes/alpha/node1.yml diff --git a/doc/source/operations.rst b/doc/source/operations.rst index f744148a..08b34e5f 100644 --- a/doc/source/operations.rst +++ b/doc/source/operations.rst @@ -101,6 +101,13 @@ end with ``.ch`` (again, note the escaped leading asterisk). Multiple classes can be assigned to each mapping by providing a space-separated list (class names cannot contain spaces anyway). +By default the class mappings regex match is done against the node name. This can +be changed to do the match against the path of the node file from the classes +directory, but dropping the .yml extension at the end of the node file. This is +controlled with the setting class_mappings_match_path. When False (the +default) the match is done again the node name and when true the match is done +against the node file path. + .. warning:: The class mappings do not really belong in the configuration file, as they diff --git a/reclass/core.py b/reclass/core.py index 3e0ab34d..1ce74ed2 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -72,26 +72,30 @@ def _shlex_split(instr): key = '/{0}/'.format(key) return key, list(lexer) - def _get_class_mappings_entity(self, nodename): + def _get_class_mappings_entity(self, entity): if not self._class_mappings: return Entity(self._settings, name='empty (class mappings)') c = Classes() + if self._settings.class_mappings_match_path: + matchname = entity.pathname + else: + matchname = entity.name for mapping in self._class_mappings: matched = False key, klasses = Core._shlex_split(mapping) if key[0] == ('/'): - matched = Core._match_regexp(key[1:-1], nodename) + matched = Core._match_regexp(key[1:-1], matchname) if matched: for klass in klasses: c.append_if_new(matched.expand(klass)) else: - if Core._match_glob(key, nodename): + if Core._match_glob(key, matchname): for klass in klasses: c.append_if_new(klass) return Entity(self._settings, classes=c, - name='class mappings for node {0}'.format(nodename)) + name='class mappings for node {0}'.format(entity.name)) def _get_input_data_entity(self): if not self._input_data: @@ -207,7 +211,7 @@ def _node_entity(self, nodename): if node_entity.environment == None: node_entity.environment = self._settings.default_environment base_entity = Entity(self._settings, name='base') - base_entity.merge(self._get_class_mappings_entity(node_entity.name)) + base_entity.merge(self._get_class_mappings_entity(node_entity)) base_entity.merge(self._get_input_data_entity()) base_entity.merge_parameters(self._get_automatic_parameters(nodename, node_entity.environment)) seen = {} diff --git a/reclass/datatypes/entity.py b/reclass/datatypes/entity.py index 2e0e1e43..88b5afe5 100644 --- a/reclass/datatypes/entity.py +++ b/reclass/datatypes/entity.py 
@@ -24,9 +24,10 @@ class Entity(object): ''' def __init__(self, settings, classes=None, applications=None, parameters=None, exports=None, uri=None, name=None, - environment=None): + pathname=None, environment=None): self._uri = '' if uri is None else uri self._name = '' if name is None else name + self._pathname = '' if pathname is None else pathname self._classes = self._set_field(classes, Classes) self._applications = self._set_field(applications, Applications) pars = [None, settings, uri] @@ -36,6 +37,7 @@ def __init__(self, settings, classes=None, applications=None, name = property(lambda s: s._name) uri = property(lambda s: s._uri) + pathname = property(lambda s: s._pathname) classes = property(lambda s: s._classes) applications = property(lambda s: s._applications) parameters = property(lambda s: s._parameters) @@ -101,10 +103,10 @@ def __ne__(self, other): return not self.__eq__(other) def __repr__(self): - return "%s(%r, %r, %r, %r, uri=%r, name=%r, environment=%r)" % ( + return "%s(%r, %r, %r, %r, uri=%r, name=%r, pathname=%r, environment=%r)" % ( self.__class__.__name__, self.classes, self.applications, self.parameters, self.exports, self.uri, self.name, - self.environment) + self.pathname, self.environment) def as_dict(self): return {'classes': self._classes.as_list(), diff --git a/reclass/defaults.py b/reclass/defaults.py index f240f3f9..f50a8ad5 100644 --- a/reclass/defaults.py +++ b/reclass/defaults.py @@ -57,3 +57,5 @@ AUTOMATIC_RECLASS_PARAMETERS = True DEFAULT_ENVIRONMENT = 'base' + +CLASS_MAPPINGS_MATCH_PATH = False diff --git a/reclass/settings.py b/reclass/settings.py index 62af976b..e9e8a36f 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -19,6 +19,7 @@ class Settings(object): 'allow_dict_over_scalar': defaults.OPT_ALLOW_DICT_OVER_SCALAR, 'allow_none_override': defaults.OPT_ALLOW_NONE_OVERRIDE, 'automatic_parameters': defaults.AUTOMATIC_RECLASS_PARAMETERS, + 'class_mappings_match_path': defaults.CLASS_MAPPINGS_MATCH_PATH, 'default_environment': defaults.DEFAULT_ENVIRONMENT, 'delimiter': defaults.PARAMETER_INTERPOLATION_DELIMITER, 'dict_key_override_prefix': diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py index 3577b362..ee49df39 100644 --- a/reclass/storage/yaml_fs/__init__.py +++ b/reclass/storage/yaml_fs/__init__.py @@ -95,18 +95,20 @@ def get_node(self, name, settings): try: relpath = self._nodes[name] path = os.path.join(self.nodes_uri, relpath) + pathname = os.path.splitext(relpath)[0] except KeyError as e: raise reclass.errors.NodeNotFound(self.name, name, self.nodes_uri) - entity = YamlData.from_file(path).get_entity(name, settings) + entity = YamlData.from_file(path).get_entity(name, pathname, settings) return entity def get_class(self, name, environment, settings): vvv('GET CLASS {0}'.format(name)) try: path = os.path.join(self.classes_uri, self._classes[name]) + pathname = os.path.splitext(self._classes[name])[0] except KeyError as e: raise reclass.errors.ClassNotFound(self.name, name, self.classes_uri) - entity = YamlData.from_file(path).get_entity(name, settings) + entity = YamlData.from_file(path).get_entity(name, pathname, settings) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py index 06d839b4..5ce2e373 100644 --- a/reclass/storage/yaml_git/__init__.py +++ b/reclass/storage/yaml_git/__init__.py @@ -273,7 +273,9 @@ def __init__(self, nodes_uri, classes_uri, compose_node_name): def get_node(self, name, settings): file = 
self._nodes[name] blob = self._repos[self._nodes_uri.repo].get(file.id) - entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(self._nodes_uri.repo, self._nodes_uri.branch, file.path)).get_entity(name, settings) + uri = 'git_fs://{0} {1} {2}'.format(self._nodes_uri.repo, self._nodes_uri.branch, file.path) + pathname = os.path.splitext(file.path)[0] + entity = YamlData.from_string(blob.data, uri).get_entity(name, pathname, settings) return entity def get_class(self, name, environment, settings): @@ -288,7 +290,9 @@ def get_class(self, name, environment, settings): raise reclass.errors.NotFoundError("File " + name + " missing from " + uri.repo + " branch " + uri.branch) file = self._repos[uri.repo].files[uri.branch][name] blob = self._repos[uri.repo].get(file.id) - entity = YamlData.from_string(blob.data, 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, file.path)).get_entity(name, settings) + uri = 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, file.path) + pathname = os.path.splitext(file.path)[0] + entity = YamlData.from_string(blob.data, uri).get_entity(name, pathname, settings) return entity def enumerate_nodes(self): diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py index a38b589a..f68d8036 100644 --- a/reclass/storage/yamldata.py +++ b/reclass/storage/yamldata.py @@ -80,10 +80,7 @@ def yield_dots(self, value): def count_dots(self, value): return len(list(self.yield_dots(value))) - def get_entity(self, name, settings): - #if name is None: - # name = self._uri - + def get_entity(self, name, pathname, settings): classes = self._data.get('classes') if classes is None: classes = [] @@ -108,7 +105,7 @@ def get_entity(self, name, settings): env = self._data.get('environment', None) return datatypes.Entity(settings, classes=classes, applications=applications, parameters=parameters, - exports=exports, name=name, environment=env, uri=self.uri) + exports=exports, name=name, pathname=pathname, environment=env, uri=self.uri) def __str__(self): return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri, diff --git a/reclass/tests/data/04/classes/one.yml b/reclass/tests/data/04/classes/one.yml new file mode 100644 index 00000000..37ee5e80 --- /dev/null +++ b/reclass/tests/data/04/classes/one.yml @@ -0,0 +1,2 @@ +parameters: + test1: 1 diff --git a/reclass/tests/data/04/classes/three.yml b/reclass/tests/data/04/classes/three.yml new file mode 100644 index 00000000..f71f8ce1 --- /dev/null +++ b/reclass/tests/data/04/classes/three.yml @@ -0,0 +1,2 @@ +parameters: + test3: 3 diff --git a/reclass/tests/data/04/classes/two.yml b/reclass/tests/data/04/classes/two.yml new file mode 100644 index 00000000..80d52099 --- /dev/null +++ b/reclass/tests/data/04/classes/two.yml @@ -0,0 +1,2 @@ +parameters: + test2: 2 diff --git a/reclass/tests/data/04/nodes/alpha/node1.yml b/reclass/tests/data/04/nodes/alpha/node1.yml new file mode 100644 index 00000000..f0f59f52 --- /dev/null +++ b/reclass/tests/data/04/nodes/alpha/node1.yml @@ -0,0 +1,2 @@ +classes: + - one diff --git a/reclass/tests/test_core.py b/reclass/tests/test_core.py index 4827177b..c1e283d2 100644 --- a/reclass/tests/test_core.py +++ b/reclass/tests/test_core.py @@ -23,13 +23,13 @@ class TestCore(unittest.TestCase): - def _core(self, dataset, opts={}): + def _core(self, dataset, opts={}, class_mappings=[]): inventory_uri = os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset path_mangler = get_path_mangler('yaml_fs') nodes_uri, classes_uri = path_mangler(inventory_uri, 'nodes', 'classes') 
settings = Settings(opts) storage = get_storage('yaml_fs', nodes_uri, classes_uri, settings.compose_node_name) - return Core(storage, None, settings) + return Core(storage, class_mappings, settings) def test_type_conversion(self): reclass = self._core('01') @@ -72,7 +72,7 @@ def test_top_relative_class_names(self): self.assertEqual(node['parameters'], params) def test_compose_node_names(self): - reclass = self._core('03', {'compose_node_name': True}) + reclass = self._core('03', opts={'compose_node_name': True}) alpha_one_node = reclass.nodeinfo('alpha.one') alpha_one_res = {'a': 1, 'alpha': [1, 2], 'beta': {'a': 1, 'b': 2}, 'b': 2, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.one', 'short': 'alpha'}}} alpha_two_node = reclass.nodeinfo('alpha.two') @@ -86,6 +86,18 @@ def test_compose_node_names(self): self.assertEqual(beta_one_node['parameters'], beta_one_res) self.assertEqual(beta_two_node['parameters'], beta_two_res) + def test_class_mappings_match_path_false(self): + reclass = self._core('04', opts={'class_mappings_match_path': False}, class_mappings=['node* two', 'alpha/node* three']) + node = reclass.nodeinfo('node1') + params = { 'test1': 1, 'test2': 2, '_reclass_': {'environment': u'base', 'name': {'full': 'node1', 'short': 'node1'}}} + self.assertEqual(node['parameters'], params) + + def test_class_mappings_match_path_true(self): + reclass = self._core('04', opts={'class_mappings_match_path': True}, class_mappings=['node* two', 'alpha/node* three']) + node = reclass.nodeinfo('node1') + params = { 'test1': 1, 'test3': 3, '_reclass_': {'environment': u'base', 'name': {'full': 'node1', 'short': 'node1'}}} + self.assertEqual(node['parameters'], params) + if __name__ == '__main__': unittest.main() From 09d436cc99e7c7e0d6cb6d03c5343c31284c0507 Mon Sep 17 00:00:00 2001 From: Andrew Pickford Date: Wed, 17 Jun 2020 09:17:20 +0200 Subject: [PATCH 57/63] Fix indentical inventory queries with different flags returning incorrect data With two indentical inventory queries, one with the AllEnvs flag and one without both queries return the data from the query with the AllEnvs flag set. This fix adds a test to the invitem class when assembling the inventory query result to check that the node environments match or that the query has the AllEnvs flag set. --- reclass/core.py | 17 ++- reclass/datatypes/tests/test_entity.py | 88 +++++++---- reclass/datatypes/tests/test_exports.py | 188 +++++++++++++++--------- reclass/values/__init__.py | 4 + reclass/values/invitem.py | 31 ++-- 5 files changed, 213 insertions(+), 115 deletions(-) diff --git a/reclass/core.py b/reclass/core.py index 1ce74ed2..61443c2e 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -25,6 +25,7 @@ from reclass.settings import Settings from reclass.datatypes import Entity, Classes, Parameters, Exports from reclass.errors import MappingFormatError, ClassNameResolveError, ClassNotFound, InvQueryClassNameResolveError, InvQueryClassNotFound, InvQueryError, InterpolationError, ResolveError +from reclass.values import NodeInventory from reclass.values.parser import Parser @@ -172,6 +173,19 @@ def _get_automatic_parameters(self, nodename, environment): return Parameters({}, self._settings, '') def _get_inventory(self, all_envs, environment, queries): + ''' + Returns a dictionary of NodeInventory objects, one per matching node. Exports + which are required for the given queries (or all exports if the queries is None) + are rendered, remaining exports are left unrendered. 
+ + Args: + all_envs - if True match export values from nodes in any environment + else if False match only for nodes in the same environment as the + environment parameter + environment - node environment to match against if all_envs is False + queries - list of inventory queries to determine required export values + or if None match all exports defined by a node + ''' inventory = {} for nodename in self._storage.enumerate_nodes(): try: @@ -191,6 +205,7 @@ def _get_inventory(self, all_envs, environment, queries): except ClassNameResolveError as e: raise InvQueryClassNameResolveError(e) if queries is None: + # This only happens if reclass is called with the --inventory option try: node.interpolate_exports() except InterpolationError as e: @@ -203,7 +218,7 @@ def _get_inventory(self, all_envs, environment, queries): except InterpolationError as e: e.nodename = nodename raise InvQueryError(q.contents, e, context=p, uri=q.uri) - inventory[nodename] = node.exports.as_dict() + inventory[nodename] = NodeInventory(node.exports.as_dict(), node_base.environment == environment) return inventory def _node_entity(self, nodename): diff --git a/reclass/datatypes/tests/test_entity.py b/reclass/datatypes/tests/test_entity.py index f18f3fcc..a2b71b51 100644 --- a/reclass/datatypes/tests/test_entity.py +++ b/reclass/datatypes/tests/test_entity.py @@ -11,9 +11,12 @@ from __future__ import print_function from __future__ import unicode_literals +from six import iteritems + from reclass.settings import Settings from reclass.datatypes import Entity, Classes, Parameters, Applications, Exports from reclass.errors import ResolveError +from reclass.values import NodeInventory import unittest try: @@ -167,6 +170,9 @@ def test_as_dict(self, **types): class TestEntityNoMock(unittest.TestCase): + def _make_inventory(self, nodes): + return { name: NodeInventory(node, True) for name, node in iteritems(nodes) } + def test_interpolate_list_types(self): node1_exports = Exports({'exps': [ '${one}' ] }, SETTINGS, 'first') node1_parameters = Parameters({'alpha': [ '${two}', '${three}' ], 'one': 1, 'two': 2, 'three': 3 }, SETTINGS, 'first') @@ -174,33 +180,37 @@ def test_interpolate_list_types(self): node2_exports = Exports({'exps': '${alpha}' }, SETTINGS, 'second') node2_parameters = Parameters({}, SETTINGS, 'second') node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) - r = {'exps': [ 1, 2, 3 ]} + result = {'exps': [ 1, 2, 3 ]} node1_entity.merge(node2_entity) node1_entity.interpolate(None) self.assertIs(type(node1_entity.exports.as_dict()['exps']), list) - self.assertDictEqual(node1_entity.exports.as_dict(), r) + self.assertDictEqual(node1_entity.exports.as_dict(), result) def test_exports_with_refs(self): - inventory = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} + inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) node3_exports = Exports({'a': '${a}', 'b': '${b}'}, SETTINGS, '') node3_parameters = Parameters({'name': 'node3', 'a': '${c}', 'b': 5}, SETTINGS, '') node3_parameters.merge({'c': 3}) node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) node3_entity.interpolate_exports() - inventory['node3'] = node3_entity.exports.as_dict() - r = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 3, 'b': 5}} - self.assertDictEqual(inventory, r) + inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) + result = { 'node1': 
NodeInventory({'a': 1, 'b': 2}, True), + 'node2': NodeInventory({'a': 3, 'b': 4}, True), + 'node3': NodeInventory({'a': 3, 'b': 5}, True) } + self.assertDictEqual(inventory, result) def test_reference_to_an_export(self): - inventory = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} + inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) node3_exports = Exports({'a': '${a}', 'b': '${b}'}, SETTINGS, '') node3_parameters = Parameters({'name': 'node3', 'ref': '${exp}', 'a': '${c}', 'b': 5}, SETTINGS, '') node3_parameters.merge({'c': 3, 'exp': '$[ exports:a ]'}) node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) node3_entity.interpolate_exports() - inventory['node3'] = node3_entity.exports.as_dict() + inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) node3_entity.interpolate(inventory) - res_inv = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 3, 'b': 5}} + res_inv = { 'node1': NodeInventory({'a': 1, 'b': 2}, True), + 'node2': NodeInventory({'a': 3, 'b': 4}, True), + 'node3': NodeInventory({'a': 3, 'b': 5}, True) } res_params = {'a': 3, 'c': 3, 'b': 5, 'name': 'node3', 'exp': {'node1': 1, 'node3': 3, 'node2': 3}, 'ref': {'node1': 1, 'node3': 3, 'node2': 3}} self.assertDictEqual(node3_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) @@ -218,40 +228,61 @@ def test_exports_multiple_nodes(self): for p, q in queries: node1_entity.interpolate_single_export(q) node2_entity.interpolate_single_export(q) - res_inv = {'node1': {'a': {'test': 1}}, 'node2': {'a': {'test': 2}}} - res_params = {'a': {'test': 1}, 'b': 1, 'name': 'node1', 'exp': {'node1': {'test': 1}, 'node2': {'test': 2}}} - inventory = {} - inventory['node1'] = node1_entity.exports.as_dict() - inventory['node2'] = node2_entity.exports.as_dict() + res_inv = { 'node1': NodeInventory({'a': {'test': 1}}, True), + 'node2': NodeInventory({'a': {'test': 2}}, True) } + res_params = { 'name': 'node1', + 'a': {'test': 1}, + 'b': 1, + 'exp': {'node1': {'test': 1}, 'node2': {'test': 2}} } + inventory = self._make_inventory({'node1': node1_entity.exports.as_dict(), 'node2': node2_entity.exports.as_dict()}) node1_entity.interpolate(inventory) self.assertDictEqual(node1_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) def test_exports_with_ancestor_references(self): - inventory = {'node1': {'alpha' : {'beta': {'a': 1, 'b': 2}}}, 'node2': {'alpha' : {'beta': {'a': 3, 'b': 4}}}} + inventory = self._make_inventory({'node1': {'alpha' : {'beta': {'a': 1, 'b': 2}}}, 'node2': {'alpha' : {'beta': {'a': 3, 'b': 4}}}}) node3_exports = Exports({'alpha': '${alpha}'}, SETTINGS, '') node3_parameters = Parameters({'name': 'node3', 'alpha': {'beta' : {'a': 5, 'b': 6}}, 'exp': '$[ exports:alpha:beta ]'}, SETTINGS, '') node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) - res_params = {'exp': {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 5, 'b': 6}}, 'name': 'node3', 'alpha': {'beta': {'a': 5, 'b': 6}}} - res_inv = {'node1': {'alpha' : {'beta': {'a': 1, 'b': 2}}}, 'node2': {'alpha' : {'beta': {'a': 3, 'b': 4}}}, 'node3': {'alpha' : {'beta': {'a': 5, 'b': 6}}}} + res_params = { 'name': 'node3', + 'exp': {'node1': {'a': 1, 'b': 2}, + 'node2': {'a': 3, 'b': 4}, + 'node3': {'a': 5, 'b': 6}}, + 'alpha': {'beta': {'a': 5, 'b': 6}} } + res_inv = { 'node1': NodeInventory({'alpha' : {'beta': 
{'a': 1, 'b': 2}}}, True), + 'node2': NodeInventory({'alpha' : {'beta': {'a': 3, 'b': 4}}}, True), + 'node3': NodeInventory({'alpha' : {'beta': {'a': 5, 'b': 6}}}, True) } node3_entity.initialise_interpolation() queries = node3_entity.parameters.get_inv_queries() for p, q in queries: node3_entity.interpolate_single_export(q) - inventory['node3'] = node3_entity.exports.as_dict() + inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) node3_entity.interpolate(inventory) self.assertDictEqual(node3_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) def test_exports_with_nested_references(self): - inventory = {'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}} + inventory = self._make_inventory({'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}}) node3_exports = Exports({'alpha': '${alpha}'}, SETTINGS, '') - node3_parameters = Parameters({'name': 'node3', 'alpha': {'a': '${one}', 'b': '${two}'}, 'beta': '$[ exports:alpha ]', 'one': '111', 'two': '${three}', 'three': '123'}, SETTINGS, '') + node3_parameters = Parameters({ 'name': 'node3', + 'alpha': {'a': '${one}', 'b': '${two}'}, + 'beta': '$[ exports:alpha ]', + 'one': '111', + 'two': '${three}', + 'three': '123'}, + SETTINGS, '') node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) - res_params = {'beta': {'node1': {'a': 1, 'b': 2}, 'node3': {'a': '111', 'b': '123'}, 'node2': {'a': 3, 'b': 4}}, 'name': 'node3', 'alpha': {'a': '111', 'b': '123'}, 'three': '123', 'two': '123', 'one': '111'} - res_inv = {'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}, 'node3': {'alpha': {'a': '111', 'b': '123'}}} + res_params = { 'name': 'node3', + 'alpha': { 'a': '111', 'b': '123' }, + 'beta': { 'node1': {'a': 1, 'b': 2 }, 'node2': { 'a': 3, 'b': 4}, 'node3': { 'a': '111', 'b': '123' } }, + 'one': '111', + 'two': '123', + 'three': '123' } + res_inv = { 'node1': NodeInventory({'alpha': {'a': 1, 'b': 2}}, True), + 'node2': NodeInventory({'alpha': {'a': 3, 'b': 4}}, True), + 'node3': NodeInventory({'alpha': {'a': '111', 'b': '123'}}, True) } node3_entity.interpolate_exports() - inventory['node3'] = node3_entity.exports.as_dict() + inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) node3_entity.interpolate(inventory) self.assertDictEqual(node3_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) @@ -285,11 +316,12 @@ def test_exports_failed_render_ignore(self): for p, q in queries: node1_entity.interpolate_single_export(q) node2_entity.interpolate_single_export(q) - res_inv = {'node1': {'a': 1}, 'node2': {}} - res_params = { 'a': 1, 'name': 'node1', 'exp': {'node1': 1} } - inventory = {} - inventory['node1'] = node1_entity.exports.as_dict() - inventory['node2'] = node2_entity.exports.as_dict() + res_inv = { 'node1': NodeInventory({'a': 1}, True), + 'node2': NodeInventory({}, True) } + res_params = { 'name': 'node1', + 'a': 1, + 'exp': {'node1': 1} } + inventory = self._make_inventory({'node1': node1_entity.exports.as_dict(), 'node2': node2_entity.exports.as_dict()}) node1_entity.interpolate(inventory) self.assertDictEqual(node1_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) diff --git a/reclass/datatypes/tests/test_exports.py b/reclass/datatypes/tests/test_exports.py index 16a45cb4..e0c5cc12 100644 --- a/reclass/datatypes/tests/test_exports.py +++ b/reclass/datatypes/tests/test_exports.py @@ -8,33 +8,39 @@ from 
__future__ import print_function from __future__ import unicode_literals +from six import iteritems + from reclass.utils.parameterdict import ParameterDict from reclass.utils.parameterlist import ParameterList from reclass.settings import Settings from reclass.datatypes import Exports, Parameters from reclass.errors import ParseError +from reclass.values import NodeInventory import unittest SETTINGS = Settings() class TestInvQuery(unittest.TestCase): + def _make_inventory(self, nodes): + return { name: NodeInventory(node, True) for name, node in iteritems(nodes) } + def test_overwrite_method(self): - e = Exports({'alpha': { 'one': 1, 'two': 2}}, SETTINGS, '') - d = {'alpha': { 'three': 3, 'four': 4}} - e.overwrite(d) - e.interpolate() - self.assertEqual(e.as_dict(), d) + exports = Exports({'alpha': { 'one': 1, 'two': 2}}, SETTINGS, '') + data = {'alpha': { 'three': 3, 'four': 4}} + exports.overwrite(data) + exports.interpolate() + self.assertEqual(exports.as_dict(), data) def test_interpolate_types(self): - e = Exports({'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]}, SETTINGS, '') - r = {'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]} - self.assertIs(type(e.as_dict()['alpha']), ParameterDict) - self.assertIs(type(e.as_dict()['beta']), ParameterList) - e.interpolate() - self.assertIs(type(e.as_dict()['alpha']), dict) - self.assertIs(type(e.as_dict()['beta']), list) - self.assertEqual(e.as_dict(), r) + exports = Exports({'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]}, SETTINGS, '') + result = {'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]} + self.assertIs(type(exports.as_dict()['alpha']), ParameterDict) + self.assertIs(type(exports.as_dict()['beta']), ParameterList) + exports.interpolate() + self.assertIs(type(exports.as_dict()['alpha']), dict) + self.assertIs(type(exports.as_dict()['beta']), list) + self.assertEqual(exports.as_dict(), result) def test_malformed_invquery(self): with self.assertRaises(ParseError): @@ -51,83 +57,121 @@ def test_malformed_invquery(self): p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value anddd exports:c == self:test_value2 ]'}, SETTINGS, '') def test_value_expr_invquery(self): - e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} - p = Parameters({'exp': '$[ exports:a ]'}, SETTINGS, '') - r = {'exp': {'node1': 1, 'node2': 3}} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) + parameters = Parameters({'exp': '$[ exports:a ]'}, SETTINGS, '') + result = {'exp': {'node1': 1, 'node2': 3}} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery(self): - e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} - p = Parameters({'exp': '$[ exports:a if exports:b == 4 ]'}, SETTINGS, '') - r = {'exp': {'node2': 3}} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) + parameters = Parameters({'exp': '$[ exports:a if exports:b == 4 ]'}, SETTINGS, '') + result = {'exp': {'node2': 3}} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery_with_refs(self): - e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}} - p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value ]', 'test_value': 2}, SETTINGS, '') - r = {'exp': {'node1': 1}, 'test_value': 2} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = 
self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) + parameters = Parameters({'exp': '$[ exports:a if exports:b == self:test_value ]', 'test_value': 2}, SETTINGS, '') + result = {'exp': {'node1': 1}, 'test_value': 2} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_list_if_expr_invquery(self): - e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}} - p = Parameters({'exp': '$[ if exports:b == 2 ]'}, SETTINGS, '') - r1 = {'exp': ['node1', 'node3']} - r2 = {'exp': ['node3', 'node1']} - p.interpolate(e) - self.assertIn(p.as_dict(), [ r1, r2 ]) + inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}}) + parameters = Parameters({'exp': '$[ if exports:b == 2 ]'}, SETTINGS, '') + result = {'exp': ['node1', 'node3']} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery_wth_and(self): - e = {'node1': {'a': 1, 'b': 4, 'c': False}, 'node2': {'a': 3, 'b': 4, 'c': True}} - p = Parameters({'exp': '$[ exports:a if exports:b == 4 and exports:c == True ]'}, SETTINGS, '') - r = {'exp': {'node2': 3}} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = self._make_inventory({'node1': {'a': 1, 'b': 4, 'c': False}, 'node2': {'a': 3, 'b': 4, 'c': True}}) + parameters = Parameters({'exp': '$[ exports:a if exports:b == 4 and exports:c == True ]'}, SETTINGS, '') + result = {'exp': {'node2': 3}} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery_wth_or(self): - e = {'node1': {'a': 1, 'b': 4}, 'node2': {'a': 3, 'b': 3}} - p = Parameters({'exp': '$[ exports:a if exports:b == 4 or exports:b == 3 ]'}, SETTINGS, '') - r = {'exp': {'node1': 1, 'node2': 3}} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = self._make_inventory({'node1': {'a': 1, 'b': 4}, 'node2': {'a': 3, 'b': 3}}) + parameters = Parameters({'exp': '$[ exports:a if exports:b == 4 or exports:b == 3 ]'}, SETTINGS, '') + result = {'exp': {'node1': 1, 'node2': 3}} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_list_if_expr_invquery_with_and(self): - e = {'node1': {'a': 1, 'b': 2, 'c': 'green'}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2, 'c': 'red'}} - p = Parameters({'exp': '$[ if exports:b == 2 and exports:c == green ]'}, SETTINGS, '') - r = {'exp': ['node1']} - p.interpolate(e) - self.assertEqual(p.as_dict(), r) + inventory = self._make_inventory( + { 'node1': {'a': 1, 'b': 2, 'c': 'green'}, + 'node2': {'a': 3, 'b': 3}, + 'node3': {'a': 3, 'b': 2, 'c': 'red'} }) + parameters = Parameters({'exp': '$[ if exports:b == 2 and exports:c == green ]'}, SETTINGS, '') + result = {'exp': ['node1']} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_list_if_expr_invquery_with_and_missing(self): - inventory = {'node1': {'a': 1, 'b': 2, 'c': 'green'}, - 'node2': {'a': 3, 'b': 3}, - 'node3': {'a': 3, 'b': 2}} + inventory = self._make_inventory({'node1': {'a': 1, 'b': 2, 'c': 'green'}, + 'node2': {'a': 3, 'b': 3}, + 'node3': {'a': 3, 'b': 2}}) mapping = {'exp': '$[ if exports:b == 2 and exports:c == green ]'} expected = {'exp': ['node1']} - - pars = Parameters(mapping, SETTINGS, '') - pars.interpolate(inventory) - - self.assertEqual(pars.as_dict(), expected) - - def test_list_if_expr_invquery_with_and(self): - e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 
'node3': {'a': 3, 'b': 4}} - p = Parameters({'exp': '$[ if exports:b == 2 or exports:b == 4 ]'}, SETTINGS, '') - r1 = {'exp': ['node1', 'node3']} - r2 = {'exp': ['node3', 'node1']} - p.interpolate(e) - self.assertIn(p.as_dict(), [ r1, r2 ]) + parameterss = Parameters(mapping, SETTINGS, '') + parameterss.interpolate(inventory) + self.assertEqual(parameterss.as_dict(), expected) + + def test_list_if_expr_invquery_with_or(self): + inventory = self._make_inventory( + { 'node1': {'a': 1, 'b': 2}, + 'node2': {'a': 3, 'b': 3}, + 'node3': {'a': 3, 'b': 4} }) + parameters = Parameters({'exp': '$[ if exports:b == 2 or exports:b == 4 ]'}, SETTINGS, '') + result = {'exp': ['node1', 'node3']} + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) def test_merging_inv_queries(self): - e = {'node1': {'a': 1}, 'node2': {'a': 1}, 'node3': {'a': 2}} - p1 = Parameters({'exp': '$[ if exports:a == 1 ]'}, SETTINGS, '') - p2 = Parameters({'exp': '$[ if exports:a == 2 ]'}, SETTINGS, '') - r = { 'exp': [ 'node1', 'node2', 'node3' ] } - p1.merge(p2) - p1.interpolate(e) - self.assertEqual(p1.as_dict(), r) + inventory = self._make_inventory({'node1': {'a': 1}, 'node2': {'a': 1}, 'node3': {'a': 2}}) + pars1 = Parameters({'exp': '$[ if exports:a == 1 ]'}, SETTINGS, '') + pars2 = Parameters({'exp': '$[ if exports:a == 2 ]'}, SETTINGS, '') + result = { 'exp': [ 'node1', 'node2', 'node3' ] } + pars1.merge(pars2) + pars1.interpolate(inventory) + self.assertEqual(pars1.as_dict(), result) + + def test_same_expr_invquery_different_flags(self): + inventory = { 'node1': NodeInventory({'a': 1}, True), + 'node2': NodeInventory({'a': 2}, True), + 'node3': NodeInventory({'a': 3}, False) } + parameters = Parameters({'alpha': '$[ exports:a ]', 'beta': '$[ +AllEnvs exports:a ]'}, SETTINGS, '') + result = { 'alpha': { 'node1': 1, 'node2': 2 }, + 'beta': { 'node1': 1 , 'node2': 2, 'node3': 3 } } + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) + + def test_same_if_expr_invquery_different_flags(self): + inventory = { 'node1': NodeInventory({'a': 1, 'b': 1}, True), + 'node2': NodeInventory({'a': 2, 'b': 2}, True), + 'node3': NodeInventory({'a': 3, 'b': 2}, False) } + parameters = Parameters( + { 'alpha': '$[ exports:a if exports:b == 2 ]', + 'beta': '$[ +AllEnvs exports:a if exports:b == 2]' }, + SETTINGS, '') + result = { 'alpha': { 'node2': 2 }, + 'beta': { 'node2': 2, 'node3': 3 } } + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) + + def test_same_list_if_expr_invquery_different_flags(self): + inventory = { 'node1': NodeInventory({'a': 1}, True), + 'node2': NodeInventory({'a': 2}, True), + 'node3': NodeInventory({'a': 2}, False) } + parameters = Parameters( + { 'alpha': '$[ if exports:a == 2 ]', + 'beta': '$[ +AllEnvs if exports:a == 2]' }, + SETTINGS, '') + result = { 'alpha': [ 'node2' ], + 'beta': [ 'node2', 'node3' ] } + parameters.interpolate(inventory) + self.assertEqual(parameters.as_dict(), result) if __name__ == '__main__': unittest.main() diff --git a/reclass/values/__init__.py b/reclass/values/__init__.py index ec0f8822..0458d34e 100644 --- a/reclass/values/__init__.py +++ b/reclass/values/__init__.py @@ -3,3 +3,7 @@ from __future__ import division from __future__ import print_function from __future__ import unicode_literals + +import collections + +NodeInventory = collections.namedtuple('NodeInventory', ['items', 'env_matches'], rename=False) diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py index 
adb1cb6c..d8f38749 100644 --- a/reclass/values/invitem.py +++ b/reclass/values/invitem.py @@ -16,8 +16,7 @@ from six import iteritems from six import string_types -from reclass.values import item -from reclass.values import parser_funcs +from reclass.values import item, parser_funcs from reclass.settings import Settings from reclass.utils.dictpath import DictPath from reclass.errors import ExpressionError, ParseError, ResolveError @@ -200,10 +199,11 @@ def _resolve(self, path, dictionary): def _value_expression(self, inventory): results = {} - for (node, items) in iteritems(inventory): - if self._value_path.exists_in(items): - results[node] = copy.deepcopy(self._resolve(self._value_path, - items)) + for name, node in iteritems(inventory): + if self.needs_all_envs or node.env_matches: + if self._value_path.exists_in(node.items): + answer = self._resolve(self._value_path, node.items) + results[name] = copy.deepcopy(answer) return results def _test_expression(self, context, inventory): @@ -212,18 +212,21 @@ def _test_expression(self, context, inventory): raise ExpressionError(msg % str(self), tbFlag=False) results = {} - for node, items in iteritems(inventory): - if (self._question.value(context, items) and - self._value_path.exists_in(items)): - results[node] = copy.deepcopy( - self._resolve(self._value_path, items)) + for name, node in iteritems(inventory): + if self.needs_all_envs or node.env_matches: + if (self._question.value(context, node.items) and + self._value_path.exists_in(node.items)): + answer = self._resolve(self._value_path, node.items) + results[name] = copy.deepcopy(answer) return results def _list_test_expression(self, context, inventory): results = [] - for (node, items) in iteritems(inventory): - if self._question.value(context, items): - results.append(node) + for name, node in iteritems(inventory): + if self.needs_all_envs or node.env_matches: + if self._question.value(context, node.items): + results.append(name) + results.sort() return results def render(self, context, inventory): From f7e3e948b60f800c1879950f6cef470c58c0f797 Mon Sep 17 00:00:00 2001 From: Bruno Binet Date: Thu, 2 Apr 2020 16:46:21 +0200 Subject: [PATCH 58/63] Add support to use current node parameters as references in class name so that with the following reclass config: ``` => /etc/reclass/nodes/mynode.yml classes: - common parameters: project: myproject => /etc/reclass/classes/common.yml class: - ${project} => /etc/reclass/classes/myproject.yml parameters: some: project: parameters ``` The mynode parameters would be merged successfully by reclass and we will get the following result for the parameters: ``` parameters: project: myproject some: project: parameters ``` Without this patch, this would result in the following error: ``` Failed to load ext_pillar reclass: ext_pillar.reclass: -> mynode In yaml_fs:///etc/reclass/classes/common.yml Class name ${project} not resolvable ``` --- reclass/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/reclass/core.py b/reclass/core.py index 61443c2e..67b0f92a 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -229,6 +229,7 @@ def _node_entity(self, nodename): base_entity.merge(self._get_class_mappings_entity(node_entity)) base_entity.merge(self._get_input_data_entity()) base_entity.merge_parameters(self._get_automatic_parameters(nodename, node_entity.environment)) + base_entity.merge_parameters(Parameters(node_entity.parameters, self._settings, '')) seen = {} merge_base = self._recurse_entity(base_entity, seen=seen, nodename=nodename, 
environment=node_entity.environment) From 5656186465125f8974419f76ce1cbf4d787d8ac9 Mon Sep 17 00:00:00 2001 From: Bruno Binet Date: Wed, 6 May 2020 16:29:42 +0200 Subject: [PATCH 59/63] Only use scalar parameters as references in class name so that we avoid side effect of list elements being merged twice in the same list. This commit adds a new "scalar_parameters" reclass config option (defaults to False) used to indicate where to find scalar parameters in node parameters dict. --- reclass/core.py | 11 ++++++++++- reclass/defaults.py | 1 + reclass/settings.py | 1 + 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/reclass/core.py b/reclass/core.py index 67b0f92a..f35450f4 100644 --- a/reclass/core.py +++ b/reclass/core.py @@ -172,6 +172,15 @@ def _get_automatic_parameters(self, nodename, environment): else: return Parameters({}, self._settings, '') + def _get_scalar_parameters(self, node_parameters): + if self._settings.scalar_parameters: + scalars = node_parameters.as_dict().get( + self._settings.scalar_parameters, {}) + return Parameters( + {self._settings.scalar_parameters: scalars}, self._settings, '__scalar__') + else: + return Parameters({}, self._settings, '') + def _get_inventory(self, all_envs, environment, queries): ''' Returns a dictionary of NodeInventory objects, one per matching node. Exports @@ -229,7 +238,7 @@ def _node_entity(self, nodename): base_entity.merge(self._get_class_mappings_entity(node_entity)) base_entity.merge(self._get_input_data_entity()) base_entity.merge_parameters(self._get_automatic_parameters(nodename, node_entity.environment)) - base_entity.merge_parameters(Parameters(node_entity.parameters, self._settings, '')) + base_entity.merge_parameters(self._get_scalar_parameters(node_entity.parameters)) seen = {} merge_base = self._recurse_entity(base_entity, seen=seen, nodename=nodename, environment=node_entity.environment) diff --git a/reclass/defaults.py b/reclass/defaults.py index f50a8ad5..7ea797e5 100644 --- a/reclass/defaults.py +++ b/reclass/defaults.py @@ -56,6 +56,7 @@ ESCAPE_CHARACTER = '\\' AUTOMATIC_RECLASS_PARAMETERS = True +SCALAR_RECLASS_PARAMETERS = False DEFAULT_ENVIRONMENT = 'base' CLASS_MAPPINGS_MATCH_PATH = False diff --git a/reclass/settings.py b/reclass/settings.py index e9e8a36f..08fdea50 100644 --- a/reclass/settings.py +++ b/reclass/settings.py @@ -20,6 +20,7 @@ class Settings(object): 'allow_none_override': defaults.OPT_ALLOW_NONE_OVERRIDE, 'automatic_parameters': defaults.AUTOMATIC_RECLASS_PARAMETERS, 'class_mappings_match_path': defaults.CLASS_MAPPINGS_MATCH_PATH, + 'scalar_parameters': defaults.SCALAR_RECLASS_PARAMETERS, 'default_environment': defaults.DEFAULT_ENVIRONMENT, 'delimiter': defaults.PARAMETER_INTERPOLATION_DELIMITER, 'dict_key_override_prefix': From 7673374fdf2d1d7276104d885ab5c0ead24d532f Mon Sep 17 00:00:00 2001 From: Petr Michalec Date: Fri, 2 Oct 2020 17:59:25 +0200 Subject: [PATCH 60/63] bump version 1.7.0 Signed-off-by: Petr Michalec --- README-extensions.rst | 35 +++++++++++++++++++++++++++++++++++ doc/source/changelog.rst | 4 ++++ reclass/version.py | 2 +- 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/README-extensions.rst b/README-extensions.rst index da9ce85b..7b537cfe 100644 --- a/README-extensions.rst +++ b/README-extensions.rst @@ -691,3 +691,38 @@ classes for the pre-prod environment to use a directory on the local disc: storage_type: yaml_fs # options for yaml_fs storage type uri: /srv/salt/env/pre-prod/classes + + +Support to use current node parameters as references in class 
name +------------------------------------------------------------------ + +With the following reclass config: + +.. code-block:: + + => /etc/reclass/nodes/mynode.yml + classes: + - common + parameters: + project: myproject + + => /etc/reclass/classes/common.yml + class: + - ${project} + + => /etc/reclass/classes/myproject.yml + parameters: + some: + project: parameters + + +Will get the following result for the parameters: + +.. code-block:: yaml + + parameters: + project: myproject + some: + project: parameters + + diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index dccf34fe..249b0a4a 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,6 +5,10 @@ ChangeLog ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== +1.7.0 2020-10-02 Fixes and few new features: + * Allow class mappings to wildcard match against either the node name and class + * Support for .yaml along with .yml + * Support to use current node parameters as references in class name 1.6.0 2018-11-06 * Python code and parser refactoring by a-ovchinnikov * Improvements in yaml_git and mixed setup by Andrew Pickford * Relative paths in class names by Petr Michalec, Martin Polreich and Andrew Pickford diff --git a/reclass/version.py b/reclass/version.py index 5a40c2ed..70774184 100644 --- a/reclass/version.py +++ b/reclass/version.py @@ -14,7 +14,7 @@ RECLASS_NAME = 'reclass' DESCRIPTION = ('merge data by recursive descent down an ancestry hierarchy ' '(forked extended version)') -VERSION = '1.6.0' +VERSION = '1.7.0' AUTHOR = 'martin f. krafft / Andrew Pickford / salt-formulas community' AUTHOR_EMAIL = 'salt-formulas@freelists.org' MAINTAINER = 'salt-formulas community' From 52b97e3a35ff82ed4aedeb2bab70cfe68d997ab7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Julien=20Pervill=C3=A9?= Date: Thu, 26 Nov 2020 14:23:43 +0100 Subject: [PATCH 61/63] Fix running 'python run_tests.py' without mock installed systemwide --- reclass/values/tests/test_item.py | 9 ++++++--- reclass/values/tests/test_refitem.py | 9 ++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/reclass/values/tests/test_item.py b/reclass/values/tests/test_item.py index 4b91f6e6..e2565ef3 100644 --- a/reclass/values/tests/test_item.py +++ b/reclass/values/tests/test_item.py @@ -8,7 +8,10 @@ from reclass.values.item import ContainerItem from reclass.values.item import ItemWithReferences import unittest -from mock import MagicMock +try: + import unittest.mock as mock +except ImportError: + import mock SETTINGS = Settings() @@ -16,7 +19,7 @@ class TestItemWithReferences(unittest.TestCase): def test_assembleRef_allrefs(self): - phonyitem = MagicMock() + phonyitem = mock.MagicMock() phonyitem.has_references = True phonyitem.get_references = lambda *x: [1] @@ -26,7 +29,7 @@ def test_assembleRef_allrefs(self): self.assertTrue(iwr.allRefs) def test_assembleRef_partial(self): - phonyitem = MagicMock() + phonyitem = mock.MagicMock() phonyitem.has_references = True phonyitem.allRefs = False phonyitem.get_references = lambda *x: [1] diff --git a/reclass/values/tests/test_refitem.py b/reclass/values/tests/test_refitem.py index 65814782..09887bac 100644 --- a/reclass/values/tests/test_refitem.py +++ b/reclass/values/tests/test_refitem.py @@ -9,14 +9,17 @@ from reclass.values.dictitem import DictItem from reclass.values.refitem import RefItem import unittest -from mock import MagicMock +try: + import 
unittest.mock as mock +except ImportError: + import mock SETTINGS = Settings() class TestRefItem(unittest.TestCase): def test_assembleRefs_ok(self): - phonyitem = MagicMock() + phonyitem = mock.MagicMock() phonyitem.render = lambda x, k: 'bar' phonyitem.has_references = True phonyitem.get_references = lambda *x: ['foo'] @@ -27,7 +30,7 @@ def test_assembleRefs_ok(self): self.assertTrue(iwr.allRefs) def test_assembleRefs_failedrefs(self): - phonyitem = MagicMock() + phonyitem = mock.MagicMock() phonyitem.render.side_effect = errors.ResolveError('foo') phonyitem.has_references = True phonyitem.get_references = lambda *x: ['foo'] From 7e58d031f7c04da95e2e60ff4dbe67d58be9d890 Mon Sep 17 00:00:00 2001 From: Alexander Sulfrian Date: Thu, 21 Jan 2021 16:26:20 +0100 Subject: [PATCH 62/63] Do not require enum34 with Python 3.4 and above enum34 is a backport of the enum library of Python 3.4 to earlier Python versions, so enum34 is not required for Python 3.4 and above and should not be installed. --- requirements.txt | 2 +- setup.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 5f6aed18..5a81c5e6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ pyparsing pyyaml six -enum34 +enum34 ; python_version<'3.4' ddt diff --git a/setup.py b/setup.py index ab23207f..87051272 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,10 @@ url = URL, packages = find_packages(exclude=['*tests']), #FIXME validate this entry_points = { 'console_scripts': console_scripts }, - install_requires = ['pyparsing', 'pyyaml', 'six', 'enum34', 'ddt'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) + install_requires = ['pyparsing', 'pyyaml', 'six', 'ddt'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) + extras_require = { + ":python_version<'3.4'": ['enum34'], + }, classifiers=[ 'Development Status :: 4 - Beta', From 50bebe4888cb3e838c0ae552f27cece9d4ec1256 Mon Sep 17 00:00:00 2001 From: Reto Gantenbein Date: Fri, 15 Jul 2022 23:27:37 +0200 Subject: [PATCH 63/63] Add support for collections.abc in Python >=3.8 --- reclass/values/parser_funcs.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/reclass/values/parser_funcs.py b/reclass/values/parser_funcs.py index db34cebc..21952caf 100644 --- a/reclass/values/parser_funcs.py +++ b/reclass/values/parser_funcs.py @@ -14,6 +14,11 @@ import pyparsing as pp import six +try: + collectionsAbc = collections.abc +except AttributeError: + collectionsAbc = collections + tags = enum.Enum('Tags', ['STR', 'REF', 'INV']) _OBJ = 'OBJ' @@ -49,7 +54,7 @@ def _asList(x): return x def listify(w, modifier=_asList): - if (isinstance(w, collections.Iterable) and + if (isinstance(w, collectionsAbc.Iterable) and not isinstance(w, six.string_types)): cls = type(w) if cls == pp.ParseResults: