|
| 1 | +#!/usr/bin/env python3 |
| 2 | +# |
| 3 | +# Copyright (c) 2017, Linaro Limited |
| 4 | +# Copyright (c) 2018, Bobby Noelte |
| 5 | +# |
| 6 | +# SPDX-License-Identifier: Apache-2.0 |
| 7 | +# |
| 8 | + |
| 9 | +import os, fnmatch |
| 10 | +import re |
| 11 | +import yaml |
| 12 | +from collections.abc import Mapping |
| 13 | + |
| 14 | +from pathlib import Path |
| 15 | + |
class Binder(yaml.Loader):
    """YAML loader for device tree binding files.

    Extends ``yaml.Loader`` with ``!include``/``!import`` tags that splice
    other binding files into the document, and provides class methods to
    collect bindings for a set of compatibles and to collapse their
    ``inherits`` sections into a single merged dict.
    """

    ##
    # List of all yaml files available for yaml loaders
    # of this class. Must be preset before the first
    # load operation.
    _files = []

    ##
    # Files that are already included.
    # Must be reset on the load of every new binding
    _included = []

    @classmethod
    def _merge_binding_dicts(cls, parent, fname, dct, merge_dct):
        # from https://gist.github.com/angstwad/bf22d1822c38a92ec0a9

        """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
        updating only top-level keys, _merge_binding_dicts recurses down into dicts nested
        to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
        ``dct``.
        :param parent: parent tuple key
        :param fname: yaml file being processed
        :param dct: dict onto which the merge is executed
        :param merge_dct: dct merged into dct
        :return: None
        """
        for k, v in merge_dct.items():
            if (k in dct and isinstance(dct[k], dict)
                    and isinstance(merge_dct[k], Mapping)):
                # Both sides are mappings - descend and merge recursively.
                cls._merge_binding_dicts(k, fname, dct[k], merge_dct[k])
            else:
                if k in dct and dct[k] != merge_dct[k]:
                    # type, title, description, version of inherited node
                    # are overwritten by intention.
                    if k == 'type':
                        # Collect all type values into a list instead of
                        # overwriting.
                        if not isinstance(dct['type'], list):
                            dct['type'] = [dct['type'], ]
                        if isinstance(merge_dct['type'], list):
                            dct['type'].extend(merge_dct['type'])
                        else:
                            dct['type'].append(merge_dct['type'])
                        continue
                    elif k in ("title", "version", "description"):
                        # do not warn
                        pass
                    elif (k == "category") and (merge_dct[k] == "required") \
                            and (dct[k] == "optional"):
                        # do not warn - tightening optional to required
                        # is an expected overload.
                        pass
                    else:
                        print("binder.py: {}('{}') merge of property '{}': "
                              "'{}' overwrites '{}'."
                              .format(fname, parent, k, merge_dct[k], dct[k]))
                dct[k] = merge_dct[k]

    @classmethod
    def _traverse_inherited(cls, fname, node):
        """ Recursive overload procedure inside ``node``
        ``inherits`` section is searched for and used as node base when found.
        Base values are then overloaded by node values
        and some consistency checks are done.
        :param fname: initial yaml file being processed
        :param node: binding dict to collapse
        :return: node with all ``inherits`` sections merged in
        """

        # do some consistency checks. Especially id is needed for further
        # processing. title must be first to check.
        if 'title' not in node:
            # If 'title' is missing, make fault finding more easy.
            # Give a hint what node we are looking at.
            # (Bug fix: the original passed the '{}' placeholder through
            # print() unformatted; format the file name explicitly.)
            print("binder.py: {} node without 'title' -".format(fname), node)
        for prop in ('title', 'version', 'description'):
            if prop not in node:
                node[prop] = "<unknown {}>".format(prop)
                print("binder.py: {} WARNING:".format(fname),
                      "'{}' property missing in".format(prop),
                      "'{}' binding. Using '{}'.".format(node['title'],
                                                         node[prop]))

        # warn if we have an 'id' field
        if 'id' in node:
            print("binder.py: {} WARNING: id field set".format(fname),
                  "in '{}', should be removed.".format(node['title']))

        if 'inherits' in node:
            if isinstance(node['inherits'], list):
                inherits_list = node['inherits']
            else:
                inherits_list = [node['inherits'], ]
            node.pop('inherits')
            for inherits in inherits_list:
                if 'inherits' in inherits:
                    # Base binding itself inherits - collapse it first.
                    inherits = cls._traverse_inherited(fname, inherits)
                # Merge the current node on top of its base, then continue
                # with the merged result as the node.
                cls._merge_binding_dicts(None, fname, inherits, node)
                node = inherits
        return node

    ##
    # @brief Get bindings for given compatibles.
    #
    # @param compatibles dict of node path -> compatible (str or list of str)
    # @param bindings_paths directories to search for binding files
    # @return dictionary of bindings found, keyed by compatible string
    @classmethod
    def bindings(cls, compatibles, bindings_paths):
        # find unique set of compatibles across all active nodes
        s = set()
        for k, v in compatibles.items():
            if isinstance(v, list):
                s.update(v)
            else:
                s.add(v)

        # scan YAML files and find the ones we are interested in
        # We add our own bindings directory first (highest priority)
        # We only allow one binding file with the same name
        bindings_paths.insert(0, Path(Path(__file__).resolve().parent,
                                      'bindings'))
        cls._files = []
        binding_files = set()  # set: O(1) duplicate-name check
        for path in bindings_paths:
            for root, dirnames, filenames in os.walk(str(path)):
                for filename in fnmatch.filter(filenames, '*.yaml'):
                    if filename not in binding_files:
                        binding_files.add(filename)
                        cls._files.append(os.path.join(root, filename))

        bindings_list = {}
        file_load_list = set()
        # Raw string avoids the invalid '\s' escape warning; requiring the
        # trailing ':' fixes the original ':*' (zero-or-more colons) which
        # could match a colon-less line and crash the split below.
        constraint_re = re.compile(r'^\s+constraint:')
        for binding_file in cls._files:
            # with-block closes the scan handle (the original leaked it).
            with open(binding_file, 'r', encoding='utf-8') as scan_f:
                lines = scan_f.readlines()
            for line in lines:
                if constraint_re.search(line):
                    c = line.split(':')[1].strip()
                    c = c.strip('"')
                    if c in s:
                        if binding_file not in file_load_list:
                            file_load_list.add(binding_file)
                            with open(binding_file, 'r',
                                      encoding='utf-8') as yf:
                                cls._included = []
                                # collapse the bindings inherited
                                # information before return
                                bindings_list[c] = cls._traverse_inherited(
                                    binding_file, yaml.load(yf, cls))

        return bindings_list

    def __init__(self, stream):
        """Initialize the loader and guard against circular includes.

        :param stream: open file object of the yaml file being loaded
        :raises yaml.constructor.ConstructorError: on circular inclusion
        """
        filepath = os.path.realpath(stream.name)
        if filepath in self._included:
            print("Error:: circular inclusion for file name '{}'".
                  format(stream.name))
            raise yaml.constructor.ConstructorError
        self._included.append(filepath)
        super().__init__(stream)
        Binder.add_constructor('!include', Binder._include)
        Binder.add_constructor('!import', Binder._include)

    def _include(self, node):
        """Constructor for the ``!include``/``!import`` tags.

        Accepts a scalar (one file name), a sequence (list of file names)
        or a mapping (key -> file name) and loads the referenced files.
        """
        if isinstance(node, yaml.ScalarNode):
            return self._extract_file(self.construct_scalar(node))

        elif isinstance(node, yaml.SequenceNode):
            result = []
            for filename in self.construct_sequence(node):
                result.append(self._extract_file(filename))
            return result

        elif isinstance(node, yaml.MappingNode):
            result = {}
            # Bug fix: dict.iteritems() is Python 2 only - raised
            # AttributeError under Python 3; use items().
            for k, v in self.construct_mapping(node).items():
                result[k] = self._extract_file(v)
            return result

        else:
            print("Error: unrecognised node type in !include statement")
            raise yaml.constructor.ConstructorError

    def _extract_file(self, filename):
        """Resolve ``filename`` against the known binding files and load it.

        :param filename: base name of the yaml file to include
        :return: loaded yaml content of the first matching file
        :raises yaml.constructor.ConstructorError: if no candidate exists
        """
        filepaths = [filepath for filepath in self._files
                     if os.path.basename(filepath) == filename]
        if len(filepaths) == 0:
            print("Error: unknown file name '{}' in !include statement".
                  format(filename))
            raise yaml.constructor.ConstructorError
        elif len(filepaths) > 1:
            # multiple candidates for filename
            print("Warning: multiple candidates for file name "
                  "'{}' in !include statement - using first of".
                  format(filename), filepaths)
        with open(filepaths[0], 'r', encoding='utf-8') as f:
            return yaml.load(f, Binder)
0 commit comments