1# SPDX-License-Identifier: GPL-2.0+
2# Copyright (c) 2016 Google, Inc
3#
4# Base class for all entries
5#
6
7from collections import namedtuple
8import importlib
9import os
10import pathlib
11import sys
12import time
13
14from binman import bintool
15from binman import elf
16from dtoc import fdt_util
17from u_boot_pylib import tools
18from u_boot_pylib.tools import to_hex, to_hex_size
19from u_boot_pylib import tout
20
21modules = {}
22
23# This is imported if needed
24state = None
25
26# An argument which can be passed to entries on the command line, in lieu of
27# device-tree properties.
28EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
29
30# Information about an entry for use when displaying summaries
31EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
32                                     'image_pos', 'uncomp_size', 'offset',
33                                     'entry'])
34
35class Entry(object):
36    """An Entry in the section
37
38    An entry corresponds to a single node in the device-tree description
39    of the section. Each entry ends up being a part of the final section.
40    Entries can be placed either right next to each other, or with padding
41    between them. The type of the entry determines the data that is in it.
42
43    This class is not used by itself. All entry objects are subclasses of
44    Entry.
45
46    Attributes:
47        section: Section object containing this entry
48        node: The node that created this entry
49        offset: Offset of entry within the section, None if not known yet (in
50            which case it will be calculated by Pack())
51        size: Entry size in bytes, None if not known
52        min_size: Minimum entry size in bytes
53        pre_reset_size: size as it was before ResetForPack(). This allows us to
54            keep track of the size we started with and detect size changes
55        uncomp_size: Size of uncompressed data in bytes, if the entry is
56            compressed, else None
57        contents_size: Size of contents in bytes, 0 by default
58        align: Entry start offset alignment relative to the start of the
59            containing section, or None
60        align_size: Entry size alignment, or None
61        align_end: Entry end offset alignment relative to the start of the
62            containing section, or None
63        pad_before: Number of pad bytes before the contents when it is placed
64            in the containing section, 0 if none. The pad bytes become part of
65            the entry.
66        pad_after: Number of pad bytes after the contents when it is placed in
67            the containing section, 0 if none. The pad bytes become part of
68            the entry.
69        data: Contents of entry (string of bytes). This does not include
70            padding created by pad_before or pad_after. If the entry is
71            compressed, this contains the compressed data.
72        uncomp_data: Original uncompressed data, if this entry is compressed,
73            else None
74        compress: Compression algoithm used (e.g. 'lz4'), 'none' if none
75        orig_offset: Original offset value read from node
76        orig_size: Original size value read from node
77        missing: True if this entry is missing its contents. Note that if it is
78            optional, this entry will not appear in the list generated by
79            entry.CheckMissing() since it is considered OK for it to be missing.
80        allow_missing: Allow children of this entry to be missing (used by
81            subclasses such as Entry_section)
82        allow_fake: Allow creating a dummy fake file if the blob file is not
83            available. This is mainly used for testing.
84        external: True if this entry contains an external binary blob
85        bintools: Bintools used by this entry (only populated for Image)
86        missing_bintools: List of missing bintools for this entry
87        update_hash: True if this entry's "hash" subnode should be
88            updated with a hash of the entry contents
89        comp_bintool: Bintools used for compress and decompress data
90        fake_fname: Fake filename, if one was created, else None
91        required_props (dict of str): Properties which must be present. This can
92            be added to by subclasses
93        elf_fname (str): Filename of the ELF file, if this entry holds an ELF
94            file, or is a binary file produced from an ELF file
95        auto_write_symbols (bool): True to write ELF symbols into this entry's
96            contents
97        absent (bool): True if this entry is absent. This can be controlled by
98            the entry itself, allowing it to vanish in certain circumstances.
99            An absent entry is removed during processing so that it does not
100            appear in the map
101        optional (bool): True if this entry contains an optional external blob
102        overlap (bool): True if this entry overlaps with others
103        preserve (bool): True if this entry should be preserved when updating
104            firmware. This means that it will not be changed by the update.
105            This is just a signal: enforcement of this is up to the updater.
106            This flag does not automatically propagate down to child entries.
107        build_done (bool): Indicates that the entry data has been built and does
108            not need to be done again. This is only used with 'binman replace',
109            to stop sections from being rebuilt if their entries have not been
110            replaced
111    """
112    fake_dir = None
113
114    def __init__(self, section, etype, node, name_prefix='',
115                 auto_write_symbols=False):
116        # Put this here to allow entry-docs and help to work without libfdt
117        global state
118        from binman import state
119
120        self.section = section
121        self.etype = etype
122        self._node = node
123        self.name = node and (name_prefix + node.name) or 'none'
124        self.offset = None
125        self.size = None
126        self.min_size = 0
127        self.pre_reset_size = None
128        self.uncomp_size = None
129        self.data = None
130        self.uncomp_data = None
131        self.contents_size = 0
132        self.align = None
133        self.align_size = None
134        self.align_end = None
135        self.pad_before = 0
136        self.pad_after = 0
137        self.offset_unset = False
138        self.image_pos = None
139        self.extend_size = False
140        self.compress = 'none'
141        self.missing = False
142        self.faked = False
143        self.external = False
144        self.allow_missing = False
145        self.allow_fake = False
146        self.bintools = {}
147        self.missing_bintools = []
148        self.update_hash = True
149        self.fake_fname = None
150        self.required_props = []
151        self.comp_bintool = None
152        self.elf_fname = None
153        self.auto_write_symbols = auto_write_symbols
154        self.absent = False
155        self.optional = False
156        self.overlap = False
157        self.elf_base_sym = None
158        self.offset_from_elf = None
159        self.preserve = False
160        self.build_done = False
161        self.no_write_symbols = False
162
163    @staticmethod
164    def FindEntryClass(etype, expanded):
165        """Look up the entry class for a node.
166
167        Args:
168            node_node: Path name of Node object containing information about
169                       the entry to create (used for errors)
170            etype:   Entry type to use
171            expanded: Use the expanded version of etype
172
173        Returns:
174            The entry class object if found, else None if not found and expanded
175                is True, else a tuple:
176                    module name that could not be found
177                    exception received
178        """
179        # Convert something like 'u-boot@0' to 'u_boot' since we are only
180        # interested in the type.
181        module_name = etype.replace('-', '_')
182
183        if '@' in module_name:
184            module_name = module_name.split('@')[0]
185        if expanded:
186            module_name += '_expanded'
187        module = modules.get(module_name)
188
189        # Also allow entry-type modules to be brought in from the etype directory.
190
191        # Import the module if we have not already done so.
192        if not module:
193            try:
194                module = importlib.import_module('binman.etype.' + module_name)
195            except ImportError as e:
196                if expanded:
197                    return None
198                return module_name, e
199            modules[module_name] = module
200
201        # Look up the expected class name
202        return getattr(module, 'Entry_%s' % module_name)
203
204    @staticmethod
205    def Lookup(node_path, etype, expanded, missing_etype=False):
206        """Look up the entry class for a node.
207
208        Args:
209            node_node (str): Path name of Node object containing information
210                about the entry to create (used for errors)
211            etype (str):   Entry type to use
212            expanded (bool): Use the expanded version of etype
213            missing_etype (bool): True to default to a blob etype if the
214                requested etype is not found
215
216        Returns:
217            The entry class object if found, else None if not found and expanded
218                is True
219
220        Raise:
221            ValueError if expanded is False and the class is not found
222        """
223        # Convert something like 'u-boot@0' to 'u_boot' since we are only
224        # interested in the type.
225        cls = Entry.FindEntryClass(etype, expanded)
226        if cls is None:
227            return None
228        elif isinstance(cls, tuple):
229            if missing_etype:
230                cls = Entry.FindEntryClass('blob', False)
231            if isinstance(cls, tuple): # This should not fail
232                module_name, e = cls
233                raise ValueError(
234                    "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
235                    (etype, node_path, module_name, e))
236        return cls
237
238    @staticmethod
239    def Create(section, node, etype=None, expanded=False, missing_etype=False):
240        """Create a new entry for a node.
241
242        Args:
243            section (entry_Section):  Section object containing this node
244            node (Node): Node object containing information about the entry to
245                create
246            etype (str): Entry type to use, or None to work it out (used for
247                tests)
248            expanded (bool): Use the expanded version of etype
249            missing_etype (bool): True to default to a blob etype if the
250                requested etype is not found
251
252        Returns:
253            A new Entry object of the correct type (a subclass of Entry)
254        """
255        if not etype:
256            etype = fdt_util.GetString(node, 'type', node.name)
257        obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
258        if obj and expanded:
259            # Check whether to use the expanded entry
260            new_etype = etype + '-expanded'
261            can_expand = not fdt_util.GetBool(node, 'no-expanded')
262            if can_expand and obj.UseExpanded(node, etype, new_etype):
263                etype = new_etype
264            else:
265                obj = None
266        if not obj:
267            obj = Entry.Lookup(node.path, etype, False, missing_etype)
268
269        # Call its constructor to get the object we want.
270        return obj(section, etype, node)
271
272    def ReadNode(self):
273        """Read entry information from the node
274
275        This must be called as the first thing after the Entry is created.
276
277        This reads all the fields we recognise from the node, ready for use.
278        """
279        self.ensure_props()
280        if 'pos' in self._node.props:
281            self.Raise("Please use 'offset' instead of 'pos'")
282        if 'expand-size' in self._node.props:
283            self.Raise("Please use 'extend-size' instead of 'expand-size'")
284        self.offset = fdt_util.GetInt(self._node, 'offset')
285        self.size = fdt_util.GetInt(self._node, 'size')
286        self.min_size = fdt_util.GetInt(self._node, 'min-size', 0)
287        self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
288        self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
289        if self.GetImage().copy_to_orig:
290            self.orig_offset = self.offset
291            self.orig_size = self.size
292
293        # These should not be set in input files, but are set in an FDT map,
294        # which is also read by this code.
295        self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
296        self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
297
298        self.align = fdt_util.GetInt(self._node, 'align')
299        if tools.not_power_of_two(self.align):
300            raise ValueError("Node '%s': Alignment %s must be a power of two" %
301                             (self._node.path, self.align))
302        if self.section and self.align is None:
303            self.align = self.section.align_default
304        self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
305        self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
306        self.align_size = fdt_util.GetInt(self._node, 'align-size')
307        if tools.not_power_of_two(self.align_size):
308            self.Raise("Alignment size %s must be a power of two" %
309                       self.align_size)
310        self.align_end = fdt_util.GetInt(self._node, 'align-end')
311        self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
312        self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
313        self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
314        self.optional = fdt_util.GetBool(self._node, 'optional')
315        self.overlap = fdt_util.GetBool(self._node, 'overlap')
316        if self.overlap:
317            self.required_props += ['offset', 'size']
318
319        # This is only supported by blobs and sections at present
320        self.compress = fdt_util.GetString(self._node, 'compress', 'none')
321        self.offset_from_elf = fdt_util.GetPhandleNameOffset(self._node,
322                                                             'offset-from-elf')
323
324        self.preserve = fdt_util.GetBool(self._node, 'preserve')
325        self.no_write_symbols = fdt_util.GetBool(self._node, 'no-write-symbols')
326
327    def GetDefaultFilename(self):
328        return None
329
330    def GetFdts(self):
331        """Get the device trees used by this entry
332
333        Returns:
334            Empty dict, if this entry is not a .dtb, otherwise:
335            Dict:
336                key: Filename from this entry (without the path)
337                value: Tuple:
338                    Entry object for this dtb
339                    Filename of file containing this dtb
340        """
341        return {}
342
343    def gen_entries(self):
344        """Allow entries to generate other entries
345
346        Some entries generate subnodes automatically, from which sub-entries
347        are then created. This method allows those to be added to the binman
348        definition for the current image. An entry which implements this method
349        should call state.AddSubnode() to add a subnode and can add properties
350        with state.AddString(), etc.
351
352        An example is 'files', which produces a section containing a list of
353        files.
354        """
355        pass
356
357    def AddMissingProperties(self, have_image_pos):
358        """Add new properties to the device tree as needed for this entry
359
360        Args:
361            have_image_pos: True if this entry has an image position. This can
362                be False if its parent section is compressed, since compression
363                groups all entries together into a compressed block of data,
364                obscuring the start of each individual child entry
365        """
366        for prop in ['offset', 'size']:
367            if not prop in self._node.props:
368                state.AddZeroProp(self._node, prop)
369        if have_image_pos and 'image-pos' not in self._node.props:
370            state.AddZeroProp(self._node, 'image-pos')
371        if self.GetImage().allow_repack:
372            if self.orig_offset is not None:
373                state.AddZeroProp(self._node, 'orig-offset', True)
374            if self.orig_size is not None:
375                state.AddZeroProp(self._node, 'orig-size', True)
376
377        if self.compress != 'none':
378            state.AddZeroProp(self._node, 'uncomp-size')
379
380        if self.update_hash:
381            err = state.CheckAddHashProp(self._node)
382            if err:
383                self.Raise(err)
384
385    def SetCalculatedProperties(self):
386        """Set the value of device-tree properties calculated by binman"""
387        state.SetInt(self._node, 'offset', self.offset)
388        state.SetInt(self._node, 'size', self.size)
389        base = self.section.GetRootSkipAtStart() if self.section else 0
390        if self.image_pos is not None:
391            state.SetInt(self._node, 'image-pos', self.image_pos - base)
392        if self.GetImage().allow_repack:
393            if self.orig_offset is not None:
394                state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
395            if self.orig_size is not None:
396                state.SetInt(self._node, 'orig-size', self.orig_size, True)
397        if self.uncomp_size is not None:
398            state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
399
400        if self.update_hash:
401            state.CheckSetHashValue(self._node, self.GetData)
402
403    def ProcessFdt(self, fdt):
404        """Allow entries to adjust the device tree
405
406        Some entries need to adjust the device tree for their purposes. This
407        may involve adding or deleting properties.
408
409        Returns:
410            True if processing is complete
411            False if processing could not be completed due to a dependency.
412                This will cause the entry to be retried after others have been
413                called
414        """
415        return True
416
417    def SetPrefix(self, prefix):
418        """Set the name prefix for a node
419
420        Args:
421            prefix: Prefix to set, or '' to not use a prefix
422        """
423        if prefix:
424            self.name = prefix + self.name
425
426    def SetContents(self, data):
427        """Set the contents of an entry
428
429        This sets both the data and content_size properties
430
431        Args:
432            data: Data to set to the contents (bytes)
433        """
434        self.data = data
435        self.contents_size = len(self.data)
436
437    def ProcessContentsUpdate(self, data):
438        """Update the contents of an entry, after the size is fixed
439
440        This checks that the new data is the same size as the old. If the size
441        has changed, this triggers a re-run of the packing algorithm.
442
443        Args:
444            data: Data to set to the contents (bytes)
445
446        Raises:
447            ValueError if the new data size is not the same as the old
448        """
449        size_ok = True
450        new_size = len(data)
451        if state.AllowEntryExpansion() and new_size > self.contents_size:
452            # self.data will indicate the new size needed
453            size_ok = False
454        elif state.AllowEntryContraction() and new_size < self.contents_size:
455            size_ok = False
456
457        # If not allowed to change, try to deal with it or give up
458        if size_ok:
459            if new_size > self.contents_size:
460                self.Raise('Cannot update entry size from %d to %d' %
461                        (self.contents_size, new_size))
462
463            # Don't let the data shrink. Pad it if necessary
464            if size_ok and new_size < self.contents_size:
465                data += tools.get_bytes(0, self.contents_size - new_size)
466
467        if not size_ok:
468            tout.debug("Entry '%s' size change from %s to %s" % (
469                self._node.path, to_hex(self.contents_size),
470                to_hex(new_size)))
471        self.SetContents(data)
472        return size_ok
473
474    def ObtainContents(self, skip_entry=None, fake_size=0):
475        """Figure out the contents of an entry.
476
477        For missing blobs (where allow-missing is enabled), the contents are set
478        to b'' and self.missing is set to True.
479
480        Args:
481            skip_entry (Entry): Entry to skip when obtaining section contents
482            fake_size (int): Size of fake file to create if needed
483
484        Returns:
485            True if the contents were found, False if another call is needed
486            after the other entries are processed, None if there is no contents
487        """
488        # No contents by default: subclasses can implement this
489        return True
490
491    def ResetForPack(self):
492        """Reset offset/size fields so that packing can be done again"""
493        self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
494                    (to_hex(self.offset), to_hex(self.orig_offset),
495                     to_hex(self.size), to_hex(self.orig_size)))
496        self.pre_reset_size = self.size
497        self.offset = self.orig_offset
498        self.size = self.orig_size
499
500    def Pack(self, offset):
501        """Figure out how to pack the entry into the section
502
503        Most of the time the entries are not fully specified. There may be
504        an alignment but no size. In that case we take the size from the
505        contents of the entry.
506
507        If an entry has no hard-coded offset, it will be placed at @offset.
508
509        Once this function is complete, both the offset and size of the
510        entry will be know.
511
512        Args:
513            Current section offset pointer
514
515        Returns:
516            New section offset pointer (after this entry)
517        """
518        self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
519                    (to_hex(self.offset), to_hex(self.size),
520                     self.contents_size))
521        if self.offset is None:
522            if self.offset_unset:
523                self.Raise('No offset set with offset-unset: should another '
524                           'entry provide this correct offset?')
525            elif self.offset_from_elf:
526                self.offset = self.lookup_offset()
527            else:
528                self.offset = tools.align(offset, self.align)
529        needed = self.pad_before + self.contents_size + self.pad_after
530        needed = max(needed, self.min_size)
531        needed = tools.align(needed, self.align_size)
532        size = self.size
533        if not size:
534            size = needed
535        new_offset = self.offset + size
536        aligned_offset = tools.align(new_offset, self.align_end)
537        if aligned_offset != new_offset:
538            size = aligned_offset - self.offset
539            new_offset = aligned_offset
540
541        if not self.size:
542            self.size = size
543
544        if self.size < needed:
545            self.Raise("Entry contents size is %#x (%d) but entry size is "
546                       "%#x (%d)" % (needed, needed, self.size, self.size))
547        # Check that the alignment is correct. It could be wrong if the
548        # and offset or size values were provided (i.e. not calculated), but
549        # conflict with the provided alignment values
550        if self.size != tools.align(self.size, self.align_size):
551            self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
552                  (self.size, self.size, self.align_size, self.align_size))
553        if self.offset != tools.align(self.offset, self.align):
554            self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
555                  (self.offset, self.offset, self.align, self.align))
556        self.Detail('   - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
557                    (self.offset, self.size, self.contents_size, new_offset))
558
559        return new_offset
560
561    def Raise(self, msg):
562        """Convenience function to raise an error referencing a node"""
563        raise ValueError("Node '%s': %s" % (self._node.path, msg))
564
565    def Info(self, msg):
566        """Convenience function to log info referencing a node"""
567        tag = "Info '%s'" % self._node.path
568        tout.detail('%30s: %s' % (tag, msg))
569
570    def Detail(self, msg):
571        """Convenience function to log detail referencing a node"""
572        tag = "Node '%s'" % self._node.path
573        tout.detail('%30s: %s' % (tag, msg))
574
575    def GetEntryArgsOrProps(self, props, required=False):
576        """Return the values of a set of properties
577
578        Args:
579            props: List of EntryArg objects
580
581        Raises:
582            ValueError if a property is not found
583        """
584        values = []
585        missing = []
586        for prop in props:
587            python_prop = prop.name.replace('-', '_')
588            if hasattr(self, python_prop):
589                value = getattr(self, python_prop)
590            else:
591                value = None
592            if value is None:
593                value = self.GetArg(prop.name, prop.datatype)
594            if value is None and required:
595                missing.append(prop.name)
596            values.append(value)
597        if missing:
598            self.GetImage().MissingArgs(self, missing)
599        return values
600
601    def GetPath(self):
602        """Get the path of a node
603
604        Returns:
605            Full path of the node for this entry
606        """
607        return self._node.path
608
609    def GetData(self, required=True):
610        """Get the contents of an entry
611
612        Args:
613            required: True if the data must be present, False if it is OK to
614                return None
615
616        Returns:
617            bytes content of the entry, excluding any padding. If the entry is
618                compressed, the compressed data is returned. If the entry data
619                is not yet available, False can be returned. If the entry data
620                is null, then None is returned.
621        """
622        self.Detail('GetData: size %s' % to_hex_size(self.data))
623        return self.data
624
625    def GetPaddedData(self, data=None):
626        """Get the data for an entry including any padding
627
628        Gets the entry data and uses its section's pad-byte value to add padding
629        before and after as defined by the pad-before and pad-after properties.
630
631        This does not consider alignment.
632
633        Returns:
634            Contents of the entry along with any pad bytes before and
635            after it (bytes)
636        """
637        if data is None:
638            data = self.GetData()
639        return self.section.GetPaddedDataForEntry(self, data)
640
641    def GetOffsets(self):
642        """Get the offsets for siblings
643
644        Some entry types can contain information about the position or size of
645        other entries. An example of this is the Intel Flash Descriptor, which
646        knows where the Intel Management Engine section should go.
647
648        If this entry knows about the position of other entries, it can specify
649        this by returning values here
650
651        Returns:
652            Dict:
653                key: Entry type
654                value: List containing position and size of the given entry
655                    type. Either can be None if not known
656        """
657        return {}
658
659    def SetOffsetSize(self, offset, size):
660        """Set the offset and/or size of an entry
661
662        Args:
663            offset: New offset, or None to leave alone
664            size: New size, or None to leave alone
665        """
666        if offset is not None:
667            self.offset = offset
668        if size is not None:
669            self.size = size
670
671    def SetImagePos(self, image_pos):
672        """Set the position in the image
673
674        Args:
675            image_pos: Position of this entry in the image
676        """
677        self.image_pos = image_pos + self.offset
678
679    def ProcessContents(self):
680        """Do any post-packing updates of entry contents
681
682        This function should call ProcessContentsUpdate() to update the entry
683        contents, if necessary, returning its return value here.
684
685        Args:
686            data: Data to set to the contents (bytes)
687
688        Returns:
689            True if the new data size is OK, False if expansion is needed
690
691        Raises:
692            ValueError if the new data size is not the same as the old and
693                state.AllowEntryExpansion() is False
694        """
695        return True
696
697    def WriteSymbols(self, section):
698        """Write symbol values into binary files for access at run time
699
700        Args:
701          section: Section containing the entry
702        """
703        if self.auto_write_symbols and not self.no_write_symbols:
704            # Check if we are writing symbols into an ELF file
705            is_elf = self.GetDefaultFilename() == self.elf_fname
706            elf.LookupAndWriteSymbols(self.elf_fname, self, section.GetImage(),
707                                      is_elf, self.elf_base_sym)
708
709    def CheckEntries(self):
710        """Check that the entry offsets are correct
711
712        This is used for entries which have extra offset requirements (other
713        than having to be fully inside their section). Sub-classes can implement
714        this function and raise if there is a problem.
715        """
716        pass
717
718    @staticmethod
719    def GetStr(value):
720        if value is None:
721            return '<none>  '
722        return '%08x' % value
723
724    @staticmethod
725    def WriteMapLine(fd, indent, name, offset, size, image_pos):
726        print('%s  %s%s  %s  %s' % (Entry.GetStr(image_pos), ' ' * indent,
727                                    Entry.GetStr(offset), Entry.GetStr(size),
728                                    name), file=fd)
729
730    def WriteMap(self, fd, indent):
731        """Write a map of the entry to a .map file
732
733        Args:
734            fd: File to write the map to
735            indent: Curent indent level of map (0=none, 1=one level, etc.)
736        """
737        self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
738                          self.image_pos)
739
740    # pylint: disable=assignment-from-none
741    def GetEntries(self):
742        """Return a list of entries contained by this entry
743
744        Returns:
745            List of entries, or None if none. A normal entry has no entries
746                within it so will return None
747        """
748        return None
749
750    def FindEntryByNode(self, find_node):
751        """Find a node in an entry, searching all subentries
752
753        This does a recursive search.
754
755        Args:
756            find_node (fdt.Node): Node to find
757
758        Returns:
759            Entry: entry, if found, else None
760        """
761        entries = self.GetEntries()
762        if entries:
763            for entry in entries.values():
764                if entry._node == find_node:
765                    return entry
766                found = entry.FindEntryByNode(find_node)
767                if found:
768                    return found
769
770        return None
771
772    def GetArg(self, name, datatype=str):
773        """Get the value of an entry argument or device-tree-node property
774
775        Some node properties can be provided as arguments to binman. First check
776        the entry arguments, and fall back to the device tree if not found
777
778        Args:
779            name: Argument name
780            datatype: Data type (str or int)
781
782        Returns:
783            Value of argument as a string or int, or None if no value
784
785        Raises:
786            ValueError if the argument cannot be converted to in
787        """
788        value = state.GetEntryArg(name)
789        if value is not None:
790            if datatype == int:
791                try:
792                    value = int(value)
793                except ValueError:
794                    self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
795                               (name, value))
796            elif datatype == str:
797                pass
798            else:
799                raise ValueError("GetArg() internal error: Unknown data type '%s'" %
800                                 datatype)
801        else:
802            value = fdt_util.GetDatatype(self._node, name, datatype)
803        return value
804
805    @staticmethod
806    def WriteDocs(modules, test_missing=None):
807        """Write out documentation about the various entry types to stdout
808
809        Args:
810            modules: List of modules to include
811            test_missing: Used for testing. This is a module to report
812                as missing
813        """
814        print('''Binman Entry Documentation
815===========================
816
817This file describes the entry types supported by binman. These entry types can
818be placed in an image one by one to build up a final firmware image. It is
819fairly easy to create new entry types. Just add a new file to the 'etype'
820directory. You can use the existing entries as examples.
821
822Note that some entries are subclasses of others, using and extending their
823features to produce new behaviours.
824
825
826''')
827        modules = sorted(modules)
828
829        # Don't show the test entry
830        if '_testing' in modules:
831            modules.remove('_testing')
832        missing = []
833        for name in modules:
834            module = Entry.Lookup('WriteDocs', name, False)
835            docs = getattr(module, '__doc__')
836            if test_missing == name:
837                docs = None
838            if docs:
839                lines = docs.splitlines()
840                first_line = lines[0]
841                rest = [line[4:] for line in lines[1:]]
842                hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)
843
844                # Create a reference for use by rST docs
845                ref_name = f'etype_{module.__name__[6:]}'.lower()
846                print('.. _%s:' % ref_name)
847                print()
848                print(hdr)
849                print('-' * len(hdr))
850                print('\n'.join(rest))
851                print()
852                print()
853            else:
854                missing.append(name)
855
856        if missing:
857            raise ValueError('Documentation is missing for modules: %s' %
858                             ', '.join(missing))
859
860    def GetUniqueName(self):
861        """Get a unique name for a node
862
863        Returns:
864            String containing a unique name for a node, consisting of the name
865            of all ancestors (starting from within the 'binman' node) separated
866            by a dot ('.'). This can be useful for generating unique filesnames
867            in the output directory.
868        """
869        name = self.name
870        node = self._node
871        while node.parent:
872            node = node.parent
873            if node.name in ('binman', '/'):
874                break
875            name = '%s.%s' % (node.name, name)
876        return name
877
878    def extend_to_limit(self, limit):
879        """Extend an entry so that it ends at the given offset limit"""
880        if self.offset + self.size < limit:
881            self.size = limit - self.offset
882            # Request the contents again, since changing the size requires that
883            # the data grows. This should not fail, but check it to be sure.
884            if not self.ObtainContents():
885                self.Raise('Cannot obtain contents when expanding entry')
886
887    def HasSibling(self, name):
888        """Check if there is a sibling of a given name
889
890        Returns:
891            True if there is an entry with this name in the the same section,
892                else False
893        """
894        return name in self.section.GetEntries()
895
896    def GetSiblingImagePos(self, name):
897        """Return the image position of the given sibling
898
899        Returns:
900            Image position of sibling, or None if the sibling has no position,
901                or False if there is no such sibling
902        """
903        if not self.HasSibling(name):
904            return False
905        return self.section.GetEntries()[name].image_pos
906
907    @staticmethod
908    def AddEntryInfo(entries, indent, name, etype, size, image_pos,
909                     uncomp_size, offset, entry):
910        """Add a new entry to the entries list
911
912        Args:
913            entries: List (of EntryInfo objects) to add to
914            indent: Current indent level to add to list
915            name: Entry name (string)
916            etype: Entry type (string)
917            size: Entry size in bytes (int)
918            image_pos: Position within image in bytes (int)
919            uncomp_size: Uncompressed size if the entry uses compression, else
920                None
921            offset: Entry offset within parent in bytes (int)
922            entry: Entry object
923        """
924        entries.append(EntryInfo(indent, name, etype, size, image_pos,
925                                 uncomp_size, offset, entry))
926
927    def ListEntries(self, entries, indent):
928        """Add files in this entry to the list of entries
929
930        This can be overridden by subclasses which need different behaviour.
931
932        Args:
933            entries: List (of EntryInfo objects) to add to
934            indent: Current indent level to add to list
935        """
936        self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
937                          self.image_pos, self.uncomp_size, self.offset, self)
938
939    def ReadData(self, decomp=True, alt_format=None):
940        """Read the data for an entry from the image
941
942        This is used when the image has been read in and we want to extract the
943        data for a particular entry from that image.
944
945        Args:
946            decomp: True to decompress any compressed data before returning it;
947                False to return the raw, uncompressed data
948
949        Returns:
950            Entry data (bytes)
951        """
952        # Use True here so that we get an uncompressed section to work from,
953        # although compressed sections are currently not supported
954        tout.debug("ReadChildData section '%s', entry '%s'" %
955                   (self.section.GetPath(), self.GetPath()))
956        data = self.section.ReadChildData(self, decomp, alt_format)
957        return data
958
959    def ReadChildData(self, child, decomp=True, alt_format=None):
960        """Read the data for a particular child entry
961
962        This reads data from the parent and extracts the piece that relates to
963        the given child.
964
965        Args:
966            child (Entry): Child entry to read data for (must be valid)
967            decomp (bool): True to decompress any compressed data before
968                returning it; False to return the raw, uncompressed data
969            alt_format (str): Alternative format to read in, or None
970
971        Returns:
972            Data for the child (bytes)
973        """
974        pass
975
976    def LoadData(self, decomp=True):
977        data = self.ReadData(decomp)
978        self.contents_size = len(data)
979        self.ProcessContentsUpdate(data)
980        self.Detail('Loaded data size %x' % len(data))
981
982    def GetAltFormat(self, data, alt_format):
983        """Read the data for an extry in an alternative format
984
985        Supported formats are list in the documentation for each entry. An
986        example is fdtmap which provides .
987
988        Args:
989            data (bytes): Data to convert (this should have been produced by the
990                entry)
991            alt_format (str): Format to use
992
993        """
994        pass
995
996    def GetImage(self):
997        """Get the image containing this entry
998
999        Returns:
1000            Image object containing this entry
1001        """
1002        return self.section.GetImage()
1003
1004    def WriteData(self, data, decomp=True):
1005        """Write the data to an entry in the image
1006
1007        This is used when the image has been read in and we want to replace the
1008        data for a particular entry in that image.
1009
1010        The image must be re-packed and written out afterwards.
1011
1012        Args:
1013            data: Data to replace it with
1014            decomp: True to compress the data if needed, False if data is
1015                already compressed so should be used as is
1016
1017        Returns:
1018            True if the data did not result in a resize of this entry, False if
1019                 the entry must be resized
1020        """
1021        if self.size is not None:
1022            self.contents_size = self.size
1023        else:
1024            self.contents_size = self.pre_reset_size
1025        ok = self.ProcessContentsUpdate(data)
1026        self.build_done = False
1027        self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
1028        section_ok = self.section.WriteChildData(self)
1029        return ok and section_ok
1030
1031    def WriteChildData(self, child):
1032        """Handle writing the data in a child entry
1033
1034        This should be called on the child's parent section after the child's
1035        data has been updated. It should update any data structures needed to
1036        validate that the update is successful.
1037
1038        This base-class implementation does nothing, since the base Entry object
1039        does not have any children.
1040
1041        Args:
1042            child: Child Entry that was written
1043
1044        Returns:
1045            True if the section could be updated successfully, False if the
1046                data is such that the section could not update
1047        """
1048        self.build_done = False
1049        entry = self.section
1050
1051        # Now we must rebuild all sections above this one
1052        while entry and entry != entry.section:
1053            self.build_done = False
1054            entry = entry.section
1055
1056        return True
1057
1058    def GetSiblingOrder(self):
1059        """Get the relative order of an entry amoung its siblings
1060
1061        Returns:
1062            'start' if this entry is first among siblings, 'end' if last,
1063                otherwise None
1064        """
1065        entries = list(self.section.GetEntries().values())
1066        if entries:
1067            if self == entries[0]:
1068                return 'start'
1069            elif self == entries[-1]:
1070                return 'end'
1071        return 'middle'
1072
1073    def SetAllowMissing(self, allow_missing):
1074        """Set whether a section allows missing external blobs
1075
1076        Args:
1077            allow_missing: True if allowed, False if not allowed
1078        """
1079        # This is meaningless for anything other than sections
1080        pass
1081
1082    def SetAllowFakeBlob(self, allow_fake):
1083        """Set whether a section allows to create a fake blob
1084
1085        Args:
1086            allow_fake: True if allowed, False if not allowed
1087        """
1088        self.allow_fake = allow_fake
1089
1090    def CheckMissing(self, missing_list):
1091        """Check if the entry has missing external blobs
1092
1093        If there are missing (non-optional) blobs, the entries are added to the
1094        list
1095
1096        Args:
1097            missing_list: List of Entry objects to be added to
1098        """
1099        if self.missing and not self.optional:
1100            missing_list.append(self)
1101
1102    def check_fake_fname(self, fname, size=0):
1103        """If the file is missing and the entry allows fake blobs, fake it
1104
1105        Sets self.faked to True if faked
1106
1107        Args:
1108            fname (str): Filename to check
1109            size (int): Size of fake file to create
1110
1111        Returns:
1112            tuple:
1113                fname (str): Filename of faked file
1114                bool: True if the blob was faked, False if not
1115        """
1116        if self.allow_fake and not pathlib.Path(fname).is_file():
1117            if not self.fake_fname:
1118                outfname = os.path.join(self.fake_dir, os.path.basename(fname))
1119                with open(outfname, "wb") as out:
1120                    out.truncate(size)
1121                tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
1122                self.fake_fname = outfname
1123            self.faked = True
1124            return self.fake_fname, True
1125        return fname, False
1126
1127    def CheckFakedBlobs(self, faked_blobs_list):
1128        """Check if any entries in this section have faked external blobs
1129
1130        If there are faked blobs, the entries are added to the list
1131
1132        Args:
1133            faked_blobs_list: List of Entry objects to be added to
1134        """
1135        # This is meaningless for anything other than blobs
1136        pass
1137
1138    def CheckOptional(self, optional_list):
1139        """Check if the entry has missing but optional external blobs
1140
1141        If there are missing (optional) blobs, the entries are added to the list
1142
1143        Args:
1144            optional_list (list): List of Entry objects to be added to
1145        """
1146        if self.missing and self.optional:
1147            optional_list.append(self)
1148
1149    def GetAllowMissing(self):
1150        """Get whether a section allows missing external blobs
1151
1152        Returns:
1153            True if allowed, False if not allowed
1154        """
1155        return self.allow_missing
1156
1157    def record_missing_bintool(self, bintool):
1158        """Record a missing bintool that was needed to produce this entry
1159
1160        Args:
1161            bintool (Bintool): Bintool that was missing
1162        """
1163        if bintool not in self.missing_bintools:
1164            self.missing_bintools.append(bintool)
1165
1166    def check_missing_bintools(self, missing_list):
1167        """Check if any entries in this section have missing bintools
1168
1169        If there are missing bintools, these are added to the list
1170
1171        Args:
1172            missing_list: List of Bintool objects to be added to
1173        """
1174        for bintool in self.missing_bintools:
1175            if bintool not in missing_list:
1176                missing_list.append(bintool)
1177
1178
1179    def GetHelpTags(self):
1180        """Get the tags use for missing-blob help
1181
1182        Returns:
1183            list of possible tags, most desirable first
1184        """
1185        return list(filter(None, [self.missing_msg, self.name, self.etype]))
1186
1187    def CompressData(self, indata):
1188        """Compress data according to the entry's compression method
1189
1190        Args:
1191            indata: Data to compress
1192
1193        Returns:
1194            Compressed data
1195        """
1196        self.uncomp_data = indata
1197        if self.compress != 'none':
1198            self.uncomp_size = len(indata)
1199            if self.comp_bintool.is_present():
1200                data = self.comp_bintool.compress(indata)
1201            else:
1202                self.record_missing_bintool(self.comp_bintool)
1203                data = tools.get_bytes(0, 1024)
1204        else:
1205            data = indata
1206        return data
1207
1208    def DecompressData(self, indata):
1209        """Decompress data according to the entry's compression method
1210
1211        Args:
1212            indata: Data to decompress
1213
1214        Returns:
1215            Decompressed data
1216        """
1217        if self.compress != 'none':
1218            if self.comp_bintool.is_present():
1219                data = self.comp_bintool.decompress(indata)
1220                self.uncomp_size = len(data)
1221            else:
1222                self.record_missing_bintool(self.comp_bintool)
1223                data = tools.get_bytes(0, 1024)
1224        else:
1225            data = indata
1226        self.uncomp_data = data
1227        return data
1228
1229    @classmethod
1230    def UseExpanded(cls, node, etype, new_etype):
1231        """Check whether to use an expanded entry type
1232
1233        This is called by Entry.Create() when it finds an expanded version of
1234        an entry type (e.g. 'u-boot-expanded'). If this method returns True then
1235        it will be used (e.g. in place of 'u-boot'). If it returns False, it is
1236        ignored.
1237
1238        Args:
1239            node:     Node object containing information about the entry to
1240                      create
1241            etype:    Original entry type being used
1242            new_etype: New entry type proposed
1243
1244        Returns:
1245            True to use this entry type, False to use the original one
1246        """
1247        tout.info("Node '%s': etype '%s': %s selected" %
1248                  (node.path, etype, new_etype))
1249        return True
1250
1251    def CheckAltFormats(self, alt_formats):
1252        """Add any alternative formats supported by this entry type
1253
1254        Args:
1255            alt_formats (dict): Dict to add alt_formats to:
1256                key: Name of alt format
1257                value: Help text
1258        """
1259        pass
1260
1261    def AddBintools(self, btools):
1262        """Add the bintools used by this entry type
1263
1264        Args:
1265            btools (dict of Bintool):
1266
1267        Raise:
1268            ValueError if compression algorithm is not supported
1269        """
1270        algo = self.compress
1271        if algo != 'none':
1272            algos = ['bzip2', 'gzip', 'lz4', 'lzma', 'lzo', 'xz', 'zstd']
1273            if algo not in algos:
1274                raise ValueError("Unknown algorithm '%s'" % algo)
1275            names = {'lzma': 'lzma_alone', 'lzo': 'lzop'}
1276            name = names.get(self.compress, self.compress)
1277            self.comp_bintool = self.AddBintool(btools, name)
1278
1279    @classmethod
1280    def AddBintool(self, tools, name):
1281        """Add a new bintool to the tools used by this etype
1282
1283        Args:
1284            name: Name of the tool
1285        """
1286        btool = bintool.Bintool.create(name)
1287        tools[name] = btool
1288        return btool
1289
1290    def SetUpdateHash(self, update_hash):
1291        """Set whether this entry's "hash" subnode should be updated
1292
1293        Args:
1294            update_hash: True if hash should be updated, False if not
1295        """
1296        self.update_hash = update_hash
1297
1298    def collect_contents_to_file(self, entries, prefix, fake_size=0):
1299        """Put the contents of a list of entries into a file
1300
1301        Args:
1302            entries (list of Entry): Entries to collect
1303            prefix (str): Filename prefix of file to write to
1304            fake_size (int): Size of fake file to create if needed
1305
1306        If any entry does not have contents yet, this function returns False
1307        for the data.
1308
1309        Returns:
1310            Tuple:
1311                bytes: Concatenated data from all the entries (or None)
1312                str: Filename of file written (or None if no data)
1313                str: Unique portion of filename (or None if no data)
1314        """
1315        data = b''
1316        for entry in entries:
1317            data += entry.GetData()
1318        uniq = self.GetUniqueName()
1319        fname = tools.get_output_filename(f'{prefix}.{uniq}')
1320        tools.write_file(fname, data)
1321        return data, fname, uniq
1322
1323    @classmethod
1324    def create_fake_dir(cls):
1325        """Create the directory for fake files"""
1326        cls.fake_dir = tools.get_output_filename('binman-fake')
1327        if not os.path.exists(cls.fake_dir):
1328            os.mkdir(cls.fake_dir)
1329        tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")
1330
1331    def ensure_props(self):
1332        """Raise an exception if properties are missing
1333
1334        Args:
1335            prop_list (list of str): List of properties to check for
1336
1337        Raises:
1338            ValueError: Any property is missing
1339        """
1340        not_present = []
1341        for prop in self.required_props:
1342            if not prop in self._node.props:
1343                not_present.append(prop)
1344        if not_present:
1345            self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")
1346
1347    def mark_absent(self, msg):
1348        tout.info("Entry '%s' marked absent: %s" % (self._node.path, msg))
1349        self.absent = True
1350
1351    def read_elf_segments(self):
1352        """Read segments from an entry that can generate an ELF file
1353
1354        Returns:
1355            tuple:
1356                list of segments, each:
1357                    int: Segment number (0 = first)
1358                    int: Start address of segment in memory
1359                    bytes: Contents of segment
1360                int: entry address of ELF file
1361        """
1362        return None
1363
1364    def lookup_offset(self):
1365        node, sym_name, offset = self.offset_from_elf
1366        entry = self.section.FindEntryByNode(node)
1367        if not entry:
1368            self.Raise("Cannot find entry for node '%s'" % node.name)
1369        if not entry.elf_fname:
1370            entry.Raise("Need elf-fname property '%s'" % node.name)
1371        val = elf.GetSymbolOffset(entry.elf_fname, sym_name,
1372                                  entry.elf_base_sym)
1373        return val + offset
1374
1375    def mark_build_done(self):
1376        """Mark an entry as already built"""
1377        self.build_done = True
1378        entries = self.GetEntries()
1379        if entries:
1380            for entry in entries.values():
1381                entry.mark_build_done()
1382
1383    def UpdateSignatures(self, privatekey_fname, algo, input_fname):
1384        self.Raise('Updating signatures is not supported with this entry type')
1385