1 # SPDX-License-Identifier: GPL-2.0+
2 # Copyright (c) 2016 Google, Inc
4 # Base class for all entries
7 from collections import namedtuple
14 from binman import bintool
15 from binman import elf
16 from dtoc import fdt_util
17 from u_boot_pylib import tools
18 from u_boot_pylib.tools import to_hex, to_hex_size
19 from u_boot_pylib import tout
23 # This is imported if needed
26 # An argument which can be passed to entries on the command line, in lieu of
27 # device-tree properties.
28 EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
30 # Information about an entry for use when displaying summaries
31 EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
# NOTE(review): the EntryInfo field list is truncated in this listing; the
# trailing field(s) (original line 33, presumably 'entry') are elided — confirm
# against the full file.
32                                      'image_pos', 'uncomp_size', 'offset',
36 """An Entry in the section
38 An entry corresponds to a single node in the device-tree description
39 of the section. Each entry ends up being a part of the final section.
40 Entries can be placed either right next to each other, or with padding
41 between them. The type of the entry determines the data that is in it.
43 This class is not used by itself. All entry objects are subclasses of
47 section: Section object containing this entry
48 node: The node that created this entry
49 offset: Offset of entry within the section, None if not known yet (in
50 which case it will be calculated by Pack())
51 size: Entry size in bytes, None if not known
52 min_size: Minimum entry size in bytes
53 pre_reset_size: size as it was before ResetForPack(). This allows us to
54 keep track of the size we started with and detect size changes
55 uncomp_size: Size of uncompressed data in bytes, if the entry is
57 contents_size: Size of contents in bytes, 0 by default
58 align: Entry start offset alignment relative to the start of the
59 containing section, or None
60 align_size: Entry size alignment, or None
61 align_end: Entry end offset alignment relative to the start of the
62 containing section, or None
63 pad_before: Number of pad bytes before the contents when it is placed
64 in the containing section, 0 if none. The pad bytes become part of
66 pad_after: Number of pad bytes after the contents when it is placed in
67 the containing section, 0 if none. The pad bytes become part of
69 data: Contents of entry (string of bytes). This does not include
70 padding created by pad_before or pad_after. If the entry is
71 compressed, this contains the compressed data.
72 uncomp_data: Original uncompressed data, if this entry is compressed,
74 compress: Compression algoithm used (e.g. 'lz4'), 'none' if none
75 orig_offset: Original offset value read from node
76 orig_size: Original size value read from node
77 missing: True if this entry is missing its contents. Note that if it is
78 optional, this entry will not appear in the list generated by
79 entry.CheckMissing() since it is considered OK for it to be missing.
80 allow_missing: Allow children of this entry to be missing (used by
81 subclasses such as Entry_section)
82 allow_fake: Allow creating a dummy fake file if the blob file is not
83 available. This is mainly used for testing.
84 external: True if this entry contains an external binary blob
85 bintools: Bintools used by this entry (only populated for Image)
86 missing_bintools: List of missing bintools for this entry
87 update_hash: True if this entry's "hash" subnode should be
88 updated with a hash of the entry contents
89 comp_bintool: Bintools used for compress and decompress data
90 fake_fname: Fake filename, if one was created, else None
91 required_props (dict of str): Properties which must be present. This can
92 be added to by subclasses
93 elf_fname (str): Filename of the ELF file, if this entry holds an ELF
94 file, or is a binary file produced from an ELF file
95 auto_write_symbols (bool): True to write ELF symbols into this entry's
97 absent (bool): True if this entry is absent. This can be controlled by
98 the entry itself, allowing it to vanish in certain circumstances.
99 An absent entry is removed during processing so that it does not
101 optional (bool): True if this entry contains an optional external blob
102 overlap (bool): True if this entry overlaps with others
103 preserve (bool): True if this entry should be preserved when updating
104 firmware. This means that it will not be changed by the update.
105 This is just a signal: enforcement of this is up to the updater.
106 This flag does not automatically propagate down to child entries.
107 build_done (bool): Indicates that the entry data has been built and does
108 not need to be done again. This is only used with 'binman replace',
109 to stop sections from being rebuilt if their entries have not been
111 symbols_base (int): Use this value as the assumed load address of the
112 target entry, when calculating the symbol value. If None, this is
113 0 for blobs and the image-start address for ELF files
# Constructor: records the containing section and initialises every
# bookkeeping attribute documented in the class docstring to its default.
117 def __init__(self, section, etype, node, name_prefix='',
118 auto_write_symbols=False):
119 # Put this here to allow entry-docs and help to work without libfdt
121 from binman import state
123 self.section = section
# NOTE(review): lines 124-125, 127-129, 132, 135, 138-139, 144-145, 149, 157
# and 159 are elided from this listing — presumably they set self.etype,
# self._node, self.offset/size and the remaining defaults; confirm against
# the full file.
126 self.name = node and (name_prefix + node.name) or 'none'
130 self.pre_reset_size = None
131 self.uncomp_size = None
133 self.uncomp_data = None
134 self.contents_size = 0
136 self.align_size = None
137 self.align_end = None
140 self.offset_unset = False
141 self.image_pos = None
142 self.extend_size = False
143 self.compress = 'none'
146 self.external = False
147 self.allow_missing = False
148 self.allow_fake = False
150 self.missing_bintools = []
151 self.update_hash = True
152 self.fake_fname = None
153 self.required_props = []
154 self.comp_bintool = None
155 self.elf_fname = None
156 self.auto_write_symbols = auto_write_symbols
158 self.optional = False
160 self.elf_base_sym = None
161 self.offset_from_elf = None
162 self.preserve = False
163 self.build_done = False
164 self.no_write_symbols = False
165 self.symbols_base = None
# Maps an etype string to its implementing Python class by importing
# binman.etype.<module_name> on demand and caching it in 'modules'.
# NOTE(review): several control-flow lines are elided from this listing
# (e.g. the 'if expanded:' guard before line 190 and the 'try:' before
# line 198) — confirm against the full file.
168 def FindEntryClass(etype, expanded):
169 """Look up the entry class for a node.
172 node_node: Path name of Node object containing information about
173 the entry to create (used for errors)
174 etype: Entry type to use
175 expanded: Use the expanded version of etype
178 The entry class object if found, else None if not found and expanded
179 is True, else a tuple:
180 module name that could not be found
183 # Convert something like 'u-boot@0' to 'u_boot' since we are only
184 # interested in the type.
185 module_name = etype.replace('-', '_')
187 if '@' in module_name:
188 module_name = module_name.split('@')[0]
190 module_name += '_expanded'
191 module = modules.get(module_name)
193 # Also allow entry-type modules to be brought in from the etype directory.
195 # Import the module if we have not already done so.
198 module = importlib.import_module('binman.etype.' + module_name)
199 except ImportError as e:
202 return module_name, e
203 modules[module_name] = module
205 # Look up the expected class name
206 return getattr(module, 'Entry_%s' % module_name)
# Wrapper around FindEntryClass() that optionally falls back to the 'blob'
# etype (missing_etype=True) and raises ValueError on an unknown type.
# NOTE(review): lines 230-231, 233, 236-237 are elided (the success return,
# the blob fallback guard and the 'raise ValueError(' line) — confirm
# against the full file.
209 def Lookup(node_path, etype, expanded, missing_etype=False):
210 """Look up the entry class for a node.
213 node_node (str): Path name of Node object containing information
214 about the entry to create (used for errors)
215 etype (str): Entry type to use
216 expanded (bool): Use the expanded version of etype
217 missing_etype (bool): True to default to a blob etype if the
218 requested etype is not found
221 The entry class object if found, else None if not found and expanded
225 ValueError if expanded is False and the class is not found
227 # Convert something like 'u-boot@0' to 'u_boot' since we are only
228 # interested in the type.
229 cls = Entry.FindEntryClass(etype, expanded)
232 elif isinstance(cls, tuple):
234 cls = Entry.FindEntryClass('blob', False)
235 if isinstance(cls, tuple): # This should not fail
238 "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
239 (etype, node_path, module_name, e))
# Factory: resolves the etype for a node (from its 'type' property or its
# name), optionally substitutes the '-expanded' variant, and instantiates
# the class.
# NOTE(review): lines 245-246, 249, 251, 255-259, 262, 267-270 and 272 are
# elided from this listing (docstring parts, the 'if etype is None:' guard
# and the expanded-etype branch) — confirm against the full file.
243 def Create(section, node, etype=None, expanded=False, missing_etype=False):
244 """Create a new entry for a node.
247 section (entry_Section): Section object containing this node
248 node (Node): Node object containing information about the entry to
250 etype (str): Entry type to use, or None to work it out (used for
252 expanded (bool): Use the expanded version of etype
253 missing_etype (bool): True to default to a blob etype if the
254 requested etype is not found
257 A new Entry object of the correct type (a subclass of Entry)
260 etype = fdt_util.GetString(node, 'type', node.name)
261 obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
263 # Check whether to use the expanded entry
264 new_etype = etype + '-expanded'
265 can_expand = not fdt_util.GetBool(node, 'no-expanded')
266 if can_expand and obj.UseExpanded(node, etype, new_etype):
271 obj = Entry.Lookup(node.path, etype, False, missing_etype)
273 # Call its constructor to get the object we want.
274 return obj(section, etype, node)
277 """Read entry information from the node
279 This must be called as the first thing after the Entry is created.
281 This reads all the fields we recognise from the node, ready for use.
284 if 'pos' in self._node.props:
285 self.Raise("Please use 'offset' instead of 'pos'")
286 if 'expand-size' in self._node.props:
287 self.Raise("Please use 'extend-size' instead of 'expand-size'")
288 self.offset = fdt_util.GetInt(self._node, 'offset')
289 self.size = fdt_util.GetInt(self._node, 'size')
290 self.min_size = fdt_util.GetInt(self._node, 'min-size', 0)
291 self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
292 self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
293 if self.GetImage().copy_to_orig:
294 self.orig_offset = self.offset
295 self.orig_size = self.size
297 # These should not be set in input files, but are set in an FDT map,
298 # which is also read by this code.
299 self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
300 self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
302 self.align = fdt_util.GetInt(self._node, 'align')
303 if tools.not_power_of_two(self.align):
304 raise ValueError("Node '%s': Alignment %s must be a power of two" %
305 (self._node.path, self.align))
306 if self.section and self.align is None:
307 self.align = self.section.align_default
308 self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
309 self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
310 self.align_size = fdt_util.GetInt(self._node, 'align-size')
311 if tools.not_power_of_two(self.align_size):
312 self.Raise("Alignment size %s must be a power of two" %
314 self.align_end = fdt_util.GetInt(self._node, 'align-end')
315 self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
316 self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
317 self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
318 self.optional = fdt_util.GetBool(self._node, 'optional')
319 self.overlap = fdt_util.GetBool(self._node, 'overlap')
321 self.required_props += ['offset', 'size']
322 self.assume_size = fdt_util.GetInt(self._node, 'assume-size', 0)
324 # This is only supported by blobs and sections at present
325 self.compress = fdt_util.GetString(self._node, 'compress', 'none')
326 self.offset_from_elf = fdt_util.GetPhandleNameOffset(self._node,
329 self.preserve = fdt_util.GetBool(self._node, 'preserve')
330 self.no_write_symbols = fdt_util.GetBool(self._node, 'no-write-symbols')
331 self.symbols_base = fdt_util.GetInt(self._node, 'symbols-base')
# NOTE(review): the bodies of GetDefaultFilename() and GetFdts() (and the
# 'def GetFdts' line, original ~336) are elided from this listing, as is
# the tail of gen_entries(); confirm against the full file. All three are
# base-class hooks intended to be overridden by subclasses.
333 def GetDefaultFilename(self):
337 """Get the device trees used by this entry
340 Empty dict, if this entry is not a .dtb, otherwise:
342 key: Filename from this entry (without the path)
344 Entry object for this dtb
345 Filename of file containing this dtb
349 def gen_entries(self):
350 """Allow entries to generate other entries
352 Some entries generate subnodes automatically, from which sub-entries
353 are then created. This method allows those to be added to the binman
354 definition for the current image. An entry which implements this method
355 should call state.AddSubnode() to add a subnode and can add properties
356 with state.AddString(), etc.
358 An example is 'files', which produces a section containing a list of
# Ensures the node carries placeholder (zero) properties for every value
# binman will fill in later (offset/size/image-pos, repack originals,
# uncomp-size, hash).
# NOTE(review): lines 382, 385-386 and 388-390 are elided — presumably
# the 'if self.update_hash:' guard and error handling around
# CheckAddHashProp(); confirm against the full file.
363 def AddMissingProperties(self, have_image_pos):
364 """Add new properties to the device tree as needed for this entry
367 have_image_pos: True if this entry has an image position. This can
368 be False if its parent section is compressed, since compression
369 groups all entries together into a compressed block of data,
370 obscuring the start of each individual child entry
372 for prop in ['offset', 'size']:
373 if not prop in self._node.props:
374 state.AddZeroProp(self._node, prop)
375 if have_image_pos and 'image-pos' not in self._node.props:
376 state.AddZeroProp(self._node, 'image-pos')
377 if self.GetImage().allow_repack:
378 if self.orig_offset is not None:
379 state.AddZeroProp(self._node, 'orig-offset', True)
380 if self.orig_size is not None:
381 state.AddZeroProp(self._node, 'orig-size', True)
383 if self.compress != 'none':
384 state.AddZeroProp(self._node, 'uncomp-size')
387 err = state.CheckAddHashProp(self._node)
391 def SetCalculatedProperties(self):
392 """Set the value of device-tree properties calculated by binman"""
393 state.SetInt(self._node, 'offset', self.offset)
394 state.SetInt(self._node, 'size', self.size)
# image-pos is stored relative to the root section's skip-at-start base.
395 base = self.section.GetRootSkipAtStart() if self.section else 0
396 if self.image_pos is not None:
397 state.SetInt(self._node, 'image-pos', self.image_pos - base)
398 if self.GetImage().allow_repack:
399 if self.orig_offset is not None:
400 state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
401 if self.orig_size is not None:
402 state.SetInt(self._node, 'orig-size', self.orig_size, True)
403 if self.uncomp_size is not None:
404 state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
# NOTE(review): lines 405-406 are elided — presumably the
# 'if self.update_hash:' guard before the hash update; confirm.
407 state.CheckSetHashValue(self._node, self.GetData)
# NOTE(review): this listing elides the 'return True' of ProcessFdt()
# (original ~420), the 'if prefix:' guard in SetPrefix() (original ~429)
# and the 'self.data = data' line of SetContents() (original ~440) —
# confirm against the full file.
409 def ProcessFdt(self, fdt):
410 """Allow entries to adjust the device tree
412 Some entries need to adjust the device tree for their purposes. This
413 may involve adding or deleting properties.
416 True if processing is complete
417 False if processing could not be completed due to a dependency.
418 This will cause the entry to be retried after others have been
423 def SetPrefix(self, prefix):
424 """Set the name prefix for a node
427 prefix: Prefix to set, or '' to not use a prefix
430 self.name = prefix + self.name
432 def SetContents(self, data):
433 """Set the contents of an entry
435 This sets both the data and content_size properties
438 data: Data to set to the contents (bytes)
441 self.contents_size = len(self.data)
# Core size-change policy for late content updates: grow/shrink if the
# packing state allows it, otherwise raise (on growth) or pad (on shrink).
# NOTE(review): many lines are elided here (the 'new_size = len(data)'
# setup, the size_ok bookkeeping on expansion/contraction, the 'else:'
# arms and the final 'return size_ok') — confirm against the full file.
443 def ProcessContentsUpdate(self, data):
444 """Update the contents of an entry, after the size is fixed
446 This checks that the new data is the same size as the old. If the size
447 has changed, this triggers a re-run of the packing algorithm.
450 data: Data to set to the contents (bytes)
453 ValueError if the new data size is not the same as the old
457 if state.AllowEntryExpansion() and new_size > self.contents_size:
458 # self.data will indicate the new size needed
460 elif state.AllowEntryContraction() and new_size < self.contents_size:
463 # If not allowed to change, try to deal with it or give up
465 if new_size > self.contents_size:
466 self.Raise('Cannot update entry size from %d to %d' %
467 (self.contents_size, new_size))
469 # Don't let the data shrink. Pad it if necessary
470 if size_ok and new_size < self.contents_size:
471 data += tools.get_bytes(0, self.contents_size - new_size)
474 tout.debug("Entry '%s' size change from %s to %s" % (
475 self._node.path, to_hex(self.contents_size),
477 self.SetContents(data)
# Base-class hook: subclasses override this to fetch their data.
# NOTE(review): the default body (original ~495-496, presumably
# 'self.SetContents(b"")' / 'return True') is elided — confirm.
480 def ObtainContents(self, skip_entry=None, fake_size=0):
481 """Figure out the contents of an entry.
483 For missing blobs (where allow-missing is enabled), the contents are set
484 to b'' and self.missing is set to True.
487 skip_entry (Entry): Entry to skip when obtaining section contents
488 fake_size (int): Size of fake file to create if needed
491 True if the contents were found, False if another call is needed
492 after the other entries are processed, None if there is no contents
494 # No contents by default: subclasses can implement this
def ResetForPack(self):
    """Reset offset/size fields so that packing can be done again"""
    self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
                (to_hex(self.offset), to_hex(self.orig_offset),
                 to_hex(self.size), to_hex(self.orig_size)))
    # Remember the size we had before the reset so that later size
    # changes can be detected (see pre_reset_size in the class docs)
    self.pre_reset_size = self.size
    # Revert offset/size to the values originally read from the node
    self.offset, self.size = self.orig_offset, self.orig_size
# Computes this entry's final offset/size within its section, honouring
# align / align-size / align-end / pad-before / pad-after / min-size.
# NOTE(review): several lines are elided here (the plain 'else:' before
# line 534, the 'size = ...' selection around 538-540, the assignment of
# self.size around 546-549, and the final 'return new_offset' around
# 564-565) — confirm against the full file.
506 def Pack(self, offset):
507 """Figure out how to pack the entry into the section
509 Most of the time the entries are not fully specified. There may be
510 an alignment but no size. In that case we take the size from the
511 contents of the entry.
513 If an entry has no hard-coded offset, it will be placed at @offset.
515 Once this function is complete, both the offset and size of the
519 Current section offset pointer
522 New section offset pointer (after this entry)
524 self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
525 (to_hex(self.offset), to_hex(self.size),
527 if self.offset is None:
528 if self.offset_unset:
529 self.Raise('No offset set with offset-unset: should another '
530 'entry provide this correct offset?')
531 elif self.offset_from_elf:
532 self.offset = self.lookup_offset()
534 self.offset = tools.align(offset, self.align)
535 needed = self.pad_before + self.contents_size + self.pad_after
536 needed = max(needed, self.min_size)
537 needed = tools.align(needed, self.align_size)
541 new_offset = self.offset + size
542 aligned_offset = tools.align(new_offset, self.align_end)
543 if aligned_offset != new_offset:
544 size = aligned_offset - self.offset
545 new_offset = aligned_offset
550 if self.size < needed:
551 self.Raise("Entry contents size is %#x (%d) but entry size is "
552 "%#x (%d)" % (needed, needed, self.size, self.size))
553 # Check that the alignment is correct. It could be wrong if the
554 # and offset or size values were provided (i.e. not calculated), but
555 # conflict with the provided alignment values
556 if self.size != tools.align(self.size, self.align_size):
557 self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
558 (self.size, self.size, self.align_size, self.align_size))
559 if self.offset != tools.align(self.offset, self.align):
560 self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
561 (self.offset, self.offset, self.align, self.align))
562 self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
563 (self.offset, self.size, self.contents_size, new_offset))
def Raise(self, msg):
    """Raise a ValueError that identifies this entry's node.

    Args:
        msg (str): Description of the problem

    Raises:
        ValueError: always; the message is prefixed with the node path
    """
    text = "Node '%s': %s" % (self._node.path, msg)
    raise ValueError(text)
572 """Convenience function to log info referencing a node"""
573 tag = "Info '%s'" % self._node.path
574 tout.detail('%30s: %s' % (tag, msg))
def Detail(self, msg):
    """Log a detail-level message, tagged with this entry's node path.

    Args:
        msg (str): Message to log
    """
    tout.detail('%30s: %s' % ("Node '%s'" % self._node.path, msg))
# Resolves a list of EntryArg values, preferring an attribute already set
# on the entry, then the command-line entry args; missing required args
# are reported via the image.
# NOTE(review): lines 595-600, 604-606, 610-611 and 613-614 are elided
# (the values/missing list setup, the 'else:' arm, the append and the
# final return) — confirm against the full file. The 'def GetPath' line
# (original ~615) is also elided before line 616.
581 def GetEntryArgsOrProps(self, props, required=False):
582 """Return the values of a set of properties
584 Looks up the named entryargs and returns the value for each. If any
585 required ones are missing, the error is reported to the user.
588 props (list of EntryArg): List of entry arguments to look up
589 required (bool): True if these entry arguments are required
592 list of values: one for each item in props, the type is determined
593 by the EntryArg's 'datatype' property (str or int)
596 ValueError if a property is not found
601 python_prop = prop.name.replace('-', '_')
602 if hasattr(self, python_prop):
603 value = getattr(self, python_prop)
607 value = self.GetArg(prop.name, prop.datatype)
608 if value is None and required:
609 missing.append(prop.name)
612 self.GetImage().MissingArgs(self, missing)
616 """Get the path of a node
619 Full path of the node for this entry
621 return self._node.path
# NOTE(review): the final 'return self.data' (original ~637-638) is elided
# from this listing — confirm against the full file.
623 def GetData(self, required=True):
624 """Get the contents of an entry
627 required: True if the data must be present, False if it is OK to
631 bytes content of the entry, excluding any padding. If the entry is
632 compressed, the compressed data is returned. If the entry data
633 is not yet available, False can be returned. If the entry data
634 is null, then None is returned.
636 self.Detail('GetData: size %s' % to_hex_size(self.data))
# Delegates padding to the containing section's pad-byte logic.
# NOTE(review): the 'if data is None:' guard (original ~651) before the
# GetData() fallback is elided — confirm against the full file.
639 def GetPaddedData(self, data=None):
640 """Get the data for an entry including any padding
642 Gets the entry data and uses its section's pad-byte value to add padding
643 before and after as defined by the pad-before and pad-after properties.
645 This does not consider alignment.
648 Contents of the entry along with any pad bytes before and
652 data = self.GetData()
653 return self.section.GetPaddedDataForEntry(self, data)
# Base-class hook for entries (e.g. Intel descriptor) that dictate where
# sibling entries must go.
# NOTE(review): the default body (original ~671, presumably 'return {}')
# is elided — confirm against the full file.
655 def GetOffsets(self):
656 """Get the offsets for siblings
658 Some entry types can contain information about the position or size of
659 other entries. An example of this is the Intel Flash Descriptor, which
660 knows where the Intel Management Engine section should go.
662 If this entry knows about the position of other entries, it can specify
663 this by returning values here
668 value: List containing position and size of the given entry
669 type. Either can be None if not known
# NOTE(review): the assignment bodies (original ~681-684, presumably
# 'self.offset = offset' and the matching size branch) are elided from
# this listing — confirm against the full file.
673 def SetOffsetSize(self, offset, size):
674 """Set the offset and/or size of an entry
677 offset: New offset, or None to leave alone
678 size: New size, or None to leave alone
680 if offset is not None:
def SetImagePos(self, image_pos):
    """Record this entry's absolute position within the image.

    The entry position is the given base position plus this entry's
    offset within its containing section.

    Args:
        image_pos (int): Position of the containing section in the image
    """
    self.image_pos = self.offset + image_pos
# Base-class hook: default implementation's 'return True' (original ~710)
# is elided from this listing — confirm against the full file.
693 def ProcessContents(self):
694 """Do any post-packing updates of entry contents
696 This function should call ProcessContentsUpdate() to update the entry
697 contents, if necessary, returning its return value here.
700 data: Data to set to the contents (bytes)
703 True if the new data size is OK, False if expansion is needed
706 ValueError if the new data size is not the same as the old and
707 state.AllowEntryExpansion() is False
# Writes binman symbol values into the entry's binary, unless disabled
# via no-write-symbols.
# NOTE(review): lines 726-727 are elided — presumably setting
# symbols_base = 0 for the end-at-4gb special case described in the
# docstring; confirm against the full file.
711 def WriteSymbols(self, section):
712 """Write symbol values into binary files for access at run time
714 As a special case, if symbols_base is not specified and this is an
715 end-at-4gb image, a symbols_base of 0 is used
718 section: Section containing the entry
720 if self.auto_write_symbols and not self.no_write_symbols:
721 # Check if we are writing symbols into an ELF file
722 is_elf = self.GetDefaultFilename() == self.elf_fname
724 symbols_base = self.symbols_base
725 if symbols_base is None and self.GetImage()._end_4gb:
728 elf.LookupAndWriteSymbols(self.elf_fname, self, section.GetImage(),
729 is_elf, self.elf_base_sym, symbols_base)
731 def CheckEntries(self):
732 """Check that the entry offsets are correct
734 This is used for entries which have extra offset requirements (other
735 than having to be fully inside their section). Sub-classes can implement
736 this function and raise if there is a problem.
# NOTE(review): the GetStr() definition (original ~740-743) is elided; line
# 744 below is its tail. The @staticmethod decorator for WriteMapLine and
# the final continuation line of its print() call (original ~750) are also
# elided — confirm against the full file.
744 return '%08x' % value
747 def WriteMapLine(fd, indent, name, offset, size, image_pos):
748 print('%s %s%s %s %s' % (Entry.GetStr(image_pos), ' ' * indent,
749 Entry.GetStr(offset), Entry.GetStr(size),
# NOTE(review): the continuation of the WriteMapLine call (original ~760,
# presumably passing self.image_pos) and the 'return None' of GetEntries()
# (original ~770) are elided — confirm against the full file.
752 def WriteMap(self, fd, indent):
753 """Write a map of the entry to a .map file
756 fd: File to write the map to
757 indent: Current indent level of map (0=none, 1=one level, etc.)
759 self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
762 # pylint: disable=assignment-from-none
763 def GetEntries(self):
764 """Return a list of entries contained by this entry
767 List of entries, or None if none. A normal entry has no entries
768 within it so will return None
# Recursive search for the entry owning a given fdt node.
# NOTE(review): lines 784, 787 and 789-792 are elided (the 'if entries:'
# guard, 'return entry', the found-check and the final 'return None') —
# confirm against the full file.
772 def FindEntryByNode(self, find_node):
773 """Find a node in an entry, searching all subentries
775 This does a recursive search.
778 find_node (fdt.Node): Node to find
781 Entry: entry, if found, else None
783 entries = self.GetEntries()
785 for entry in entries.values():
786 if entry._node == find_node:
788 found = entry.FindEntryByNode(find_node)
# Looks up an entry argument first, then falls back to the device-tree
# property, converting to the requested datatype.
# NOTE(review): the int-conversion try/except body (original ~812-815),
# the str branch body and the final 'return value' are elided — confirm
# against the full file. Note the docstring typo 'to in' (original ~808)
# presumably means 'to int'.
794 def GetArg(self, name, datatype=str):
795 """Get the value of an entry argument or device-tree-node property
797 Some node properties can be provided as arguments to binman. First check
798 the entry arguments, and fall back to the device tree if not found
802 datatype: Data type (str or int)
805 Value of argument as a string or int, or None if no value
808 ValueError if the argument cannot be converted to in
810 value = state.GetEntryArg(name)
811 if value is not None:
816 self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
818 elif datatype == str:
821 raise ValueError("GetArg() internal error: Unknown data type '%s'" %
824 value = fdt_util.GetDatatype(self._node, name, datatype)
# Emits rST documentation for all entry-type modules to stdout, collecting
# modules whose docstrings are missing and raising at the end.
# NOTE(review): many lines are elided here (the @staticmethod decorator,
# the per-module loop header before line 856, the missing-module
# bookkeeping around 858-860, the header/blank prints around 869-878 and
# the raise argument tail) — confirm against the full file.
828 def WriteDocs(modules, test_missing=None):
829 """Write out documentation about the various entry types to stdout
832 modules: List of modules to include
833 test_missing: Used for testing. This is a module to report
836 print('''Binman Entry Documentation
837 ==========================
839 This file describes the entry types supported by binman. These entry types can
840 be placed in an image one by one to build up a final firmware image. It is
841 fairly easy to create new entry types. Just add a new file to the 'etype'
842 directory. You can use the existing entries as examples.
844 Note that some entries are subclasses of others, using and extending their
845 features to produce new behaviours.
849 modules = sorted(modules)
851 # Don't show the test entry
852 if '_testing' in modules:
853 modules.remove('_testing')
856 module = Entry.Lookup('WriteDocs', name, False)
857 docs = getattr(module, '__doc__')
858 if test_missing == name:
861 lines = docs.splitlines()
862 first_line = lines[0]
863 rest = [line[4:] for line in lines[1:]]
864 hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)
866 # Create a reference for use by rST docs
867 ref_name = f'etype_{module.__name__[6:]}'.lower()
868 print('.. _%s:' % ref_name)
871 print('-' * len(hdr))
872 print('\n'.join(rest))
879 raise ValueError('Documentation is missing for modules: %s' %
# Builds a dotted ancestor path, walking up from this entry's node until
# the 'binman' or root node is reached.
# NOTE(review): the loop setup (original ~890-894, presumably initialising
# name/node and the 'while node.parent:' loop), the 'break' and the final
# 'return name' are elided — confirm against the full file.
882 def GetUniqueName(self):
883 """Get a unique name for a node
886 String containing a unique name for a node, consisting of the name
887 of all ancestors (starting from within the 'binman' node) separated
888 by a dot ('.'). This can be useful for generating unique filenames
889 in the output directory.
895 if node.name in ('binman', '/'):
897 name = '%s.%s' % (node.name, name)
def extend_to_limit(self, limit):
    """Extend an entry so that it ends at the given offset limit"""
    end = self.offset + self.size
    if end >= limit:
        return
    self.size = limit - self.offset
    # Request the contents again, since changing the size requires that
    # the data grows. This should not fail, but check it to be sure.
    if not self.ObtainContents():
        self.Raise('Cannot obtain contents when expanding entry')
def HasSibling(self, name):
    """Check if there is a sibling of a given name

    Args:
        name: Name to look for

    Returns:
        True if an entry of that name exists in the same section,
            False if not
    """
    siblings = self.section.GetEntries()
    return name in siblings
def GetSiblingImagePos(self, name):
    """Return the image position of the given sibling

    Args:
        name: Name of the sibling entry to look up

    Returns:
        Image position of sibling, or None if the sibling has no position,
            or False if there is no such sibling
    """
    # Distinguish 'no such sibling' (False) from 'sibling not yet
    # positioned' (None), as documented above
    if not self.HasSibling(name):
        return False
    return self.section.GetEntries()[name].image_pos
# NOTE(review): the @staticmethod decorator (original ~929) and docstring
# lines 942/944-945 are elided from this listing — confirm against the
# full file before treating this as an instance method.
930 def AddEntryInfo(entries, indent, name, etype, size, image_pos,
931 uncomp_size, offset, entry):
932 """Add a new entry to the entries list
935 entries: List (of EntryInfo objects) to add to
936 indent: Current indent level to add to list
937 name: Entry name (string)
938 etype: Entry type (string)
939 size: Entry size in bytes (int)
940 image_pos: Position within image in bytes (int)
941 uncomp_size: Uncompressed size if the entry uses compression, else
943 offset: Entry offset within parent in bytes (int)
946 entries.append(EntryInfo(indent, name, etype, size, image_pos,
947 uncomp_size, offset, entry))
# Default listing behaviour: record just this entry via AddEntryInfo().
949 def ListEntries(self, entries, indent):
950 """Add files in this entry to the list of entries
952 This can be overridden by subclasses which need different behaviour.
955 entries: List (of EntryInfo objects) to add to
956 indent: Current indent level to add to list
958 self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
959 self.image_pos, self.uncomp_size, self.offset, self)
# NOTE(review): the tail of ReadData() (original ~979-980, presumably a
# Detail() log and 'return data') and the 'raise NotImplementedError'-style
# tail of ReadChildData() (original ~995-996) are elided — confirm against
# the full file.
961 def ReadData(self, decomp=True, alt_format=None):
962 """Read the data for an entry from the image
964 This is used when the image has been read in and we want to extract the
965 data for a particular entry from that image.
968 decomp: True to decompress any compressed data before returning it;
969 False to return the raw, uncompressed data
974 # Use True here so that we get an uncompressed section to work from,
975 # although compressed sections are currently not supported
976 tout.debug("ReadChildData section '%s', entry '%s'" %
977 (self.section.GetPath(), self.GetPath()))
978 data = self.section.ReadChildData(self, decomp, alt_format)
981 def ReadChildData(self, child, decomp=True, alt_format=None):
982 """Read the data for a particular child entry
984 This reads data from the parent and extracts the piece that relates to
988 child (Entry): Child entry to read data for (must be valid)
989 decomp (bool): True to decompress any compressed data before
990 returning it; False to return the raw, uncompressed data
991 alt_format (str): Alternative format to read in, or None
994 Data for the child (bytes)
def LoadData(self, decomp=True):
    """Load this entry's data back from the image it was read from.

    Reads the data (optionally decompressing), records the content size
    and pushes the data through ProcessContentsUpdate().

    Args:
        decomp (bool): True to decompress compressed data first
    """
    data = self.ReadData(decomp)
    size = len(data)
    self.contents_size = size
    self.ProcessContentsUpdate(data)
    self.Detail('Loaded data size %x' % size)
# NOTE(review): the default body of GetAltFormat() (original ~1014-1017)
# and the 'def GetImage(self):' line (original ~1018) are elided from this
# listing; line 1019 below is part of GetImage()'s docstring — confirm
# against the full file.
1004 def GetAltFormat(self, data, alt_format):
1005 """Read the data for an extry in an alternative format
1007 Supported formats are list in the documentation for each entry. An
1008 example is fdtmap which provides .
1011 data (bytes): Data to convert (this should have been produced by the
1013 alt_format (str): Format to use
1019 """Get the image containing this entry
1022 Image object containing this entry
1024 return self.section.GetImage()
# Replaces this entry's data in a previously read image; the parent is
# told via WriteChildData() so sections can rebuild.
# NOTE(review): the 'else:' between lines 1044 and 1046 (original ~1045)
# is elided — the pre_reset_size assignment is presumably the else-arm of
# the size check; confirm against the full file.
1026 def WriteData(self, data, decomp=True):
1027 """Write the data to an entry in the image
1029 This is used when the image has been read in and we want to replace the
1030 data for a particular entry in that image.
1032 The image must be re-packed and written out afterwards.
1035 data: Data to replace it with
1036 decomp: True to compress the data if needed, False if data is
1037 already compressed so should be used as is
1040 True if the data did not result in a resize of this entry, False if
1041 the entry must be resized
1043 if self.size is not None:
1044 self.contents_size = self.size
1046 self.contents_size = self.pre_reset_size
1047 ok = self.ProcessContentsUpdate(data)
1048 self.build_done = False
1049 self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
1050 section_ok = self.section.WriteChildData(self)
1051 return ok and section_ok
# Marks this entry and all ancestor sections as needing a rebuild after a
# child's data changed.
# NOTE(review): the final 'return True' (original ~1078) is elided from
# this listing — confirm against the full file. Note the walk-up loop sets
# self.build_done rather than entry.build_done; whether that is intended
# cannot be determined from this listing.
1053 def WriteChildData(self, child):
1054 """Handle writing the data in a child entry
1056 This should be called on the child's parent section after the child's
1057 data has been updated. It should update any data structures needed to
1058 validate that the update is successful.
1060 This base-class implementation does nothing, since the base Entry object
1061 does not have any children.
1064 child: Child Entry that was written
1067 True if the section could be updated successfully, False if the
1068 data is such that the section could not update
1070 self.build_done = False
1071 entry = self.section
1073 # Now we must rebuild all sections above this one
1074 while entry and entry != entry.section:
1075 self.build_done = False
1076 entry = entry.section
def GetSiblingOrder(self):
    """Get the relative order of an entry among its siblings

    Returns:
        'start' if this entry is first among siblings, 'end' if last,
            otherwise 'middle'
    """
    entries = list(self.section.GetEntries().values())
    if entries:
        if self == entries[0]:
            return 'start'
        elif self == entries[-1]:
            return 'end'
    return 'middle'
def SetAllowMissing(self, allow_missing):
    """Set whether a section allows missing external blobs

    Args:
        allow_missing: True if allowed, False if not allowed
    """
    # This is meaningless for anything other than sections
    pass
def SetAllowFakeBlob(self, allow_fake):
    """Set whether this entry may create a fake blob if one is missing

    Args:
        allow_fake: True if allowed, False if not allowed
    """
    # Record the flag; it is consulted later by check_fake_fname()
    self.allow_fake = allow_fake
def CheckMissing(self, missing_list):
    """Check if the entry has missing external blobs

    If there are missing (non-optional) blobs, the entries are added to the
    list.

    Args:
        missing_list: List of Entry objects to be added to
    """
    required_and_absent = self.missing and not self.optional
    if required_and_absent:
        missing_list.append(self)
def check_fake_fname(self, fname, size=0):
    """If the file is missing and the entry allows fake blobs, fake it

    Sets self.faked to True if faked

    Args:
        fname (str): Filename to check
        size (int): Size of fake file to create

    Returns:
        tuple:
            fname (str): Filename of faked file
            bool: True if the blob was faked, False if not
    """
    if self.allow_fake and not pathlib.Path(fname).is_file():
        if not self.fake_fname:
            outfname = os.path.join(self.fake_dir, os.path.basename(fname))
            with open(outfname, "wb") as out:
                # Extend the empty file to the requested size (zero-filled)
                out.truncate(size)
            tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
            self.fake_fname = outfname
        self.faked = True
        return self.fake_fname, True
    # File exists (or faking not allowed): use it as-is
    return fname, False
def CheckFakedBlobs(self, faked_blobs_list):
    """Check if any entries in this section have faked external blobs

    If there are faked blobs, the entries are added to the list

    Args:
        faked_blobs_list: List of Entry objects to be added to
    """
    # This is meaningless for anything other than blobs
    pass
def CheckOptional(self, optional_list):
    """Check if the entry has missing but optional external blobs

    If there are missing (optional) blobs, the entries are added to the list

    Args:
        optional_list (list): List of Entry objects to be added to
    """
    absent_but_optional = self.missing and self.optional
    if absent_but_optional:
        optional_list.append(self)
def GetAllowMissing(self):
    """Get whether a section allows missing external blobs

    Returns:
        True if allowed, False if not allowed
    """
    allowed = self.allow_missing
    return allowed
def record_missing_bintool(self, bintool):
    """Record a missing bintool that was needed to produce this entry

    Args:
        bintool (Bintool): Bintool that was missing
    """
    # Avoid recording the same tool twice
    already_recorded = bintool in self.missing_bintools
    if not already_recorded:
        self.missing_bintools.append(bintool)
def check_missing_bintools(self, missing_list):
    """Check if any entries in this section have missing bintools

    If there are missing bintools, these are added to the list

    Args:
        missing_list: List of Bintool objects to be added to
    """
    # Merge this entry's missing tools into the caller's list, de-duplicated
    for btool in self.missing_bintools:
        if btool not in missing_list:
            missing_list.append(btool)
def GetHelpTags(self):
    """Get the tags use for missing-blob help

    Returns:
        list of possible tags, most desirable first
    """
    # Drop any tags which are unset (None or empty)
    candidates = (self.missing_msg, self.name, self.etype)
    return [tag for tag in candidates if tag]
def CompressData(self, indata):
    """Compress data according to the entry's compression method

    Args:
        indata: Data to compress

    Returns:
        Compressed data
    """
    self.uncomp_data = indata
    if self.compress != 'none':
        self.uncomp_size = len(indata)
        if self.comp_bintool.is_present():
            data = self.comp_bintool.compress(indata)
            # Write a copy of the compressed data to the output directory
            uniq = self.GetUniqueName()
            fname = tools.get_output_filename(f'comp.{uniq}')
            tools.write_file(fname, data)
        else:
            # Tool is missing: record it and substitute placeholder bytes
            self.record_missing_bintool(self.comp_bintool)
            data = tools.get_bytes(0, 1024)
    else:
        data = indata
    return data
def DecompressData(self, indata):
    """Decompress data according to the entry's compression method

    Args:
        indata: Data to decompress

    Returns:
        Decompressed data
    """
    if self.compress != 'none':
        if self.comp_bintool.is_present():
            data = self.comp_bintool.decompress(indata)
            self.uncomp_size = len(data)
        else:
            # Tool is missing: record it and substitute placeholder bytes
            self.record_missing_bintool(self.comp_bintool)
            data = tools.get_bytes(0, 1024)
    else:
        data = indata
    self.uncomp_data = data
    return data
@classmethod
def UseExpanded(cls, node, etype, new_etype):
    """Check whether to use an expanded entry type

    This is called by Entry.Create() when it finds an expanded version of
    an entry type (e.g. 'u-boot-expanded'). If this method returns True then
    it will be used (e.g. in place of 'u-boot'). If it returns False, it is
    not used.

    Args:
        node: Node object containing information about the entry to
            create
        etype: Original entry type being used
        new_etype: New entry type proposed

    Returns:
        True to use this entry type, False to use the original one
    """
    tout.info("Node '%s': etype '%s': %s selected" %
              (node.path, etype, new_etype))
    return True
def CheckAltFormats(self, alt_formats):
    """Add any alternative formats supported by this entry type

    Args:
        alt_formats (dict): Dict to add alt_formats to:
            key: Name of alt format
            value: Help text
    """
    # Base class supports no alternative formats
    pass
def AddBintools(self, btools):
    """Add the bintools used by this entry type

    Args:
        btools (dict of Bintool): Bintools to add to

    Raises:
        ValueError: Compression algorithm is not supported
    """
    algo = self.compress
    if algo != 'none':
        algos = ['bzip2', 'gzip', 'lz4', 'lzma', 'lzo', 'xz', 'zstd']
        if algo not in algos:
            raise ValueError("Unknown algorithm '%s'" % algo)
        # Some algorithms use a bintool with a different name
        names = {'lzma': 'lzma_alone', 'lzo': 'lzop'}
        name = names.get(self.compress, self.compress)
        self.comp_bintool = self.AddBintool(btools, name)
def AddBintool(self, tools, name):
    """Add a new bintool to the tools used by this etype

    Args:
        tools (dict of Bintool): Tool dict to add the new tool to
        name: Name of the tool

    Returns:
        Bintool: Tool that was created and added
    """
    btool = bintool.Bintool.create(name)
    tools[name] = btool
    return btool
def SetUpdateHash(self, update_hash):
    """Set whether this entry's "hash" subnode should be updated

    Args:
        update_hash: True if hash should be updated, False if not
    """
    # Record the flag for later use when processing the entry
    self.update_hash = update_hash
def collect_contents_to_file(self, entries, prefix, fake_size=0):
    """Put the contents of a list of entries into a file

    Args:
        entries (list of Entry): Entries to collect
        prefix (str): Filename prefix of file to write to
        fake_size (int): Size of fake file to create if needed

    If any entry does not have contents yet, this function returns False
    for the data.

    Returns:
        tuple:
            bytes: Concatenated data from all the entries (or None)
            str: Filename of file written (or None if no data)
            str: Unique portion of filename (or None if no data)
    """
    data = b''
    for entry in entries:
        data += entry.GetData()
    uniq = self.GetUniqueName()
    fname = tools.get_output_filename(f'{prefix}.{uniq}')
    tools.write_file(fname, data)
    return data, fname, uniq
@classmethod
def create_fake_dir(cls):
    """Create the directory for fake files"""
    cls.fake_dir = tools.get_output_filename('binman-fake')
    if not os.path.exists(cls.fake_dir):
        os.mkdir(cls.fake_dir)
    tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")
def ensure_props(self):
    """Raise an exception if required properties are missing

    Checks self.required_props against the properties present in this
    entry's node.

    Raises:
        ValueError: Any required property is missing
    """
    not_present = []
    for prop in self.required_props:
        if prop not in self._node.props:
            not_present.append(prop)
    if not_present:
        self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")
def mark_absent(self, msg):
    """Mark this entry as absent, giving the reason

    Args:
        msg (str): Message explaining why the entry is absent
    """
    tout.info("Entry '%s' marked absent: %s" % (self._node.path, msg))
    self.absent = True
def read_elf_segments(self):
    """Read segments from an entry that can generate an ELF file

    Returns:
        tuple:
            list of segments, each:
                int: Segment number (0 = first)
                int: Start address of segment in memory
                bytes: Contents of segment
            int: entry address of ELF file
    """
    # Base class does not support generating an ELF file
    return None
def lookup_offset(self):
    """Look up a symbol in another entry's ELF file to get an offset

    Uses self.offset_from_elf, a (node, symbol-name, offset) tuple, to find
    the entry for the node, look the symbol up in that entry's ELF file and
    return its value plus the extra offset.

    Returns:
        int: Symbol offset within the ELF file plus the stored offset
    """
    node, sym_name, offset = self.offset_from_elf
    entry = self.section.FindEntryByNode(node)
    if not entry:
        self.Raise("Cannot find entry for node '%s'" % node.name)
    if not entry.elf_fname:
        entry.Raise("Need elf-fname property '%s'" % node.name)
    val = elf.GetSymbolOffset(entry.elf_fname, sym_name,
                              entry.elf_base_sym)
    return val + offset
def mark_build_done(self):
    """Mark an entry as already built"""
    self.build_done = True
    entries = self.GetEntries()
    # Guard: GetEntries() may return a falsy value when there are no children
    if entries:
        # Mark all children as built too
        for entry in entries.values():
            entry.mark_build_done()
def UpdateSignatures(self, privatekey_fname, algo, input_fname):
    """Update any signatures in the entry

    The base class does not support this, so an error is reported.

    Args:
        privatekey_fname: Filename of private key
        algo: Signing algorithm to use
        input_fname: Filename of input data
    """
    self.Raise('Updating signatures is not supported with this entry type')
def FdtContents(self, fdt_etype):
    """Get the contents of an FDT for a particular phase

    Args:
        fdt_etype (str): Filename of the phase of the FDT to return

    Returns:
        tuple:
            fname (str): Filename of .dtb
            bytes: Contents of FDT (possibly run through fdtgrep)
    """
    # Delegate to the containing section
    parent = self.section
    return parent.FdtContents(fdt_etype)