author    Tom Rini <trini@konsulko.com>  2023-07-24 14:55:56 -0400
committer Tom Rini <trini@konsulko.com>  2023-07-24 14:55:56 -0400
commit    c07ad9520c6190070513016fdb495d4703a4a853 (patch)
tree      ecd308f65b3b7e5f709cb09c9766f1fc8edb7783
parent    45622f326278db923c443b04342b59679bcbb2ba (diff)
parent    407a1413e3202585ca842896365718873b170ee2 (diff)
Merge tag 'dm-pull-24jul23' of https://source.denx.de/u-boot/custodians/u-boot-dm

buildman refactoring and --maintainer-check
binman TI support
binman cipher support
-rw-r--r--.azure-pipelines.yml2
-rw-r--r--.gitlab-ci.yml2
-rw-r--r--tools/binman/bintools.rst33
-rw-r--r--tools/binman/btool/fdt_add_pubkey.py67
-rw-r--r--tools/binman/control.py2
-rw-r--r--tools/binman/entries.rst125
-rw-r--r--tools/binman/etype/blob_dtb.py2
-rw-r--r--tools/binman/etype/encrypted.py138
-rw-r--r--tools/binman/etype/pre_load.py6
-rw-r--r--tools/binman/etype/section.py2
-rw-r--r--tools/binman/etype/u_boot_spl_pubkey_dtb.py112
-rw-r--r--tools/binman/ftest.py164
-rw-r--r--tools/binman/test/230_pre_load.dts2
-rw-r--r--tools/binman/test/231_pre_load_pkcs.dts2
-rw-r--r--tools/binman/test/232_pre_load_pss.dts2
-rw-r--r--tools/binman/test/233_pre_load_invalid_padding.dts2
-rw-r--r--tools/binman/test/234_pre_load_invalid_sha.dts2
-rw-r--r--tools/binman/test/235_pre_load_invalid_algo.dts2
-rw-r--r--tools/binman/test/236_pre_load_invalid_key.dts2
-rw-r--r--tools/binman/test/291_rockchip_tpl.dts (renamed from tools/binman/test/277_rockchip_tpl.dts)0
-rw-r--r--tools/binman/test/292_mkimage_missing_multiple.dts (renamed from tools/binman/test/278_mkimage_missing_multiple.dts)0
-rw-r--r--tools/binman/test/293_ti_board_cfg.dts (renamed from tools/binman/test/277_ti_board_cfg.dts)0
-rw-r--r--tools/binman/test/294_ti_board_cfg_combined.dts (renamed from tools/binman/test/278_ti_board_cfg_combined.dts)0
-rw-r--r--tools/binman/test/295_ti_board_cfg_no_type.dts (renamed from tools/binman/test/279_ti_board_cfg_no_type.dts)0
-rw-r--r--tools/binman/test/296_ti_secure.dts (renamed from tools/binman/test/279_ti_secure.dts)0
-rw-r--r--tools/binman/test/297_ti_secure_rom.dts (renamed from tools/binman/test/280_ti_secure_rom.dts)0
-rw-r--r--tools/binman/test/298_ti_secure_rom_combined.dts (renamed from tools/binman/test/281_ti_secure_rom_combined.dts)0
-rw-r--r--tools/binman/test/299_ti_secure_rom_a.dts (renamed from tools/binman/test/288_ti_secure_rom_a.dts)0
-rw-r--r--tools/binman/test/300_ti_secure_rom_b.dts (renamed from tools/binman/test/289_ti_secure_rom_b.dts)0
-rw-r--r--tools/binman/test/301_encrypted_no_algo.dts15
-rw-r--r--tools/binman/test/302_encrypted_invalid_iv_file.dts18
-rw-r--r--tools/binman/test/303_encrypted_missing_key.dts23
-rw-r--r--tools/binman/test/304_encrypted_key_source.dts24
-rw-r--r--tools/binman/test/305_encrypted_key_file.dts24
-rw-r--r--tools/binman/test/306_spl_pubkey_dtb.dts16
-rw-r--r--tools/binman/test/dev.key (renamed from tools/binman/test/230_dev.key)0
-rw-r--r--tools/buildman/board.py6
-rw-r--r--tools/buildman/boards.py259
-rw-r--r--tools/buildman/bsettings.py14
-rw-r--r--tools/buildman/builder.py262
-rw-r--r--tools/buildman/builderthread.py652
-rw-r--r--tools/buildman/buildman.rst32
-rw-r--r--tools/buildman/cmdline.py180
-rw-r--r--tools/buildman/control.py790
-rw-r--r--tools/buildman/func_test.py328
-rwxr-xr-xtools/buildman/main.py71
-rw-r--r--tools/buildman/test.py28
-rw-r--r--tools/buildman/test/Kconfig72
-rw-r--r--tools/buildman/test/boards/board0/MAINTAINERS5
-rw-r--r--tools/buildman/test/boards/board2/MAINTAINERS5
-rw-r--r--tools/buildman/test/configs/board0_defconfig1
-rw-r--r--tools/buildman/test/configs/board2_defconfig1
-rw-r--r--tools/buildman/toolchain.py14
-rwxr-xr-xtools/moveconfig.py2
-rw-r--r--tools/u_boot_pylib/test_util.py10
55 files changed, 2615 insertions, 906 deletions
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
index ef7711d..2678e5a 100644
--- a/.azure-pipelines.yml
+++ b/.azure-pipelines.yml
@@ -123,7 +123,7 @@ stages:
options: $(container_option)
steps:
- script: |
- ./tools/buildman/buildman -R
+ ./tools/buildman/buildman --maintainer-check || exit 0
- job: tools_only
displayName: 'Ensure host tools build'
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 80dc587..8010afa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -187,7 +187,7 @@ sloccount:
Check for configs without MAINTAINERS entry:
stage: testsuites
script:
- - ./tools/buildman/buildman -R
+ - ./tools/buildman/buildman --maintainer-check || exit 0
# Ensure host tools build
Build tools-only:
diff --git a/tools/binman/bintools.rst b/tools/binman/bintools.rst
index c30e7eb..20ee243 100644
--- a/tools/binman/bintools.rst
+++ b/tools/binman/bintools.rst
@@ -155,6 +155,17 @@ Support is provided for fetching this on Debian-like systems, using apt.
+Bintool: openssl: openssl tool
+------------------------------
+
+This bintool supports creating new openssl certificates.
+
+It also supports fetching a binary openssl.
+
+Documentation about openssl is at https://www.openssl.org/
+
+
+
Bintool: xz: Compression/decompression using the xz algorithm
-------------------------------------------------------------
@@ -183,3 +194,25 @@ Documentation is available via::
+Bintool: fdt_add_pubkey: Add public key to device tree
+------------------------------------------------------
+
+This bintool supports running `fdt_add_pubkey` in order to add a public
+key, coming from a certificate, to a device tree.
+
+Normally signing is done using `mkimage` in the context of `binman sign`.
+However, in this process the public key is not added to the stage before
+U-Boot proper. Using `fdt_add_pubkey` the key can be injected into the SPL
+independently of `mkimage`.
+
+
+
+Bintool: bootgen: Sign ZynqMP FSBL image
+---------------------------------------------
+
+This bintool supports running `bootgen` in order to sign an SPL for ZynqMP
+devices.
+
+The bintool automatically creates an appropriate input image file (.bif) for
+bootgen based on the passed arguments. The output is a bootable,
+authenticated `boot.bin` file.
diff --git a/tools/binman/btool/fdt_add_pubkey.py b/tools/binman/btool/fdt_add_pubkey.py
new file mode 100644
index 0000000..a507742
--- /dev/null
+++ b/tools/binman/btool/fdt_add_pubkey.py
@@ -0,0 +1,67 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (C) 2023 Weidmüller Interface GmbH & Co. KG
+# Lukas Funke <lukas.funke@weidmueller.com>
+#
+"""Bintool implementation for fdt_add_pubkey"""
+
+from binman import bintool
+
+class Bintoolfdt_add_pubkey(bintool.Bintool):
+ """Add public key to control dtb (spl or u-boot proper)
+
+ This bintool supports running `fdt_add_pubkey`.
+
+ Normally mkimage adds signature information to the control dtb. However,
+ binman images are built independently of each other, so it is necessary
+ to add the public key separately from mkimage.
+ """
+ def __init__(self, name):
+ super().__init__(name, 'Generate image for U-Boot')
+
+ # pylint: disable=R0913
+ def run(self, input_fname, keydir, keyname, required, algo):
+ """Run fdt_add_pubkey
+
+ Args:
+ input_fname (str): dtb file to sign
+ keydir (str): Directory with public key. Optional parameter,
+ default value: '.' (current directory)
+ keyname (str): Public key name. Optional parameter,
+ default value: key
+ required (str): If present this indicates that the key must be
+ verified for the image / configuration to be considered valid.
+ algo (str): Cryptographic algorithm. Optional parameter,
+ default value: sha1,rsa2048
+ """
+ args = []
+ if algo:
+ args += ['-a', algo]
+ if keydir:
+ args += ['-k', keydir]
+ if keyname:
+ args += ['-n', keyname]
+ if required:
+ args += ['-r', required]
+
+ args += [ input_fname ]
+
+ return self.run_cmd(*args)
+
+ def fetch(self, method):
+ """Fetch handler for fdt_add_pubkey
+
+ This installs fdt_add_pubkey using the apt utility.
+
+ Args:
+ method (FETCH_...): Method to use
+
+ Returns:
+ True if the file was fetched and now installed, None if a method
+ other than FETCH_BIN was requested
+
+ Raises:
+ ValueError: Fetching could not be completed
+ """
+ if method != bintool.FETCH_BIN:
+ return None
+ return self.apt_install('u-boot-tools')
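For reference, the `run()` method above amounts to the following stand-alone
invocation; a minimal sketch, assuming `fdt_add_pubkey` (from u-boot-tools) is
on PATH and that a hypothetical `keys/dev.crt` certificate exists::

    import subprocess

    def add_pubkey(dtb, keydir=None, keyname=None, required=None, algo=None):
        """Inject a public key into 'dtb' in place, mirroring the bintool"""
        args = ['fdt_add_pubkey']
        if algo:
            args += ['-a', algo]        # e.g. 'sha384,rsa4096'
        if keydir:
            args += ['-k', keydir]      # directory containing <keyname>.crt
        if keyname:
            args += ['-n', keyname]
        if required:
            args += ['-r', required]    # 'image' or 'conf'
        args.append(dtb)
        subprocess.run(args, check=True)

    add_pubkey('u-boot-spl.dtb', keydir='keys', keyname='dev',
               required='conf', algo='sha384,rsa4096')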
diff --git a/tools/binman/control.py b/tools/binman/control.py
index 25e6681..d1ee1d6 100644
--- a/tools/binman/control.py
+++ b/tools/binman/control.py
@@ -308,8 +308,8 @@ def BeforeReplace(image, allow_resize):
image: Image to prepare
"""
state.PrepareFromLoadedData(image)
- image.LoadData()
image.CollectBintools()
+ image.LoadData(decomp=False)
# If repacking, drop the old offset/size values except for the original
# ones, so we are only left with the constraints.
diff --git a/tools/binman/entries.rst b/tools/binman/entries.rst
index 1621ff3..f237693 100644
--- a/tools/binman/entries.rst
+++ b/tools/binman/entries.rst
@@ -468,6 +468,92 @@ updating the EC on startup via software sync.
+.. _etype_encrypted:
+
+Entry: encrypted: Externally built encrypted binary blob
+--------------------------------------------------------
+
+This entry provides the functionality to include information about how to
+decrypt an encrypted binary. This information is added to the
+resulting device tree by adding a new cipher node in the entry's parent
+node (i.e. the binary).
+
+The key needed to decrypt the binary is either embedded directly in the
+device tree or referenced indirectly by specifying a key source. The key
+source can be used as an id of a key that is stored in an external device.
+
+Using an embedded key
+~~~~~~~~~~~~~~~~~~~~~
+
+This is an example using an embedded key::
+
+ blob-ext {
+ filename = "encrypted-blob.bin";
+ };
+
+ encrypted {
+ algo = "aes256-gcm";
+ iv-filename = "encrypted-blob.bin.iv";
+ key-filename = "encrypted-blob.bin.key";
+ };
+
+This entry generates the following device tree structure from the example
+above::
+
+ data = [...]
+ cipher {
+ algo = "aes256-gcm";
+ key = <0x...>;
+ iv = <0x...>;
+ };
+
+The data property is generated by the blob-ext etype; the cipher node and
+its content are generated by this etype.
+
+Using an external key
+~~~~~~~~~~~~~~~~~~~~~
+
+Instead of embedding the key itself into the device tree, it is also
+possible to address an externally stored key by specifying a 'key-source'
+instead of the 'key'::
+
+ blob-ext {
+ filename = "encrypted-blob.bin";
+ };
+
+ encrypted {
+ algo = "aes256-gcm";
+ iv-filename = "encrypted-blob.bin.iv";
+ key-source = "external-key-id";
+ };
+
+This entry generates the following device tree structure from the example
+above::
+
+ data = [...]
+ cipher {
+ algo = "aes256-gcm";
+ key-source = "external-key-id";
+ iv = <0x...>;
+ };
+
+Properties
+~~~~~~~~~~
+
+Properties / Entry arguments:
+ - algo: The encryption algorithm. Currently no algorithm is supported
+ out-of-the-box. Certain algorithms will be added in future
+ patches.
+ - iv-filename: The name of the file containing the initialization
+ vector (iv for short). See
+ https://en.wikipedia.org/wiki/Initialization_vector
+ - key-filename: The name of the file containing the key. Either
+ key-filename or key-source must be provided.
+ - key-source: The key that should be used. Either key-filename or
+ key-source must be provided.
+
+
+
.. _etype_fdtmap:
Entry: fdtmap: An entry which contains an FDT map
@@ -2031,6 +2117,45 @@ binman uses that to look up symbols to write into the SPL binary.
+.. _etype_u_boot_spl_pubkey_dtb:
+
+Entry: u-boot-spl-pubkey-dtb: U-Boot SPL device tree including public key
+-------------------------------------------------------------------------
+
+Properties / Entry arguments:
+ - key-name-hint: Public key name without extension (.crt).
+ Default is determined by underlying
+ bintool (fdt_add_pubkey), usually 'key'.
+ - algo: (Optional) Algorithm used for signing. Default is determined by
+ underlying bintool (fdt_add_pubkey), usually 'sha1,rsa2048'
+ - required: (Optional) If present this indicates that the key must be
+ verified for the image / configuration to be
+ considered valid
+
+The following example shows an image containing an SPL which
+is packed together with the dtb. Binman will add a signature
+node to the dtb.
+
+Example node::
+
+ image {
+ ...
+ spl {
+ filename = "spl.bin"
+
+ u-boot-spl-nodtb {
+ };
+ u-boot-spl-pubkey-dtb {
+ algo = "sha384,rsa4096";
+ required = "conf";
+ key-name-hint = "dev";
+ };
+ };
+ ...
+ }
+
+
+
.. _etype_u_boot_spl_with_ucode_ptr:
Entry: u-boot-spl-with-ucode-ptr: U-Boot SPL with embedded microcode pointer
diff --git a/tools/binman/etype/blob_dtb.py b/tools/binman/etype/blob_dtb.py
index 6a3fbc4..d543de9 100644
--- a/tools/binman/etype/blob_dtb.py
+++ b/tools/binman/etype/blob_dtb.py
@@ -38,7 +38,7 @@ class Entry_blob_dtb(Entry_blob):
self.Raise("Invalid prepend in '%s': '%s'" %
(self._node.name, self.prepend))
- def ObtainContents(self):
+ def ObtainContents(self, fake_size=0):
"""Get the device-tree from the list held by the 'state' module"""
self._filename = self.GetDefaultFilename()
self._pathname, _ = state.GetFdtContents(self.GetFdtEtype())
diff --git a/tools/binman/etype/encrypted.py b/tools/binman/etype/encrypted.py
new file mode 100644
index 0000000..53d0e76
--- /dev/null
+++ b/tools/binman/etype/encrypted.py
@@ -0,0 +1,138 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright 2023 Weidmüller Interface GmbH & Co. KG
+# Written by Christian Taedcke <christian.taedcke@weidmueller.com>
+#
+# Entry-type module for cipher information of encrypted blobs/binaries
+#
+
+from binman.etype.collection import Entry
+from dtoc import fdt_util
+from u_boot_pylib import tools
+
+# This is imported if needed
+state = None
+
+
+class Entry_encrypted(Entry):
+ """Externally built encrypted binary blob
+
+ This entry provides the functionality to include information about how to
+ decrypt an encrypted binary. This information is added to the
+ resulting device tree by adding a new cipher node in the entry's parent
+ node (i.e. the binary).
+
+ The key needed to decrypt the binary is either embedded directly in the
+ device tree or referenced indirectly by specifying a key source. The key
+ source can be used as an id of a key that is stored in an external device.
+
+ Using an embedded key
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ This is an example using an embedded key::
+
+ blob-ext {
+ filename = "encrypted-blob.bin";
+ };
+
+ encrypted {
+ algo = "aes256-gcm";
+ iv-filename = "encrypted-blob.bin.iv";
+ key-filename = "encrypted-blob.bin.key";
+ };
+
+ This entry generates the following device tree structure from the example
+ above::
+
+ data = [...]
+ cipher {
+ algo = "aes256-gcm";
+ key = <0x...>;
+ iv = <0x...>;
+ };
+
+ The data property is generated by the blob-ext etype; the cipher node and
+ its content are generated by this etype.
+
+ Using an external key
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Instead of embedding the key itself into the device tree, it is also
+ possible to address an externally stored key by specifying a 'key-source'
+ instead of the 'key'::
+
+ blob-ext {
+ filename = "encrypted-blob.bin";
+ };
+
+ encrypted {
+ algo = "aes256-gcm";
+ iv-filename = "encrypted-blob.bin.iv";
+ key-source = "external-key-id";
+ };
+
+ This entry generates the following device tree structure from the example
+ above::
+
+ data = [...]
+ cipher {
+ algo = "aes256-gcm";
+ key-source = "external-key-id";
+ iv = <0x...>;
+ };
+
+ Properties
+ ~~~~~~~~~~
+
+ Properties / Entry arguments:
+ - algo: The encryption algorithm. Currently no algorithm is supported
+ out-of-the-box. Certain algorithms will be added in future
+ patches.
+ - iv-filename: The name of the file containing the initialization
+ vector (iv for short). See
+ https://en.wikipedia.org/wiki/Initialization_vector
+ - key-filename: The name of the file containing the key. Either
+ key-filename or key-source must be provided.
+ - key-source: The key that should be used. Either key-filename or
+ key-source must be provided.
+ """
+
+ def __init__(self, section, etype, node):
+ # Put this here to allow entry-docs and help to work without libfdt
+ global state
+ from binman import state
+
+ super().__init__(section, etype, node)
+ self.required_props = ['algo', 'iv-filename']
+ self._algo = None
+ self._iv_filename = None
+ self._key_name_hint = None
+ self._key_filename = None
+
+ def ReadNode(self):
+ super().ReadNode()
+
+ self._algo = fdt_util.GetString(self._node, 'algo')
+ self._iv_filename = fdt_util.GetString(self._node, 'iv-filename')
+ self._key_filename = fdt_util.GetString(self._node, 'key-filename')
+ self._key_source = fdt_util.GetString(self._node, 'key-source')
+
+ if self._key_filename is None and self._key_source is None:
+ self.Raise("Provide either 'key-filename' or 'key-source'")
+
+ def gen_entries(self):
+ super().gen_entries()
+
+ iv_filename = tools.get_input_filename(self._iv_filename)
+ iv = tools.read_file(iv_filename, binary=True)
+
+ cipher_node = state.AddSubnode(self._node.parent, "cipher")
+ cipher_node.AddString("algo", self._algo)
+ cipher_node.AddData("iv", iv)
+
+ if self._key_filename:
+ key_filename = tools.get_input_filename(self._key_filename)
+ key = tools.read_file(key_filename, binary=True)
+ cipher_node.AddData("key", key)
+
+ if self._key_source:
+ cipher_node.AddString("key-source", self._key_source)
diff --git a/tools/binman/etype/pre_load.py b/tools/binman/etype/pre_load.py
index bd3545b..2e4c723 100644
--- a/tools/binman/etype/pre_load.py
+++ b/tools/binman/etype/pre_load.py
@@ -81,7 +81,8 @@ class Entry_pre_load(Entry_collection):
def ReadNode(self):
super().ReadNode()
- self.key_path, = self.GetEntryArgsOrProps([EntryArg('pre-load-key-path', str)])
+ self.key_path, = self.GetEntryArgsOrProps(
+ [EntryArg('pre-load-key-path', str)])
if self.key_path is None:
self.key_path = ''
@@ -98,8 +99,7 @@ class Entry_pre_load(Entry_collection):
self.Raise(sign_name + " is not supported")
# Read the key
- with open(key_name, 'rb') as pem:
- key = RSA.import_key(pem.read())
+ key = RSA.import_key(tools.read_file(key_name))
# Check if the key has the expected size
if key.size_in_bytes() != RSAS[sign_name]:
diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py
index 7c4d312..fb49e85 100644
--- a/tools/binman/etype/section.py
+++ b/tools/binman/etype/section.py
@@ -179,7 +179,7 @@ class Entry_section(Entry):
Returns:
bool: True if the node is a special one, else False
"""
- start_list = ('hash', 'signature', 'template')
+ start_list = ('cipher', 'hash', 'signature', 'template')
return any(node.name.startswith(name) for name in start_list)
def ReadNode(self):
diff --git a/tools/binman/etype/u_boot_spl_pubkey_dtb.py b/tools/binman/etype/u_boot_spl_pubkey_dtb.py
new file mode 100644
index 0000000..cb19606
--- /dev/null
+++ b/tools/binman/etype/u_boot_spl_pubkey_dtb.py
@@ -0,0 +1,112 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (c) 2023 Weidmueller GmbH
+# Written by Lukas Funke <lukas.funke@weidmueller.com>
+#
+# Entry-type module for 'u-boot-spl-pubkey.dtb'
+#
+
+import tempfile
+import os
+
+from binman.etype.blob_dtb import Entry_blob_dtb
+
+from dtoc import fdt_util
+
+from u_boot_pylib import tools
+
+# This is imported if needed
+state = None
+
+# pylint: disable=C0103
+class Entry_u_boot_spl_pubkey_dtb(Entry_blob_dtb):
+ """U-Boot SPL device tree including public key
+
+ Properties / Entry arguments:
+ - key-name-hint: Public key name without extension (.crt).
+ Default is determined by underlying
+ bintool (fdt_add_pubkey), usually 'key'.
+ - algo: (Optional) Algorithm used for signing. Default is determined by
+ underlying bintool (fdt_add_pubkey), usually 'sha1,rsa2048'
+ - required: (Optional) If present this indicates that the key must be
+ verified for the image / configuration to be
+ considered valid
+
+ The following example shows an image containing an SPL which
+ is packed together with the dtb. Binman will add a signature
+ node to the dtb.
+
+ Example node::
+
+ image {
+ ...
+ spl {
+ filename = "spl.bin"
+
+ u-boot-spl-nodtb {
+ };
+ u-boot-spl-pubkey-dtb {
+ algo = "sha384,rsa4096";
+ required = "conf";
+ key-name-hint = "dev";
+ };
+ };
+ ...
+ }
+ """
+
+ def __init__(self, section, etype, node):
+ # Put this here to allow entry-docs and help to work without libfdt
+ global state
+ from binman import state
+
+ super().__init__(section, etype, node)
+ self.required_props = ['key-name-hint']
+ self.fdt_add_pubkey = None
+ self._algo = fdt_util.GetString(self._node, 'algo')
+ self._required = fdt_util.GetString(self._node, 'required')
+ self._key_name_hint = fdt_util.GetString(self._node, 'key-name-hint')
+
+ def ObtainContents(self, fake_size=0):
+ """Add public key to SPL dtb
+
+ Add public key which is pointed out by
+ 'key-name-hint' to node 'signature' in the spl-dtb
+
+ This is equivalent to the '-K' option of 'mkimage'
+
+ Args:
+ fake_size (int): unused
+ """
+
+ # We don't pass fake_size upwards because this is currently
+ # not supported by the blob type
+ super().ObtainContents()
+
+ with tempfile.NamedTemporaryFile(prefix=os.path.basename(
+ self.GetFdtEtype()),
+ dir=tools.get_output_dir())\
+ as pubkey_tdb:
+ tools.write_file(pubkey_tdb.name, self.GetData())
+ keyname = tools.get_input_filename(self._key_name_hint + ".crt")
+ self.fdt_add_pubkey.run(pubkey_tdb.name,
+ os.path.dirname(keyname),
+ self._key_name_hint,
+ self._required, self._algo)
+ dtb = tools.read_file(pubkey_tdb.name)
+ self.SetContents(dtb)
+ state.UpdateFdtContents(self.GetFdtEtype(), dtb)
+
+ return True
+
+ # pylint: disable=R0201,C0116
+ def GetDefaultFilename(self):
+ return 'spl/u-boot-spl-pubkey.dtb'
+
+ # pylint: disable=R0201,C0116
+ def GetFdtEtype(self):
+ return 'u-boot-spl-dtb'
+
+ # pylint: disable=R0201,C0116
+ def AddBintools(self, btools):
+ super().AddBintools(btools)
+ self.fdt_add_pubkey = self.AddBintool(btools, 'fdt_add_pubkey')
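To check the result by hand, the generated dtb can be inspected with pylibfdt;
a quick sketch, assuming a built image and that pylibfdt is installed (the
functional test below uses dtoc's fdt wrapper instead)::

    import libfdt

    with open('spl/u-boot-spl-pubkey.dtb', 'rb') as inf:
        fdt = libfdt.Fdt(inf.read())

    sig = fdt.path_offset('/signature')
    key = fdt.first_subnode(sig)        # e.g. the 'key-dev' node
    algo = fdt.getprop(key, 'algo')     # Property is a bytearray subclass
    print(fdt.get_name(key), bytes(algo[:-1]).decode())  # strip trailing NUL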
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index 3e8091e..1cfa349 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -94,6 +94,8 @@ ROCKCHIP_TPL_DATA = b'rockchip-tpl'
TEST_FDT1_DATA = b'fdt1'
TEST_FDT2_DATA = b'test-fdt2'
ENV_DATA = b'var1=1\nvar2="2"'
+ENCRYPTED_IV_DATA = b'123456'
+ENCRYPTED_KEY_DATA = b'abcde'
PRE_LOAD_MAGIC = b'UBSH'
PRE_LOAD_VERSION = 0x11223344.to_bytes(4, 'big')
PRE_LOAD_HDR_SIZE = 0x00001000.to_bytes(4, 'big')
@@ -232,6 +234,10 @@ class TestFunctional(unittest.TestCase):
# Newer OP_TEE file in v1 binary format
cls.make_tee_bin('tee.bin')
+ # test files for encrypted tests
+ TestFunctional._MakeInputFile('encrypted-file.iv', ENCRYPTED_IV_DATA)
+ TestFunctional._MakeInputFile('encrypted-file.key', ENCRYPTED_KEY_DATA)
+
cls.comp_bintools = {}
for name in COMP_BINTOOLS:
cls.comp_bintools[name] = bintool.Bintool.create(name)
@@ -648,6 +654,16 @@ class TestFunctional(unittest.TestCase):
tools.read_file(cls.ElfTestFile(src_fname)))
@classmethod
+ def _SetupPmuFwlElf(cls, src_fname='bss_data'):
+ """Set up an ELF file with a '_dt_ucode_base_size' symbol
+
+ Args:
+ Filename of ELF file to use as VPL
+ """
+ TestFunctional._MakeInputFile('pmu-firmware.elf',
+ tools.read_file(cls.ElfTestFile(src_fname)))
+
+ @classmethod
def _SetupDescriptor(cls):
with open(cls.TestFile('descriptor.bin'), 'rb') as fd:
TestFunctional._MakeInputFile('descriptor.bin', fd.read())
@@ -5647,41 +5663,61 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testPreLoad(self):
"""Test an image with a pre-load header"""
entry_args = {
- 'pre-load-key-path': '.',
+ 'pre-load-key-path': os.path.join(self._binman_dir, 'test'),
}
- data, _, _, _ = self._DoReadFileDtb('230_pre_load.dts',
- entry_args=entry_args)
- self.assertEqual(PRE_LOAD_MAGIC, data[:len(PRE_LOAD_MAGIC)])
- self.assertEqual(PRE_LOAD_VERSION, data[4:4 + len(PRE_LOAD_VERSION)])
- self.assertEqual(PRE_LOAD_HDR_SIZE, data[8:8 + len(PRE_LOAD_HDR_SIZE)])
- data = self._DoReadFile('230_pre_load.dts')
+ data = self._DoReadFileDtb(
+ '230_pre_load.dts', entry_args=entry_args,
+ extra_indirs=[os.path.join(self._binman_dir, 'test')])[0]
self.assertEqual(PRE_LOAD_MAGIC, data[:len(PRE_LOAD_MAGIC)])
self.assertEqual(PRE_LOAD_VERSION, data[4:4 + len(PRE_LOAD_VERSION)])
self.assertEqual(PRE_LOAD_HDR_SIZE, data[8:8 + len(PRE_LOAD_HDR_SIZE)])
+ def testPreLoadNoKey(self):
+ """Test an image with a pre-load heade0r with missing key"""
+ with self.assertRaises(FileNotFoundError) as exc:
+ self._DoReadFile('230_pre_load.dts')
+ self.assertIn("No such file or directory: 'dev.key'",
+ str(exc.exception))
+
def testPreLoadPkcs(self):
"""Test an image with a pre-load header with padding pkcs"""
- data = self._DoReadFile('231_pre_load_pkcs.dts')
+ entry_args = {
+ 'pre-load-key-path': os.path.join(self._binman_dir, 'test'),
+ }
+ data = self._DoReadFileDtb('231_pre_load_pkcs.dts',
+ entry_args=entry_args)[0]
self.assertEqual(PRE_LOAD_MAGIC, data[:len(PRE_LOAD_MAGIC)])
self.assertEqual(PRE_LOAD_VERSION, data[4:4 + len(PRE_LOAD_VERSION)])
self.assertEqual(PRE_LOAD_HDR_SIZE, data[8:8 + len(PRE_LOAD_HDR_SIZE)])
def testPreLoadPss(self):
"""Test an image with a pre-load header with padding pss"""
- data = self._DoReadFile('232_pre_load_pss.dts')
+ entry_args = {
+ 'pre-load-key-path': os.path.join(self._binman_dir, 'test'),
+ }
+ data = self._DoReadFileDtb('232_pre_load_pss.dts',
+ entry_args=entry_args)[0]
self.assertEqual(PRE_LOAD_MAGIC, data[:len(PRE_LOAD_MAGIC)])
self.assertEqual(PRE_LOAD_VERSION, data[4:4 + len(PRE_LOAD_VERSION)])
self.assertEqual(PRE_LOAD_HDR_SIZE, data[8:8 + len(PRE_LOAD_HDR_SIZE)])
def testPreLoadInvalidPadding(self):
"""Test an image with a pre-load header with an invalid padding"""
+ entry_args = {
+ 'pre-load-key-path': os.path.join(self._binman_dir, 'test'),
+ }
with self.assertRaises(ValueError) as e:
- data = self._DoReadFile('233_pre_load_invalid_padding.dts')
+ self._DoReadFileDtb('233_pre_load_invalid_padding.dts',
+ entry_args=entry_args)
def testPreLoadInvalidSha(self):
"""Test an image with a pre-load header with an invalid hash"""
+ entry_args = {
+ 'pre-load-key-path': os.path.join(self._binman_dir, 'test'),
+ }
with self.assertRaises(ValueError) as e:
- data = self._DoReadFile('234_pre_load_invalid_sha.dts')
+ self._DoReadFileDtb('234_pre_load_invalid_sha.dts',
+ entry_args=entry_args)
def testPreLoadInvalidAlgo(self):
"""Test an image with a pre-load header with an invalid algo"""
@@ -5690,8 +5726,12 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testPreLoadInvalidKey(self):
"""Test an image with a pre-load header with an invalid key"""
+ entry_args = {
+ 'pre-load-key-path': os.path.join(self._binman_dir, 'test'),
+ }
with self.assertRaises(ValueError) as e:
- data = self._DoReadFile('236_pre_load_invalid_key.dts')
+ data = self._DoReadFileDtb('236_pre_load_invalid_key.dts',
+ entry_args=entry_args)
def _CheckSafeUniqueNames(self, *images):
"""Check all entries of given images for unsafe unique names"""
@@ -6659,18 +6699,18 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testPackRockchipTpl(self):
"""Test that an image with a Rockchip TPL binary can be created"""
- data = self._DoReadFile('277_rockchip_tpl.dts')
+ data = self._DoReadFile('291_rockchip_tpl.dts')
self.assertEqual(ROCKCHIP_TPL_DATA, data[:len(ROCKCHIP_TPL_DATA)])
def testMkimageMissingBlobMultiple(self):
"""Test missing blob with mkimage entry and multiple-data-files"""
with test_util.capture_sys_output() as (stdout, stderr):
- self._DoTestFile('278_mkimage_missing_multiple.dts', allow_missing=True)
+ self._DoTestFile('292_mkimage_missing_multiple.dts', allow_missing=True)
err = stderr.getvalue()
self.assertIn("is missing external blobs and is non-functional", err)
with self.assertRaises(ValueError) as e:
- self._DoTestFile('278_mkimage_missing_multiple.dts', allow_missing=False)
+ self._DoTestFile('292_mkimage_missing_multiple.dts', allow_missing=False)
self.assertIn("not found in input path", str(e.exception))
def _PrepareSignEnv(self, dts='280_fit_sign.dts'):
@@ -6906,19 +6946,19 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testTIBoardConfig(self):
"""Test that a schema validated board config file can be generated"""
- data = self._DoReadFile('277_ti_board_cfg.dts')
+ data = self._DoReadFile('293_ti_board_cfg.dts')
self.assertEqual(TI_BOARD_CONFIG_DATA, data)
def testTIBoardConfigCombined(self):
"""Test that a schema validated combined board config file can be generated"""
- data = self._DoReadFile('278_ti_board_cfg_combined.dts')
+ data = self._DoReadFile('294_ti_board_cfg_combined.dts')
configlen_noheader = TI_BOARD_CONFIG_DATA * 4
self.assertGreater(data, configlen_noheader)
def testTIBoardConfigNoDataType(self):
"""Test that error is thrown when data type is not supported"""
with self.assertRaises(ValueError) as e:
- data = self._DoReadFile('279_ti_board_cfg_no_type.dts')
+ data = self._DoReadFile('295_ti_board_cfg_no_type.dts')
self.assertIn("Schema validation error", str(e.exception))
def testPackTiSecure(self):
@@ -6927,7 +6967,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args = {
'keyfile': keyfile,
}
- data = self._DoReadFileDtb('279_ti_secure.dts',
+ data = self._DoReadFileDtb('296_ti_secure.dts',
entry_args=entry_args)[0]
self.assertGreater(len(data), len(TI_UNSECURE_DATA))
@@ -6939,7 +6979,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
'keyfile': keyfile,
}
with test_util.capture_sys_output() as (_, stderr):
- self._DoTestFile('279_ti_secure.dts',
+ self._DoTestFile('296_ti_secure.dts',
force_missing_bintools='openssl',
entry_args=entry_args)
err = stderr.getvalue()
@@ -6951,11 +6991,11 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args = {
'keyfile': keyfile,
}
- data = self._DoReadFileDtb('280_ti_secure_rom.dts',
+ data = self._DoReadFileDtb('297_ti_secure_rom.dts',
entry_args=entry_args)[0]
- data_a = self._DoReadFileDtb('288_ti_secure_rom_a.dts',
+ data_a = self._DoReadFileDtb('299_ti_secure_rom_a.dts',
entry_args=entry_args)[0]
- data_b = self._DoReadFileDtb('289_ti_secure_rom_b.dts',
+ data_b = self._DoReadFileDtb('300_ti_secure_rom_b.dts',
entry_args=entry_args)[0]
self.assertGreater(len(data), len(TI_UNSECURE_DATA))
self.assertGreater(len(data_a), len(TI_UNSECURE_DATA))
@@ -6967,9 +7007,85 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args = {
'keyfile': keyfile,
}
- data = self._DoReadFileDtb('281_ti_secure_rom_combined.dts',
+ data = self._DoReadFileDtb('298_ti_secure_rom_combined.dts',
entry_args=entry_args)[0]
self.assertGreater(len(data), len(TI_UNSECURE_DATA))
+ def testEncryptedNoAlgo(self):
+ """Test encrypted node with missing required properties"""
+ with self.assertRaises(ValueError) as e:
+ self._DoReadFileDtb('301_encrypted_no_algo.dts')
+ self.assertIn(
+ "Node '/binman/fit/images/u-boot/encrypted': 'encrypted' entry is missing properties: algo iv-filename",
+ str(e.exception))
+
+ def testEncryptedInvalidIvfile(self):
+ """Test encrypted node with invalid iv file"""
+ with self.assertRaises(ValueError) as e:
+ self._DoReadFileDtb('302_encrypted_invalid_iv_file.dts')
+ self.assertIn("Filename 'invalid-iv-file' not found in input path",
+ str(e.exception))
+
+ def testEncryptedMissingKey(self):
+ """Test encrypted node with missing key properties"""
+ with self.assertRaises(ValueError) as e:
+ self._DoReadFileDtb('303_encrypted_missing_key.dts')
+ self.assertIn(
+ "Node '/binman/fit/images/u-boot/encrypted': Provide either 'key-filename' or 'key-source'",
+ str(e.exception))
+
+ def testEncryptedKeySource(self):
+ """Test encrypted node with key-source property"""
+ data = self._DoReadFileDtb('304_encrypted_key_source.dts')[0]
+
+ dtb = fdt.Fdt.FromData(data)
+ dtb.Scan()
+
+ node = dtb.GetNode('/images/u-boot/cipher')
+ self.assertEqual('algo-name', node.props['algo'].value)
+ self.assertEqual('key-source-value', node.props['key-source'].value)
+ self.assertEqual(ENCRYPTED_IV_DATA,
+ tools.to_bytes(''.join(node.props['iv'].value)))
+ self.assertNotIn('key', node.props)
+
+ def testEncryptedKeyFile(self):
+ """Test encrypted node with key-filename property"""
+ data = self._DoReadFileDtb('305_encrypted_key_file.dts')[0]
+
+ dtb = fdt.Fdt.FromData(data)
+ dtb.Scan()
+
+ node = dtb.GetNode('/images/u-boot/cipher')
+ self.assertEqual('algo-name', node.props['algo'].value)
+ self.assertEqual(ENCRYPTED_IV_DATA,
+ tools.to_bytes(''.join(node.props['iv'].value)))
+ self.assertEqual(ENCRYPTED_KEY_DATA,
+ tools.to_bytes(''.join(node.props['key'].value)))
+ self.assertNotIn('key-source', node.props)
+
+
+ def testSplPubkeyDtb(self):
+ """Test u_boot_spl_pubkey_dtb etype"""
+ data = tools.read_file(self.TestFile("key.pem"))
+ self._MakeInputFile("key.crt", data)
+ self._DoReadFileRealDtb('306_spl_pubkey_dtb.dts')
+ image = control.images['image']
+ entries = image.GetEntries()
+ dtb_entry = entries['u-boot-spl-pubkey-dtb']
+ dtb_data = dtb_entry.GetData()
+ dtb = fdt.Fdt.FromData(dtb_data)
+ dtb.Scan()
+
+ signature_node = dtb.GetNode('/signature')
+ self.assertIsNotNone(signature_node)
+ key_node = signature_node.FindNode("key-key")
+ self.assertIsNotNone(key_node)
+ self.assertEqual(fdt_util.GetString(key_node, "required"),
+ "conf")
+ self.assertEqual(fdt_util.GetString(key_node, "algo"),
+ "sha384,rsa4096")
+ self.assertEqual(fdt_util.GetString(key_node, "key-name-hint"),
+ "key")
+
if __name__ == "__main__":
unittest.main()
diff --git a/tools/binman/test/230_pre_load.dts b/tools/binman/test/230_pre_load.dts
index c0c2472..e6d9ef4 100644
--- a/tools/binman/test/230_pre_load.dts
+++ b/tools/binman/test/230_pre_load.dts
@@ -10,7 +10,7 @@
pre-load {
content = <&image>;
algo-name = "sha256,rsa2048";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <0x11223344>;
};
diff --git a/tools/binman/test/231_pre_load_pkcs.dts b/tools/binman/test/231_pre_load_pkcs.dts
index 530638c..66268cd 100644
--- a/tools/binman/test/231_pre_load_pkcs.dts
+++ b/tools/binman/test/231_pre_load_pkcs.dts
@@ -11,7 +11,7 @@
content = <&image>;
algo-name = "sha256,rsa2048";
padding-name = "pkcs-1.5";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <0x11223344>;
};
diff --git a/tools/binman/test/232_pre_load_pss.dts b/tools/binman/test/232_pre_load_pss.dts
index 371e0fd..3008d3f 100644
--- a/tools/binman/test/232_pre_load_pss.dts
+++ b/tools/binman/test/232_pre_load_pss.dts
@@ -11,7 +11,7 @@
content = <&image>;
algo-name = "sha256,rsa2048";
padding-name = "pss";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <0x11223344>;
};
diff --git a/tools/binman/test/233_pre_load_invalid_padding.dts b/tools/binman/test/233_pre_load_invalid_padding.dts
index 9cb4cb5..bbe2d1b 100644
--- a/tools/binman/test/233_pre_load_invalid_padding.dts
+++ b/tools/binman/test/233_pre_load_invalid_padding.dts
@@ -11,7 +11,7 @@
content = <&image>;
algo-name = "sha256,rsa2048";
padding-name = "padding";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <1>;
};
diff --git a/tools/binman/test/234_pre_load_invalid_sha.dts b/tools/binman/test/234_pre_load_invalid_sha.dts
index 8ded98d..29afd2e 100644
--- a/tools/binman/test/234_pre_load_invalid_sha.dts
+++ b/tools/binman/test/234_pre_load_invalid_sha.dts
@@ -11,7 +11,7 @@
content = <&image>;
algo-name = "sha2560,rsa2048";
padding-name = "pkcs-1.5";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <1>;
};
diff --git a/tools/binman/test/235_pre_load_invalid_algo.dts b/tools/binman/test/235_pre_load_invalid_algo.dts
index 145286c..d6f6dd2 100644
--- a/tools/binman/test/235_pre_load_invalid_algo.dts
+++ b/tools/binman/test/235_pre_load_invalid_algo.dts
@@ -11,7 +11,7 @@
content = <&image>;
algo-name = "sha256,rsa20480";
padding-name = "pkcs-1.5";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <1>;
};
diff --git a/tools/binman/test/236_pre_load_invalid_key.dts b/tools/binman/test/236_pre_load_invalid_key.dts
index df858c3..f93bc97 100644
--- a/tools/binman/test/236_pre_load_invalid_key.dts
+++ b/tools/binman/test/236_pre_load_invalid_key.dts
@@ -11,7 +11,7 @@
content = <&image>;
algo-name = "sha256,rsa4096";
padding-name = "pkcs-1.5";
- key-name = "tools/binman/test/230_dev.key";
+ key-name = "dev.key";
header-size = <4096>;
version = <1>;
};
diff --git a/tools/binman/test/277_rockchip_tpl.dts b/tools/binman/test/291_rockchip_tpl.dts
index 269f56e..269f56e 100644
--- a/tools/binman/test/277_rockchip_tpl.dts
+++ b/tools/binman/test/291_rockchip_tpl.dts
diff --git a/tools/binman/test/278_mkimage_missing_multiple.dts b/tools/binman/test/292_mkimage_missing_multiple.dts
index f84aea4..f84aea4 100644
--- a/tools/binman/test/278_mkimage_missing_multiple.dts
+++ b/tools/binman/test/292_mkimage_missing_multiple.dts
diff --git a/tools/binman/test/277_ti_board_cfg.dts b/tools/binman/test/293_ti_board_cfg.dts
index cda024c..cda024c 100644
--- a/tools/binman/test/277_ti_board_cfg.dts
+++ b/tools/binman/test/293_ti_board_cfg.dts
diff --git a/tools/binman/test/278_ti_board_cfg_combined.dts b/tools/binman/test/294_ti_board_cfg_combined.dts
index 95ef449..95ef449 100644
--- a/tools/binman/test/278_ti_board_cfg_combined.dts
+++ b/tools/binman/test/294_ti_board_cfg_combined.dts
diff --git a/tools/binman/test/279_ti_board_cfg_no_type.dts b/tools/binman/test/295_ti_board_cfg_no_type.dts
index 584b7ac..584b7ac 100644
--- a/tools/binman/test/279_ti_board_cfg_no_type.dts
+++ b/tools/binman/test/295_ti_board_cfg_no_type.dts
diff --git a/tools/binman/test/279_ti_secure.dts b/tools/binman/test/296_ti_secure.dts
index 941d0ab..941d0ab 100644
--- a/tools/binman/test/279_ti_secure.dts
+++ b/tools/binman/test/296_ti_secure.dts
diff --git a/tools/binman/test/280_ti_secure_rom.dts b/tools/binman/test/297_ti_secure_rom.dts
index d131376..d131376 100644
--- a/tools/binman/test/280_ti_secure_rom.dts
+++ b/tools/binman/test/297_ti_secure_rom.dts
diff --git a/tools/binman/test/281_ti_secure_rom_combined.dts b/tools/binman/test/298_ti_secure_rom_combined.dts
index bf87273..bf87273 100644
--- a/tools/binman/test/281_ti_secure_rom_combined.dts
+++ b/tools/binman/test/298_ti_secure_rom_combined.dts
diff --git a/tools/binman/test/288_ti_secure_rom_a.dts b/tools/binman/test/299_ti_secure_rom_a.dts
index 887138f..887138f 100644
--- a/tools/binman/test/288_ti_secure_rom_a.dts
+++ b/tools/binman/test/299_ti_secure_rom_a.dts
diff --git a/tools/binman/test/289_ti_secure_rom_b.dts b/tools/binman/test/300_ti_secure_rom_b.dts
index c6d6182..c6d6182 100644
--- a/tools/binman/test/289_ti_secure_rom_b.dts
+++ b/tools/binman/test/300_ti_secure_rom_b.dts
diff --git a/tools/binman/test/301_encrypted_no_algo.dts b/tools/binman/test/301_encrypted_no_algo.dts
new file mode 100644
index 0000000..03f7ffe
--- /dev/null
+++ b/tools/binman/test/301_encrypted_no_algo.dts
@@ -0,0 +1,15 @@
+// SPDX-License-Identifier: GPL-2.0+
+/dts-v1/;
+
+/ {
+ binman {
+ fit {
+ images {
+ u-boot {
+ encrypted {
+ };
+ };
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/302_encrypted_invalid_iv_file.dts b/tools/binman/test/302_encrypted_invalid_iv_file.dts
new file mode 100644
index 0000000..388a0a6
--- /dev/null
+++ b/tools/binman/test/302_encrypted_invalid_iv_file.dts
@@ -0,0 +1,18 @@
+// SPDX-License-Identifier: GPL-2.0+
+/dts-v1/;
+
+/ {
+ binman {
+ fit {
+ images {
+ u-boot {
+ encrypted {
+ algo = "some-algo";
+ key-source = "key";
+ iv-filename = "invalid-iv-file";
+ };
+ };
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/303_encrypted_missing_key.dts b/tools/binman/test/303_encrypted_missing_key.dts
new file mode 100644
index 0000000..d1daaa0
--- /dev/null
+++ b/tools/binman/test/303_encrypted_missing_key.dts
@@ -0,0 +1,23 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ fit {
+ description = "test desc";
+
+ images {
+ u-boot {
+ encrypted {
+ algo = "algo-name";
+ iv-filename = "encrypted-file.iv";
+ };
+ };
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/304_encrypted_key_source.dts b/tools/binman/test/304_encrypted_key_source.dts
new file mode 100644
index 0000000..884ec50
--- /dev/null
+++ b/tools/binman/test/304_encrypted_key_source.dts
@@ -0,0 +1,24 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ fit {
+ description = "test desc";
+
+ images {
+ u-boot {
+ encrypted {
+ algo = "algo-name";
+ key-source = "key-source-value";
+ iv-filename = "encrypted-file.iv";
+ };
+ };
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/305_encrypted_key_file.dts b/tools/binman/test/305_encrypted_key_file.dts
new file mode 100644
index 0000000..efd7ee5
--- /dev/null
+++ b/tools/binman/test/305_encrypted_key_file.dts
@@ -0,0 +1,24 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ fit {
+ description = "test desc";
+
+ images {
+ u-boot {
+ encrypted {
+ algo = "algo-name";
+ iv-filename = "encrypted-file.iv";
+ key-filename = "encrypted-file.key";
+ };
+ };
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/306_spl_pubkey_dtb.dts b/tools/binman/test/306_spl_pubkey_dtb.dts
new file mode 100644
index 0000000..3256ff9
--- /dev/null
+++ b/tools/binman/test/306_spl_pubkey_dtb.dts
@@ -0,0 +1,16 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ u-boot-spl-pubkey-dtb {
+ algo = "sha384,rsa4096";
+ required = "conf";
+ key-name-hint = "key";
+ };
+ };
+};
diff --git a/tools/binman/test/230_dev.key b/tools/binman/test/dev.key
index b36bad2..b36bad2 100644
--- a/tools/binman/test/230_dev.key
+++ b/tools/binman/test/dev.key
diff --git a/tools/buildman/board.py b/tools/buildman/board.py
index 8ef905b..248d8bf 100644
--- a/tools/buildman/board.py
+++ b/tools/buildman/board.py
@@ -17,14 +17,14 @@ class Board:
vendor: Name of vendor (e.g. armltd)
board_name: Name of board (e.g. integrator)
target: Target name (use make <target>_defconfig to configure)
- cfg_name: Config name
+ cfg_name: Config-file name (in include/configs/)
"""
self.target = target
self.arch = arch
self.cpu = cpu
- self.board_name = board_name
- self.vendor = vendor
self.soc = soc
+ self.vendor = vendor
+ self.board_name = board_name
self.cfg_name = cfg_name
self.props = [self.target, self.arch, self.cpu, self.board_name,
self.vendor, self.soc, self.cfg_name]
diff --git a/tools/buildman/boards.py b/tools/buildman/boards.py
index 0bb0723..83adbf1 100644
--- a/tools/buildman/boards.py
+++ b/tools/buildman/boards.py
@@ -50,7 +50,7 @@ def try_remove(fname):
raise
-def output_is_new(output):
+def output_is_new(output, config_dir, srcdir):
"""Check if the output file is up to date.
Looks at defconfig and Kconfig files to make sure none is newer than the
@@ -59,6 +59,8 @@ def output_is_new(output):
Args:
output (str): Filename to check
+ config_dir (str): Directory containing defconfig files
+ srcdir (str): Directory containing Kconfig and MAINTAINERS files
Returns:
True if the given output file exists and is newer than any of
@@ -76,7 +78,7 @@ def output_is_new(output):
return False
raise
- for (dirpath, _, filenames) in os.walk(CONFIG_DIR):
+ for (dirpath, _, filenames) in os.walk(config_dir):
for filename in fnmatch.filter(filenames, '*_defconfig'):
if fnmatch.fnmatch(filename, '.*'):
continue
@@ -84,7 +86,7 @@ def output_is_new(output):
if ctime < os.path.getctime(filepath):
return False
- for (dirpath, _, filenames) in os.walk('.'):
+ for (dirpath, _, filenames) in os.walk(srcdir):
for filename in filenames:
if (fnmatch.fnmatch(filename, '*~') or
not fnmatch.fnmatch(filename, 'Kconfig*') and
@@ -103,7 +105,7 @@ def output_is_new(output):
if line[0] == '#' or line == '\n':
continue
defconfig = line.split()[6] + '_defconfig'
- if not os.path.exists(os.path.join(CONFIG_DIR, defconfig)):
+ if not os.path.exists(os.path.join(config_dir, defconfig)):
return False
return True
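The freshness test above reduces to ctime comparisons against the watched
defconfig, Kconfig and MAINTAINERS files; roughly (a sketch with hypothetical
names)::

    import os

    def is_up_to_date(output, watched_files):
        """'output' is stale if any watched file changed after it was written"""
        ctime = os.path.getctime(output)
        return all(os.path.getctime(fname) <= ctime for fname in watched_files)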
@@ -191,10 +193,10 @@ class KconfigScanner:
# 'target' is added later
}
- def __init__(self):
+ def __init__(self, srctree):
"""Scan all the Kconfig files and create a Kconfig object."""
# Define environment variables referenced from Kconfig
- os.environ['srctree'] = os.getcwd()
+ os.environ['srctree'] = srctree
os.environ['UBOOTVERSION'] = 'dummy'
os.environ['KCONFIG_OBJDIR'] = ''
self._tmpfile = None
@@ -211,40 +213,36 @@ class KconfigScanner:
if self._tmpfile:
try_remove(self._tmpfile)
- def scan(self, defconfig):
+ def scan(self, defconfig, warn_targets):
"""Load a defconfig file to obtain board parameters.
Args:
defconfig (str): path to the defconfig file to be processed
+ warn_targets (bool): True to warn about missing or duplicate
+ CONFIG_TARGET options
Returns:
- A dictionary of board parameters. It has a form of:
- {
- 'arch': <arch_name>,
- 'cpu': <cpu_name>,
- 'soc': <soc_name>,
- 'vendor': <vendor_name>,
- 'board': <board_name>,
- 'target': <target_name>,
- 'config': <config_header_name>,
- }
+ tuple: dictionary of board parameters. It has a form of:
+ {
+ 'arch': <arch_name>,
+ 'cpu': <cpu_name>,
+ 'soc': <soc_name>,
+ 'vendor': <vendor_name>,
+ 'board': <board_name>,
+ 'target': <target_name>,
+ 'config': <config_header_name>,
+ }
+ warnings (list of str): list of warnings found
"""
- # strip special prefixes and save it in a temporary file
- outfd, self._tmpfile = tempfile.mkstemp()
- with os.fdopen(outfd, 'w') as outf:
- with open(defconfig, encoding='utf-8') as inf:
- for line in inf:
- colon = line.find(':CONFIG_')
- if colon == -1:
- outf.write(line)
- else:
- outf.write(line[colon + 1:])
+ leaf = os.path.basename(defconfig)
+ expect_target, match, rear = leaf.partition('_defconfig')
+ assert match and not rear, f'{leaf} : invalid defconfig'
- self._conf.load_config(self._tmpfile)
- try_remove(self._tmpfile)
+ self._conf.load_config(defconfig)
self._tmpfile = None
params = {}
+ warnings = []
# Get the value of CONFIG_SYS_ARCH, CONFIG_SYS_CPU, ... etc.
# Set '-' if the value is empty.
@@ -255,9 +253,23 @@ class KconfigScanner:
else:
params[key] = '-'
- defconfig = os.path.basename(defconfig)
- params['target'], match, rear = defconfig.partition('_defconfig')
- assert match and not rear, f'{defconfig} : invalid defconfig'
+ # Check there is exactly one TARGET_xxx set
+ if warn_targets:
+ target = None
+ for name, sym in self._conf.syms.items():
+ if name.startswith('TARGET_') and sym.str_value == 'y':
+ tname = name[7:].lower()
+ if target:
+ warnings.append(
+ f'WARNING: {leaf}: Duplicate TARGET_xxx: {target} and {tname}')
+ else:
+ target = tname
+
+ if not target:
+ cfg_name = expect_target.replace('-', '_').upper()
+ warnings.append(f'WARNING: {leaf}: No TARGET_{cfg_name} enabled')
+
+ params['target'] = expect_target
# fix-up for aarch64
if params['arch'] == 'arm' and params['cpu'] == 'armv8':
@@ -274,7 +286,7 @@ class KconfigScanner:
else:
params['arch'] = 'riscv64'
- return params
+ return params, warnings
class MaintainersDatabase:
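With this change the target name is derived purely from the defconfig
filename; for example::

    leaf = 'am335x_evm_defconfig'
    target, match, rear = leaf.partition('_defconfig')
    assert match and not rear   # would fail for e.g. 'foo_defconfig.bak'
    # target == 'am335x_evm'; TARGET_AM335X_EVM is the expected Kconfig option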
@@ -332,26 +344,55 @@ class MaintainersDatabase:
str: Maintainers of the board. If the board has two or more
maintainers, they are separated with colons.
"""
- if not target in self.database:
- self.warnings.append(f"WARNING: no maintainers for '{target}'")
- return ''
+ entry = self.database.get(target)
+ if entry:
+ status, maint_list = entry
+ if not status.startswith('Orphan'):
+ if len(maint_list) > 1 or (maint_list and maint_list[0] != '-'):
+ return ':'.join(maint_list)
- return ':'.join(self.database[target][1])
+ self.warnings.append(f"WARNING: no maintainers for '{target}'")
+ return ''
- def parse_file(self, fname):
+ def parse_file(self, srcdir, fname):
"""Parse a MAINTAINERS file.
Parse a MAINTAINERS file and accumulate board status and maintainers
information in the self.database dict.
+ defconfig files are used to specify the target, e.g. xxx_defconfig is
+ used for target 'xxx'. If there is no defconfig file mentioned in the
+ MAINTAINERS file F: entries, then this function does nothing.
+
+ The N: name entries can be used to specify a defconfig file using
+ wildcards.
+
Args:
+ srcdir (str): Directory containing source code (Kconfig files)
fname (str): MAINTAINERS file to be parsed
"""
+ def add_targets(linenum):
+ """Add any new targets
+
+ Args:
+ linenum (int): Current line number
+ """
+ added = False
+ if targets:
+ for target in targets:
+ self.database[target] = (status, maintainers)
+ added = True
+ if not added and (status != '-' and maintainers):
+ leaf = fname[len(srcdir) + 1:]
+ if leaf != 'MAINTAINERS':
+ self.warnings.append(
+ f'WARNING: orphaned defconfig in {leaf} ending at line {linenum + 1}')
+
targets = []
maintainers = []
status = '-'
with open(fname, encoding="utf-8") as inf:
- for line in inf:
+ for linenum, line in enumerate(inf):
# Check also commented maintainers
if line[:3] == '#M:':
line = line[1:]
@@ -360,9 +401,12 @@ class MaintainersDatabase:
maintainers.append(rest)
elif tag == 'F:':
# expand wildcard and filter by 'configs/*_defconfig'
- for item in glob.glob(rest):
+ glob_path = os.path.join(srcdir, rest)
+ for item in glob.glob(glob_path):
front, match, rear = item.partition('configs/')
- if not front and match:
+ if front.endswith('/'):
+ front = front[:-1]
+ if front == srcdir and match:
front, match, rear = rear.rpartition('_defconfig')
if match and not rear:
targets.append(front)
@@ -371,23 +415,26 @@ class MaintainersDatabase:
elif tag == 'N:':
# Just scan the configs directory since that's all we care
# about
- for dirpath, _, fnames in os.walk('configs'):
- for fname in fnames:
- path = os.path.join(dirpath, fname)
+ walk_path = os.walk(os.path.join(srcdir, 'configs'))
+ for dirpath, _, fnames in walk_path:
+ for cfg in fnames:
+ path = os.path.join(dirpath, cfg)[len(srcdir) + 1:]
front, match, rear = path.partition('configs/')
- if not front and match:
- front, match, rear = rear.rpartition('_defconfig')
- if match and not rear:
- targets.append(front)
+ if front or not match:
+ continue
+ front, match, rear = rear.rpartition('_defconfig')
+
+ # Use this entry if it matches the defconfig file
+ # without the _defconfig suffix. For example
+ # 'am335x.*' matches am335x_guardian_defconfig
+ if match and not rear and re.search(rest, front):
+ targets.append(front)
elif line == '\n':
- for target in targets:
- self.database[target] = (status, maintainers)
+ add_targets(linenum)
targets = []
maintainers = []
status = '-'
- if targets:
- for target in targets:
- self.database[target] = (status, maintainers)
+ add_targets(linenum)
class Boards:
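An N: entry is treated as a regular expression matched against the defconfig
name without its `_defconfig` suffix, per the comment in the code above; for
example::

    import re

    # 'am335x.*' selects am335x_guardian_defconfig (target 'am335x_guardian')
    assert re.search('am335x.*', 'am335x_guardian')
    assert not re.search('am335x.*', 'imx8mm_evk')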
@@ -622,39 +669,63 @@ class Boards:
return result, warnings
@classmethod
- def scan_defconfigs_for_multiprocess(cls, queue, defconfigs):
+ def scan_defconfigs_for_multiprocess(cls, srcdir, queue, defconfigs,
+ warn_targets):
"""Scan defconfig files and queue their board parameters
This function is intended to be passed to multiprocessing.Process()
constructor.
Args:
+ srcdir (str): Directory containing source code
queue (multiprocessing.Queue): The resulting board parameters are
written into this.
defconfigs (sequence of str): A sequence of defconfig files to be
scanned.
+ warn_targets (bool): True to warn about missing or duplicate
+ CONFIG_TARGET options
"""
- kconf_scanner = KconfigScanner()
+ kconf_scanner = KconfigScanner(srcdir)
for defconfig in defconfigs:
- queue.put(kconf_scanner.scan(defconfig))
+ queue.put(kconf_scanner.scan(defconfig, warn_targets))
@classmethod
- def read_queues(cls, queues, params_list):
- """Read the queues and append the data to the paramers list"""
+ def read_queues(cls, queues, params_list, warnings):
+ """Read the queues and append the data to the paramers list
+
+ Args:
+ queues (list of multiprocessing.Queue): Queues to read
+ params_list (list of dict): List to add params to
+ warnings (set of str): Set to add warnings to
+ """
for que in queues:
while not que.empty():
- params_list.append(que.get())
+ params, warn = que.get()
+ params_list.append(params)
+ warnings.update(warn)
- def scan_defconfigs(self, jobs=1):
+ def scan_defconfigs(self, config_dir, srcdir, jobs=1, warn_targets=False):
"""Collect board parameters for all defconfig files.
This function invokes multiple processes for faster processing.
Args:
+ config_dir (str): Directory containing the defconfig files
+ srcdir (str): Directory containing source code (Kconfig files)
jobs (int): The number of jobs to run simultaneously
+ warn_targets (bool): True to warn about missing or duplicate
+ CONFIG_TARGET options
+
+ Returns:
+ tuple:
+ list of dict: List of board parameters, each a dict:
+ key: 'arch', 'cpu', 'soc', 'vendor', 'board', 'target',
+ 'config'
+ value: string value of the key
+ list of str: List of warnings recorded
"""
all_defconfigs = []
- for (dirpath, _, filenames) in os.walk(CONFIG_DIR):
+ for (dirpath, _, filenames) in os.walk(config_dir):
for filename in fnmatch.filter(filenames, '*_defconfig'):
if fnmatch.fnmatch(filename, '.*'):
continue
@@ -669,18 +740,19 @@ class Boards:
que = multiprocessing.Queue(maxsize=-1)
proc = multiprocessing.Process(
target=self.scan_defconfigs_for_multiprocess,
- args=(que, defconfigs))
+ args=(srcdir, que, defconfigs, warn_targets))
proc.start()
processes.append(proc)
queues.append(que)
- # The resulting data should be accumulated to this list
+ # The resulting data should be accumulated to these lists
params_list = []
+ warnings = set()
# Data in the queues should be retrieved preriodically.
# Otherwise, the queues would become full and subprocesses would get stuck.
while any(p.is_alive() for p in processes):
- self.read_queues(queues, params_list)
+ self.read_queues(queues, params_list, warnings)
# sleep for a while until the queues are filled
time.sleep(SLEEP_TIME)
@@ -690,12 +762,12 @@ class Boards:
proc.join()
# retrieve leftover data
- self.read_queues(queues, params_list)
+ self.read_queues(queues, params_list, warnings)
- return params_list
+ return params_list, sorted(list(warnings))
@classmethod
- def insert_maintainers_info(cls, params_list):
+ def insert_maintainers_info(cls, srcdir, params_list):
"""Add Status and Maintainers information to the board parameters list.
Args:
@@ -705,16 +777,21 @@ class Boards:
list of str: List of warnings collected due to missing status, etc.
"""
database = MaintainersDatabase()
- for (dirpath, _, filenames) in os.walk('.'):
- if 'MAINTAINERS' in filenames:
- database.parse_file(os.path.join(dirpath, 'MAINTAINERS'))
+ for (dirpath, _, filenames) in os.walk(srcdir):
+ if 'MAINTAINERS' in filenames and 'tools/buildman' not in dirpath:
+ database.parse_file(srcdir,
+ os.path.join(dirpath, 'MAINTAINERS'))
for i, params in enumerate(params_list):
target = params['target']
- params['status'] = database.get_status(target)
- params['maintainers'] = database.get_maintainers(target)
+ maintainers = database.get_maintainers(target)
+ params['maintainers'] = maintainers
+ if maintainers:
+ params['status'] = database.get_status(target)
+ else:
+ params['status'] = '-'
params_list[i] = params
- return database.warnings
+ return sorted(database.warnings)
@classmethod
def format_and_output(cls, params_list, output):
@@ -750,9 +827,40 @@ class Boards:
with open(output, 'w', encoding="utf-8") as outf:
outf.write(COMMENT_BLOCK + '\n'.join(output_lines) + '\n')
+ def build_board_list(self, config_dir=CONFIG_DIR, srcdir='.', jobs=1,
+ warn_targets=False):
+ """Generate a board-database file
+
+ This works by reading the Kconfig, then loading each board's defconfig
+ in to get the setting for each option. In particular, CONFIG_TARGET_xxx
+ is typically set by the defconfig, where xxx is the target to build.
+
+ Args:
+ config_dir (str): Directory containing the defconfig files
+ srcdir (str): Directory containing source code (Kconfig files)
+ jobs (int): The number of jobs to run simultaneously
+ warn_targets (bool): True to warn about missing or duplicate
+ CONFIG_TARGET options
+
+ Returns:
+ tuple:
+ list of dict: List of board parameters, each a dict:
+ key: 'arch', 'cpu', 'soc', 'vendor', 'board', 'config',
+ 'target'
+ value: string value of the key
+ list of str: Warnings that came up
+ """
+ params_list, warnings = self.scan_defconfigs(config_dir, srcdir, jobs,
+ warn_targets)
+ m_warnings = self.insert_maintainers_info(srcdir, params_list)
+ return params_list, warnings + m_warnings
+
def ensure_board_list(self, output, jobs=1, force=False, quiet=False):
"""Generate a board database file if needed.
+ This is intended to check if Kconfig has changed since the boards.cfg
+ file was generated.
+
Args:
output (str): The name of the output file
jobs (int): The number of jobs to run simultaneously
@@ -762,12 +870,11 @@ class Boards:
Returns:
bool: True if all is well, False if there were warnings
"""
- if not force and output_is_new(output):
+ if not force and output_is_new(output, CONFIG_DIR, '.'):
if not quiet:
print(f'{output} is up to date. Nothing to do.')
return True
- params_list = self.scan_defconfigs(jobs)
- warnings = self.insert_maintainers_info(params_list)
+ params_list, warnings = self.build_board_list(CONFIG_DIR, '.', jobs)
for warn in warnings:
print(warn, file=sys.stderr)
self.format_and_output(params_list, output)
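output_is_new() now receives the config and source directories explicitly; conceptually it asks whether any Kconfig or defconfig file is newer than the generated board list. A rough sketch of that staleness test, assuming mtimes are the criterion (the real helper does more work, such as reading the existing file):

    import os

    def output_is_new(output, config_dir, srcdir):
        if not os.path.exists(output):
            return False
        ref = os.path.getmtime(output)
        for base in (config_dir, srcdir):
            for dirpath, _, fnames in os.walk(base):
                for fname in fnames:
                    if (fname.startswith('Kconfig') or
                            fname.endswith('_defconfig')):
                        path = os.path.join(dirpath, fname)
                        if os.path.getmtime(path) > ref:
                            return False  # newer input: regenerate
        return True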
diff --git a/tools/buildman/bsettings.py b/tools/buildman/bsettings.py
index 029c401..f7f8276 100644
--- a/tools/buildman/bsettings.py
+++ b/tools/buildman/bsettings.py
@@ -7,7 +7,7 @@ import io
config_fname = None
-def Setup(fname=''):
+def setup(fname=''):
"""Set up the buildman settings module by reading config files
Args:
@@ -23,15 +23,15 @@ def Setup(fname=''):
config_fname = '%s/.buildman' % os.getenv('HOME')
if not os.path.exists(config_fname):
print('No config file found ~/.buildman\nCreating one...\n')
- CreateBuildmanConfigFile(config_fname)
+ create_buildman_config_file(config_fname)
print('To install tool chains, please use the --fetch-arch option')
if config_fname:
settings.read(config_fname)
-def AddFile(data):
+def add_file(data):
settings.readfp(io.StringIO(data))
-def GetItems(section):
+def get_items(section):
"""Get the items from a section of the config.
Args:
@@ -47,7 +47,7 @@ def GetItems(section):
except:
raise
-def GetGlobalItemValue(name):
+def get_global_item_value(name):
"""Get an item from the 'global' section of the config.
Args:
@@ -58,7 +58,7 @@ def GetGlobalItemValue(name):
"""
return settings.get('global', name, fallback=None)
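After the rename, callers use the snake-case entry points. A hypothetical usage sketch (the [toolchain] section content is illustrative):

    from buildman import bsettings

    bsettings.setup()             # read ~/.buildman, creating it if missing
    bsettings.add_file('[toolchain]\nroot = /usr\n')
    for name, value in bsettings.get_items('toolchain'):
        print(name, value)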
-def SetItem(section, tag, value):
+def set_item(section, tag, value):
"""Set an item and write it back to the settings file"""
global settings
global config_fname
@@ -68,7 +68,7 @@ def SetItem(section, tag, value):
with open(config_fname, 'w') as fd:
settings.write(fd)
-def CreateBuildmanConfigFile(config_fname):
+def create_buildman_config_file(config_fname):
"""Creates a new config file with no tool chain information.
Args:
diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py
index d81752e..ecbd368 100644
--- a/tools/buildman/builder.py
+++ b/tools/buildman/builder.py
@@ -134,7 +134,7 @@ class Config:
for fname in config_filename:
self.config[fname] = {}
- def Add(self, fname, key, value):
+ def add(self, fname, key, value):
self.config[fname][key] = value
def __hash__(self):
@@ -151,7 +151,7 @@ class Environment:
self.target = target
self.environment = {}
- def Add(self, key, value):
+ def add(self, key, value):
self.environment[key] = value
class Builder:
@@ -163,7 +163,8 @@ class Builder:
checkout: True to check out source, False to skip that step.
This is used for testing.
col: terminal.Color() object
- count: Number of commits to build
+ count: Total number of commits to build, which is the number of commits
+ multiplied by the number of boards
do_make: Method to call to invoke Make
fail: Number of builds that failed due to error
force_build: Force building even if a build already exists
@@ -255,7 +256,10 @@ class Builder:
config_only=False, squash_config_y=False,
warnings_as_errors=False, work_in_output=False,
test_thread_exceptions=False, adjust_cfg=None,
- allow_missing=False, no_lto=False, reproducible_builds=False):
+ allow_missing=False, no_lto=False, reproducible_builds=False,
+ force_build=False, force_build_failures=False,
+ force_reconfig=False, in_tree=False,
+ force_config_on_failure=False, make_func=None):
"""Create a new Builder object
Args:
@@ -295,7 +299,14 @@ class Builder:
a string Kconfig
allow_missing: Run build with BINMAN_ALLOW_MISSING=1
no_lto (bool): True to set the NO_LTO flag when building
-
+ force_build (bool): Rebuild even commits that are already built
+ force_build_failures (bool): Rebuild commits that have not been
+ built, or failed to build
+ force_reconfig (bool): Reconfigure on each commit
+ in_tree (bool): Build in-tree instead of out-of-tree
+ force_config_on_failure (bool): Reconfigure the build before
+ retrying a failed build
+ make_func (function): Function to call to run 'make'
"""
self.toolchains = toolchains
self.base_dir = base_dir
@@ -304,7 +315,7 @@ class Builder:
else:
self._working_dir = os.path.join(base_dir, '.bm-work')
self.threads = []
- self.do_make = self.Make
+ self.do_make = make_func or self.make
self.gnu_make = gnu_make
self.checkout = checkout
self.num_threads = num_threads
@@ -318,11 +329,7 @@ class Builder:
self._complete_delay = None
self._next_delay_update = datetime.now()
self._start_time = datetime.now()
- self.force_config_on_failure = True
- self.force_build_failures = False
- self.force_reconfig = False
self._step = step
- self.in_tree = False
self._error_lines = 0
self.no_subdirs = no_subdirs
self.full_path = full_path
@@ -336,6 +343,11 @@ class Builder:
self._ide = False
self.no_lto = no_lto
self.reproducible_builds = reproducible_builds
+ self.force_build = force_build
+ self.force_build_failures = force_build_failures
+ self.force_reconfig = force_reconfig
+ self.in_tree = in_tree
+ self.force_config_on_failure = force_config_on_failure
if not self.squash_config_y:
self.config_filenames += EXTRA_CONFIG_FILENAMES
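The new make_func parameter lets tests substitute a fake make. A sketch of such a test double, assuming only what this patch shows of CommandResult (a no-argument constructor plus return_code and combined attributes):

    from u_boot_pylib import command

    def fake_make(commit, brd, stage, cwd, *args, **kwargs):
        """Record the invocation instead of running GNU make"""
        result = command.CommandResult()
        result.return_code = 0
        result.combined = f"make {stage} {' '.join(args)}\n"
        return result

    # Hypothetical construction; the other Builder arguments are elided:
    # builder = Builder(toolchains, base_dir, None, 1, 'make',
    #                   make_func=fake_make)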
@@ -389,7 +401,7 @@ class Builder:
def signal_handler(self, signal, frame):
sys.exit(1)
- def SetDisplayOptions(self, show_errors=False, show_sizes=False,
+ def set_display_options(self, show_errors=False, show_sizes=False,
show_detail=False, show_bloat=False,
list_error_boards=False, show_config=False,
show_environment=False, filter_dtb_warnings=False,
@@ -422,7 +434,7 @@ class Builder:
self._filter_migration_warnings = filter_migration_warnings
self._ide = ide
- def _AddTimestamp(self):
+ def _add_timestamp(self):
"""Add a new timestamp to the list and record the build period.
The build period is the length of time taken to perform a single
@@ -451,14 +463,14 @@ class Builder:
self._timestamps.popleft()
count -= 1
- def SelectCommit(self, commit, checkout=True):
+ def select_commit(self, commit, checkout=True):
"""Checkout the selected commit for this build
"""
self.commit = commit
if checkout and self.checkout:
gitutil.checkout(commit.hash)
- def Make(self, commit, brd, stage, cwd, *args, **kwargs):
+ def make(self, commit, brd, stage, cwd, *args, **kwargs):
"""Run make
Args:
@@ -503,7 +515,7 @@ class Builder:
result.combined = '%s\n' % (' '.join(cmd)) + result.combined
return result
- def ProcessResult(self, result):
+ def process_result(self, result):
"""Process the result of a build, showing progress information
Args:
@@ -524,8 +536,8 @@ class Builder:
if self._verbose:
terminal.print_clear()
boards_selected = {target : result.brd}
- self.ResetResultSummary(boards_selected)
- self.ProduceResultSummary(result.commit_upto, self.commits,
+ self.reset_result_summary(boards_selected)
+ self.produce_result_summary(result.commit_upto, self.commits,
boards_selected)
else:
target = '(starting)'
@@ -544,7 +556,7 @@ class Builder:
line += ' ' * 8
# Add our current completion time estimate
- self._AddTimestamp()
+ self._add_timestamp()
if self._complete_delay:
line += '%s : ' % self._complete_delay
@@ -553,7 +565,7 @@ class Builder:
terminal.print_clear()
tprint(line, newline=False, limit_to_line=True)
- def _GetOutputDir(self, commit_upto):
+ def get_output_dir(self, commit_upto):
"""Get the name of the output directory for a commit number
The output directory is typically .../<branch>/<commit>.
@@ -568,7 +580,7 @@ class Builder:
if self.commits:
commit = self.commits[commit_upto]
subject = commit.subject.translate(trans_valid_chars)
- # See _GetOutputSpaceRemovals() which parses this name
+ # See _get_output_space_removals() which parses this name
commit_dir = ('%02d_g%s_%s' % (commit_upto + 1,
commit.hash, subject[:20]))
elif not self.no_subdirs:
@@ -577,7 +589,7 @@ class Builder:
return self.base_dir
return os.path.join(self.base_dir, commit_dir)
- def GetBuildDir(self, commit_upto, target):
+ def get_build_dir(self, commit_upto, target):
"""Get the name of the build directory for a commit number
The build directory is typically .../<branch>/<commit>/<target>.
@@ -586,30 +598,30 @@ class Builder:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- output_dir = self._GetOutputDir(commit_upto)
+ output_dir = self.get_output_dir(commit_upto)
if self.work_in_output:
return output_dir
return os.path.join(output_dir, target)
- def GetDoneFile(self, commit_upto, target):
+ def get_done_file(self, commit_upto, target):
"""Get the name of the done file for a commit number
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- return os.path.join(self.GetBuildDir(commit_upto, target), 'done')
+ return os.path.join(self.get_build_dir(commit_upto, target), 'done')
- def GetSizesFile(self, commit_upto, target):
+ def get_sizes_file(self, commit_upto, target):
"""Get the name of the sizes file for a commit number
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- return os.path.join(self.GetBuildDir(commit_upto, target), 'sizes')
+ return os.path.join(self.get_build_dir(commit_upto, target), 'sizes')
- def GetFuncSizesFile(self, commit_upto, target, elf_fname):
+ def get_func_sizes_file(self, commit_upto, target, elf_fname):
"""Get the name of the funcsizes file for a commit number and ELF file
Args:
@@ -617,10 +629,10 @@ class Builder:
target: Target name
elf_fname: Filename of elf image
"""
- return os.path.join(self.GetBuildDir(commit_upto, target),
+ return os.path.join(self.get_build_dir(commit_upto, target),
'%s.sizes' % elf_fname.replace('/', '-'))
- def GetObjdumpFile(self, commit_upto, target, elf_fname):
+ def get_objdump_file(self, commit_upto, target, elf_fname):
"""Get the name of the objdump file for a commit number and ELF file
Args:
@@ -628,20 +640,20 @@ class Builder:
target: Target name
elf_fname: Filename of elf image
"""
- return os.path.join(self.GetBuildDir(commit_upto, target),
+ return os.path.join(self.get_build_dir(commit_upto, target),
'%s.objdump' % elf_fname.replace('/', '-'))
- def GetErrFile(self, commit_upto, target):
+ def get_err_file(self, commit_upto, target):
"""Get the name of the err file for a commit number
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- output_dir = self.GetBuildDir(commit_upto, target)
+ output_dir = self.get_build_dir(commit_upto, target)
return os.path.join(output_dir, 'err')
- def FilterErrors(self, lines):
+ def filter_errors(self, lines):
"""Filter out errors in which we have no interest
We should probably use map().
@@ -664,7 +676,7 @@ class Builder:
out_lines.append(line)
return out_lines
- def ReadFuncSizes(self, fname, fd):
+ def read_func_sizes(self, fname, fd):
"""Read function sizes from the output of 'nm'
Args:
@@ -688,7 +700,7 @@ class Builder:
sym[name] = sym.get(name, 0) + int(size, 16)
return sym
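read_func_sizes() parses 'nm --size-sort' output, summing entries with the same name since a static function can appear in several compilation units. A standalone sketch of the parsing, assuming lines of the form 'size type name':

    def parse_nm_sizes(lines):
        sym = {}
        for line in lines:
            parts = line.split()
            if len(parts) == 3:
                size, _symtype, name = parts
                # Accumulate so duplicate static symbols are summed
                sym[name] = sym.get(name, 0) + int(size, 16)
        return sym

    print(parse_nm_sizes(['0000002c t board_init', '00000040 T main_loop']))
    # {'board_init': 44, 'main_loop': 64}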
- def _ProcessConfig(self, fname):
+ def _process_config(self, fname):
"""Read in a .config, autoconf.mk or autoconf.h file
This function handles all config file types. It ignores comments and
@@ -725,7 +737,7 @@ class Builder:
config[key] = value
return config
- def _ProcessEnvironment(self, fname):
+ def _process_environment(self, fname):
"""Read in a uboot.env file
This function reads in environment variables from a file.
@@ -750,7 +762,7 @@ class Builder:
pass
return environment
- def GetBuildOutcome(self, commit_upto, target, read_func_sizes,
+ def get_build_outcome(self, commit_upto, target, read_func_sizes,
read_config, read_environment):
"""Work out the outcome of a build.
@@ -764,8 +776,8 @@ class Builder:
Returns:
Outcome object
"""
- done_file = self.GetDoneFile(commit_upto, target)
- sizes_file = self.GetSizesFile(commit_upto, target)
+ done_file = self.get_done_file(commit_upto, target)
+ sizes_file = self.get_sizes_file(commit_upto, target)
sizes = {}
func_sizes = {}
config = {}
@@ -779,10 +791,10 @@ class Builder:
# Try a rebuild
return_code = 1
err_lines = []
- err_file = self.GetErrFile(commit_upto, target)
+ err_file = self.get_err_file(commit_upto, target)
if os.path.exists(err_file):
with open(err_file, 'r') as fd:
- err_lines = self.FilterErrors(fd.readlines())
+ err_lines = self.filter_errors(fd.readlines())
# Decide whether the build was ok, failed or created warnings
if return_code:
@@ -811,30 +823,30 @@ class Builder:
sizes[values[5]] = size_dict
if read_func_sizes:
- pattern = self.GetFuncSizesFile(commit_upto, target, '*')
+ pattern = self.get_func_sizes_file(commit_upto, target, '*')
for fname in glob.glob(pattern):
with open(fname, 'r') as fd:
dict_name = os.path.basename(fname).replace('.sizes',
'')
- func_sizes[dict_name] = self.ReadFuncSizes(fname, fd)
+ func_sizes[dict_name] = self.read_func_sizes(fname, fd)
if read_config:
- output_dir = self.GetBuildDir(commit_upto, target)
+ output_dir = self.get_build_dir(commit_upto, target)
for name in self.config_filenames:
fname = os.path.join(output_dir, name)
- config[name] = self._ProcessConfig(fname)
+ config[name] = self._process_config(fname)
if read_environment:
- output_dir = self.GetBuildDir(commit_upto, target)
+ output_dir = self.get_build_dir(commit_upto, target)
fname = os.path.join(output_dir, 'uboot.env')
- environment = self._ProcessEnvironment(fname)
+ environment = self._process_environment(fname)
return Builder.Outcome(rc, err_lines, sizes, func_sizes, config,
environment)
return Builder.Outcome(OUTCOME_UNKNOWN, [], {}, {}, {}, {})
- def GetResultSummary(self, boards_selected, commit_upto, read_func_sizes,
+ def get_result_summary(self, boards_selected, commit_upto, read_func_sizes,
read_config, read_environment):
"""Calculate a summary of the results of building a commit.
@@ -865,7 +877,7 @@ class Builder:
key: environment variable
value: value of environment variable
"""
- def AddLine(lines_summary, lines_boards, line, board):
+ def add_line(lines_summary, lines_boards, line, board):
line = line.rstrip()
if line in lines_boards:
lines_boards[line].append(board)
@@ -882,7 +894,7 @@ class Builder:
environment = {}
for brd in boards_selected.values():
- outcome = self.GetBuildOutcome(commit_upto, brd.target,
+ outcome = self.get_build_outcome(commit_upto, brd.target,
read_func_sizes, read_config,
read_environment)
board_dict[brd.target] = outcome
@@ -899,15 +911,15 @@ class Builder:
is_note = self._re_note.match(line)
if is_warning or (last_was_warning and is_note):
if last_func:
- AddLine(warn_lines_summary, warn_lines_boards,
+ add_line(warn_lines_summary, warn_lines_boards,
last_func, brd)
- AddLine(warn_lines_summary, warn_lines_boards,
+ add_line(warn_lines_summary, warn_lines_boards,
line, brd)
else:
if last_func:
- AddLine(err_lines_summary, err_lines_boards,
+ add_line(err_lines_summary, err_lines_boards,
last_func, brd)
- AddLine(err_lines_summary, err_lines_boards,
+ add_line(err_lines_summary, err_lines_boards,
line, brd)
last_was_warning = is_warning
last_func = None
@@ -915,19 +927,19 @@ class Builder:
for fname in self.config_filenames:
if outcome.config:
for key, value in outcome.config[fname].items():
- tconfig.Add(fname, key, value)
+ tconfig.add(fname, key, value)
config[brd.target] = tconfig
tenvironment = Environment(brd.target)
if outcome.environment:
for key, value in outcome.environment.items():
- tenvironment.Add(key, value)
+ tenvironment.add(key, value)
environment[brd.target] = tenvironment
return (board_dict, err_lines_summary, err_lines_boards,
warn_lines_summary, warn_lines_boards, config, environment)
- def AddOutcome(self, board_dict, arch_list, changes, char, color):
+ def add_outcome(self, board_dict, arch_list, changes, char, color):
"""Add an output to our list of outcomes for each architecture
This simple function adds failing boards (changes) to the
@@ -957,19 +969,19 @@ class Builder:
arch_list[arch] += str
- def ColourNum(self, num):
+ def colour_num(self, num):
color = self.col.RED if num > 0 else self.col.GREEN
if num == 0:
return '0'
return self.col.build(color, str(num))
- def ResetResultSummary(self, board_selected):
+ def reset_result_summary(self, board_selected):
"""Reset the results summary ready for use.
Set up the base board list to be all those selected, and set the
error lines to empty.
- Following this, calls to PrintResultSummary() will use this
+ Following this, calls to print_result_summary() will use this
information to work out what has changed.
Args:
@@ -986,7 +998,7 @@ class Builder:
self._base_config = None
self._base_environment = None
- def PrintFuncSizeDetail(self, fname, old, new):
+ def print_func_size_detail(self, fname, old, new):
grow, shrink, add, remove, up, down = 0, 0, 0, 0, 0, 0
delta, common = [], {}
@@ -1020,7 +1032,7 @@ class Builder:
args = [add, -remove, grow, -shrink, up, -down, up - down]
if max(args) == 0 and min(args) == 0:
return
- args = [self.ColourNum(x) for x in args]
+ args = [self.colour_num(x) for x in args]
indent = ' ' * 15
tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' %
tuple([indent, self.col.build(self.col.YELLOW, fname)] + args))
@@ -1034,7 +1046,7 @@ class Builder:
tprint(msg, colour=color)
- def PrintSizeDetail(self, target_list, show_bloat):
+ def print_size_detail(self, target_list, show_bloat):
"""Show details size information for each board
Args:
@@ -1067,12 +1079,12 @@ class Builder:
outcome = result['_outcome']
base_outcome = self._base_board_dict[target]
for fname in outcome.func_sizes:
- self.PrintFuncSizeDetail(fname,
+ self.print_func_size_detail(fname,
base_outcome.func_sizes[fname],
outcome.func_sizes[fname])
- def PrintSizeSummary(self, board_selected, board_dict, show_detail,
+ def print_size_summary(self, board_selected, board_dict, show_detail,
show_bloat):
"""Print a summary of image sizes broken down by section.
@@ -1173,10 +1185,10 @@ class Builder:
if printed_arch:
tprint()
if show_detail:
- self.PrintSizeDetail(target_list, show_bloat)
+ self.print_size_detail(target_list, show_bloat)
- def PrintResultSummary(self, board_selected, board_dict, err_lines,
+ def print_result_summary(self, board_selected, board_dict, err_lines,
err_line_boards, warn_lines, warn_line_boards,
config, environment, show_sizes, show_detail,
show_bloat, show_config, show_environment):
@@ -1212,7 +1224,7 @@ class Builder:
show_config: Show config changes
show_environment: Show environment changes
"""
- def _BoardList(line, line_boards):
+ def _board_list(line, line_boards):
"""Helper function to get a line of boards containing a line
Args:
@@ -1231,7 +1243,7 @@ class Builder:
board_set.add(brd)
return brds
- def _CalcErrorDelta(base_lines, base_line_boards, lines, line_boards,
+ def _calc_error_delta(base_lines, base_line_boards, lines, line_boards,
char):
"""Calculate the required output based on changes in errors
@@ -1255,17 +1267,17 @@ class Builder:
worse_lines = []
for line in lines:
if line not in base_lines:
- errline = ErrLine(char + '+', _BoardList(line, line_boards),
+ errline = ErrLine(char + '+', _board_list(line, line_boards),
line)
worse_lines.append(errline)
for line in base_lines:
if line not in lines:
errline = ErrLine(char + '-',
- _BoardList(line, base_line_boards), line)
+ _board_list(line, base_line_boards), line)
better_lines.append(errline)
return better_lines, worse_lines
- def _CalcConfig(delta, name, config):
+ def _calc_config(delta, name, config):
"""Calculate configuration changes
Args:
@@ -1283,7 +1295,7 @@ class Builder:
out += '%s=%s ' % (key, config[key])
return '%s %s: %s' % (delta, name, out)
- def _AddConfig(lines, name, config_plus, config_minus, config_change):
+ def _add_config(lines, name, config_plus, config_minus, config_change):
"""Add changes in configuration to a list
Args:
@@ -1300,13 +1312,13 @@ class Builder:
value: config value
"""
if config_plus:
- lines.append(_CalcConfig('+', name, config_plus))
+ lines.append(_calc_config('+', name, config_plus))
if config_minus:
- lines.append(_CalcConfig('-', name, config_minus))
+ lines.append(_calc_config('-', name, config_minus))
if config_change:
- lines.append(_CalcConfig('c', name, config_change))
+ lines.append(_calc_config('c', name, config_change))
- def _OutputConfigInfo(lines):
+ def _output_config_info(lines):
for line in lines:
if not line:
continue
@@ -1318,7 +1330,7 @@ class Builder:
col = self.col.YELLOW
tprint(' ' + line, newline=True, colour=col)
- def _OutputErrLines(err_lines, colour):
+ def _output_err_lines(err_lines, colour):
"""Output the line of error/warning lines, if not empty
Also increments self._error_lines if err_lines not empty
@@ -1376,9 +1388,9 @@ class Builder:
new_boards.append(target)
# Get a list of errors and warnings that have appeared, and disappeared
- better_err, worse_err = _CalcErrorDelta(self._base_err_lines,
+ better_err, worse_err = _calc_error_delta(self._base_err_lines,
self._base_err_line_boards, err_lines, err_line_boards, '')
- better_warn, worse_warn = _CalcErrorDelta(self._base_warn_lines,
+ better_warn, worse_warn = _calc_error_delta(self._base_warn_lines,
self._base_warn_line_boards, warn_lines, warn_line_boards, 'w')
# For the IDE mode, print out all the output
@@ -1391,26 +1403,26 @@ class Builder:
elif any((ok_boards, warn_boards, err_boards, unknown_boards, new_boards,
worse_err, better_err, worse_warn, better_warn)):
arch_list = {}
- self.AddOutcome(board_selected, arch_list, ok_boards, '',
+ self.add_outcome(board_selected, arch_list, ok_boards, '',
self.col.GREEN)
- self.AddOutcome(board_selected, arch_list, warn_boards, 'w+',
+ self.add_outcome(board_selected, arch_list, warn_boards, 'w+',
self.col.YELLOW)
- self.AddOutcome(board_selected, arch_list, err_boards, '+',
+ self.add_outcome(board_selected, arch_list, err_boards, '+',
self.col.RED)
- self.AddOutcome(board_selected, arch_list, new_boards, '*', self.col.BLUE)
+ self.add_outcome(board_selected, arch_list, new_boards, '*', self.col.BLUE)
if self._show_unknown:
- self.AddOutcome(board_selected, arch_list, unknown_boards, '?',
+ self.add_outcome(board_selected, arch_list, unknown_boards, '?',
self.col.MAGENTA)
for arch, target_list in arch_list.items():
tprint('%10s: %s' % (arch, target_list))
self._error_lines += 1
- _OutputErrLines(better_err, colour=self.col.GREEN)
- _OutputErrLines(worse_err, colour=self.col.RED)
- _OutputErrLines(better_warn, colour=self.col.CYAN)
- _OutputErrLines(worse_warn, colour=self.col.YELLOW)
+ _output_err_lines(better_err, colour=self.col.GREEN)
+ _output_err_lines(worse_err, colour=self.col.RED)
+ _output_err_lines(better_warn, colour=self.col.CYAN)
+ _output_err_lines(worse_warn, colour=self.col.YELLOW)
if show_sizes:
- self.PrintSizeSummary(board_selected, board_dict, show_detail,
+ self.print_size_summary(board_selected, board_dict, show_detail,
show_bloat)
if show_environment and self._base_environment:
@@ -1438,10 +1450,10 @@ class Builder:
desc = '%s -> %s' % (value, new_value)
environment_change[key] = desc
- _AddConfig(lines, target, environment_plus, environment_minus,
+ _add_config(lines, target, environment_plus, environment_minus,
environment_change)
- _OutputConfigInfo(lines)
+ _output_config_info(lines)
if show_config and self._base_config:
summary = {}
@@ -1504,9 +1516,9 @@ class Builder:
arch_config_minus[arch][name].update(config_minus)
arch_config_change[arch][name].update(config_change)
- _AddConfig(lines, name, config_plus, config_minus,
+ _add_config(lines, name, config_plus, config_minus,
config_change)
- _AddConfig(lines, 'all', all_config_plus, all_config_minus,
+ _add_config(lines, 'all', all_config_plus, all_config_minus,
all_config_change)
summary[target] = '\n'.join(lines)
@@ -1526,20 +1538,20 @@ class Builder:
all_plus.update(arch_config_plus[arch][name])
all_minus.update(arch_config_minus[arch][name])
all_change.update(arch_config_change[arch][name])
- _AddConfig(lines, name, arch_config_plus[arch][name],
+ _add_config(lines, name, arch_config_plus[arch][name],
arch_config_minus[arch][name],
arch_config_change[arch][name])
- _AddConfig(lines, 'all', all_plus, all_minus, all_change)
+ _add_config(lines, 'all', all_plus, all_minus, all_change)
#arch_summary[target] = '\n'.join(lines)
if lines:
tprint('%s:' % arch)
- _OutputConfigInfo(lines)
+ _output_config_info(lines)
for lines, targets in lines_by_target.items():
if not lines:
continue
tprint('%s :' % ' '.join(sorted(targets)))
- _OutputConfigInfo(lines.split('\n'))
+ _output_config_info(lines.split('\n'))
# Save our updated information for the next call to this function
@@ -1560,9 +1572,9 @@ class Builder:
tprint("Boards not built (%d): %s" % (len(not_built),
', '.join(not_built)))
- def ProduceResultSummary(self, commit_upto, commits, board_selected):
+ def produce_result_summary(self, commit_upto, commits, board_selected):
(board_dict, err_lines, err_line_boards, warn_lines,
- warn_line_boards, config, environment) = self.GetResultSummary(
+ warn_line_boards, config, environment) = self.get_result_summary(
board_selected, commit_upto,
read_func_sizes=self._show_bloat,
read_config=self._show_config,
@@ -1571,13 +1583,13 @@ class Builder:
msg = '%02d: %s' % (commit_upto + 1,
commits[commit_upto].subject)
tprint(msg, colour=self.col.BLUE)
- self.PrintResultSummary(board_selected, board_dict,
+ self.print_result_summary(board_selected, board_dict,
err_lines if self._show_errors else [], err_line_boards,
warn_lines if self._show_errors else [], warn_line_boards,
config, environment, self._show_sizes, self._show_detail,
self._show_bloat, self._show_config, self._show_environment)
- def ShowSummary(self, commits, board_selected):
+ def show_summary(self, commits, board_selected):
"""Show a build summary for U-Boot for a given board list.
Reset the result summary, then repeatedly call GetResultSummary on
@@ -1589,16 +1601,16 @@ class Builder:
"""
self.commit_count = len(commits) if commits else 1
self.commits = commits
- self.ResetResultSummary(board_selected)
+ self.reset_result_summary(board_selected)
self._error_lines = 0
for commit_upto in range(0, self.commit_count, self._step):
- self.ProduceResultSummary(commit_upto, commits, board_selected)
+ self.produce_result_summary(commit_upto, commits, board_selected)
if not self._error_lines:
tprint('(no errors to report)', colour=self.col.GREEN)
- def SetupBuild(self, board_selected, commits):
+ def setup_build(self, board_selected, commits):
"""Set up ready to start a build.
Args:
@@ -1611,7 +1623,7 @@ class Builder:
self.upto = self.warned = self.fail = 0
self._timestamps = collections.deque()
- def GetThreadDir(self, thread_num):
+ def get_thread_dir(self, thread_num):
"""Get the directory path to the working dir for a thread.
Args:
@@ -1622,7 +1634,7 @@ class Builder:
return self._working_dir
return os.path.join(self._working_dir, '%02d' % max(thread_num, 0))
- def _PrepareThread(self, thread_num, setup_git):
+ def _prepare_thread(self, thread_num, setup_git):
"""Prepare the working directory for a thread.
This clones or fetches the repo into the thread's work directory.
@@ -1635,8 +1647,8 @@ class Builder:
'clone' to set up a git clone
'worktree' to set up a git worktree
"""
- thread_dir = self.GetThreadDir(thread_num)
- builderthread.Mkdir(thread_dir)
+ thread_dir = self.get_thread_dir(thread_num)
+ builderthread.mkdir(thread_dir)
git_dir = os.path.join(thread_dir, '.git')
# Create a worktree or a git repo clone for this thread if it
@@ -1672,7 +1684,7 @@ class Builder:
else:
raise ValueError("Can't setup git repo with %s." % setup_git)
- def _PrepareWorkingSpace(self, max_threads, setup_git):
+ def _prepare_working_space(self, max_threads, setup_git):
"""Prepare the working directory for use.
Set up the git repo for each thread. Creates a linked working tree
@@ -1684,7 +1696,7 @@ class Builder:
work
setup_git: True to set up a git worktree or a git clone
"""
- builderthread.Mkdir(self._working_dir)
+ builderthread.mkdir(self._working_dir)
if setup_git and self.git_dir:
src_dir = os.path.abspath(self.git_dir)
if gitutil.check_worktree_is_available(src_dir):
@@ -1698,14 +1710,14 @@ class Builder:
# Always do at least one thread
for thread in range(max(max_threads, 1)):
- self._PrepareThread(thread, setup_git)
+ self._prepare_thread(thread, setup_git)
- def _GetOutputSpaceRemovals(self):
+ def _get_output_space_removals(self):
"""Get the output directories ready to receive files.
Figure out what needs to be deleted in the output directory before it
can be used. We only delete old buildman directories which have the
- expected name pattern. See _GetOutputDir().
+ expected name pattern. See get_output_dir().
Returns:
List of full paths of directories to remove
@@ -1714,7 +1726,7 @@ class Builder:
return
dir_list = []
for commit_upto in range(self.commit_count):
- dir_list.append(self._GetOutputDir(commit_upto))
+ dir_list.append(self.get_output_dir(commit_upto))
to_remove = []
for dirname in glob.glob(os.path.join(self.base_dir, '*')):
@@ -1725,14 +1737,14 @@ class Builder:
to_remove.append(dirname)
return to_remove
- def _PrepareOutputSpace(self):
+ def _prepare_output_space(self):
"""Get the output directories ready to receive files.
We delete any output directories which look like ones we need to
create. Having left over directories is confusing when the user wants
to check the output manually.
"""
- to_remove = self._GetOutputSpaceRemovals()
+ to_remove = self._get_output_space_removals()
if to_remove:
tprint('Removing %d old build directories...' % len(to_remove),
newline=False)
@@ -1740,7 +1752,7 @@ class Builder:
shutil.rmtree(dirname)
terminal.print_clear()
- def BuildBoards(self, commits, board_selected, keep_outputs, verbose):
+ def build_boards(self, commits, board_selected, keep_outputs, verbose):
"""Build all commits for a list of boards
Args:
@@ -1759,15 +1771,15 @@ class Builder:
self.commits = commits
self._verbose = verbose
- self.ResetResultSummary(board_selected)
- builderthread.Mkdir(self.base_dir, parents = True)
- self._PrepareWorkingSpace(min(self.num_threads, len(board_selected)),
+ self.reset_result_summary(board_selected)
+ builderthread.mkdir(self.base_dir, parents = True)
+ self._prepare_working_space(min(self.num_threads, len(board_selected)),
commits is not None)
- self._PrepareOutputSpace()
+ self._prepare_output_space()
if not self._ide:
tprint('\rStarting build...', newline=False)
- self.SetupBuild(board_selected, commits)
- self.ProcessResult(None)
+ self.setup_build(board_selected, commits)
+ self.process_result(None)
self.thread_exceptions = []
# Create jobs to build all commits for each board
for brd in board_selected.values():
@@ -1781,7 +1793,7 @@ class Builder:
if self.num_threads:
self.queue.put(job)
else:
- self._single_builder.RunJob(job)
+ self._single_builder.run_job(job)
if self.num_threads:
term = threading.Thread(target=self.queue.join)
diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py
index 635865c..25f460c 100644
--- a/tools/buildman/builderthread.py
+++ b/tools/buildman/builderthread.py
@@ -2,8 +2,15 @@
# Copyright (c) 2014 Google, Inc
#
+"""Implementation the bulider threads
+
+This module provides the BuilderThread class, which handles calling the builder
+based on the jobs provided.
+"""
+
import errno
import glob
+import io
import os
import shutil
import sys
@@ -16,11 +23,15 @@ from u_boot_pylib import command
RETURN_CODE_RETRY = -1
BASE_ELF_FILENAMES = ['u-boot', 'spl/u-boot-spl', 'tpl/u-boot-tpl']
-def Mkdir(dirname, parents = False):
+def mkdir(dirname, parents=False):
"""Make a directory if it doesn't already exist.
Args:
- dirname: Directory to create
+ dirname (str): Directory to create
+ parents (bool): True to also make parent directories
+
+ Raises:
+ OSError: File already exists
"""
try:
if parents:
@@ -30,12 +41,51 @@ def Mkdir(dirname, parents = False):
except OSError as err:
if err.errno == errno.EEXIST:
if os.path.realpath('.') == os.path.realpath(dirname):
- print("Cannot create the current working directory '%s'!" % dirname)
+ print(f"Cannot create the current working directory '{dirname}'!")
sys.exit(1)
- pass
else:
raise
+
+def _remove_old_outputs(out_dir):
+ """Remove any old output-target files
+
+ Since we use a build directory that was previously used by another
+ board, it may have produced an SPL image. If we don't remove it (i.e.
+ see do_config and self.mrproper below) then it will appear to be the
+ output of this build, even if it does not produce SPL images.
+
+ Args:
+ out_dir (str): Output directory for the build
+ """
+ for elf in BASE_ELF_FILENAMES:
+ fname = os.path.join(out_dir, elf)
+ if os.path.exists(fname):
+ os.remove(fname)
+
+
+def copy_files(out_dir, build_dir, dirname, patterns):
+ """Copy files from the build directory to the output.
+
+ Args:
+ out_dir (str): Directory containing the built files (copy source)
+ build_dir (str): Directory to copy the files into (copy destination)
+ dirname (str): Source directory, '' for normal U-Boot, 'spl' for SPL
+ patterns (list of str): A list of filenames to copy, each relative
+ to the build directory
+ """
+ for pattern in patterns:
+ file_list = glob.glob(os.path.join(out_dir, dirname, pattern))
+ for fname in file_list:
+ target = os.path.basename(fname)
+ if dirname:
+ base, ext = os.path.splitext(target)
+ if ext:
+ target = f'{base}-{dirname}{ext}'
+ shutil.copy(fname, os.path.join(build_dir, target))
+
+
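The renaming rule folds the subdirectory name into the file name, so SPL/TPL copies cannot collide with the top-level ones. A hypothetical call (paths illustrative):

    # Copies <out_dir>/spl/u-boot.cfg to <build_dir>/u-boot-spl.cfg
    copy_files('/tmp/.bm-work/00/build', '/tmp/out/current/sandbox',
               'spl', ['u-boot.cfg'])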
+# pylint: disable=R0903
class BuilderJob:
"""Holds information about a job to be performed by a thread
@@ -77,7 +127,7 @@ class ResultThread(threading.Thread):
"""
while True:
result = self.builder.out_queue.get()
- self.builder.ProcessResult(result)
+ self.builder.process_result(result)
self.builder.out_queue.task_done()
@@ -107,22 +157,25 @@ class BuilderThread(threading.Thread):
self.mrproper = mrproper
self.per_board_out_dir = per_board_out_dir
self.test_exception = test_exception
+ self.toolchain = None
- def Make(self, commit, brd, stage, cwd, *args, **kwargs):
+ def make(self, commit, brd, stage, cwd, *args, **kwargs):
"""Run 'make' on a particular commit and board.
The source code will already be checked out, so the 'commit'
argument is only for information.
Args:
- commit: Commit object that is being built
- brd: Board object that is being built
- stage: Stage of the build. Valid stages are:
+ commit (Commit): Commit that is being built
+ brd (Board): Board that is being built
+ stage (str): Stage of the build. Valid stages are:
mrproper - can be called to clean source
config - called to configure for a board
build - the main make invocation - it does the build
- args: A list of arguments to pass to 'make'
- kwargs: A list of keyword arguments to pass to command.run_pipe()
+ cwd (str): Working directory to set, or None to leave it alone
+ *args (list of str): Arguments to pass to 'make'
+ **kwargs (dict): Keyword arguments to pass to
+ command.run_pipe()
Returns:
CommandResult object
@@ -130,61 +183,140 @@ class BuilderThread(threading.Thread):
return self.builder.do_make(commit, brd, stage, cwd, *args,
**kwargs)
- def RunCommit(self, commit_upto, brd, work_dir, do_config, config_only,
- force_build, force_build_failures, work_in_output,
- adjust_cfg):
- """Build a particular commit.
-
- If the build is already done, and we are not forcing a build, we skip
- the build and just return the previously-saved results.
+ def _build_args(self, brd, out_dir, out_rel_dir, work_dir, commit_upto):
+ """Set up arguments to the args list based on the settings
Args:
- commit_upto: Commit number to build (0...n-1)
- brd: Board object to build
- work_dir: Directory to which the source will be checked out
- do_config: True to run a make <board>_defconfig on the source
- config_only: Only configure the source, do not build it
- force_build: Force a build even if one was previously done
- force_build_failures: Force a bulid if the previous result showed
- failure
- work_in_output: Use the output directory as the work directory and
- don't write to a separate output directory.
- adjust_cfg (list of str): List of changes to make to .config file
- before building. Each is one of (where C is either CONFIG_xxx
- or just xxx):
- C to enable C
- ~C to disable C
- C=val to set the value of C (val must have quotes if C is
- a string Kconfig
+ brd (Board): Board to create arguments for
+ out_dir (str): Path to output directory containing the files
+ out_rel_dir (str): Output directory relative to the current dir
+ work_dir (str): Directory to which the source will be checked out
+ commit_upto (int): Commit number to build (0...n-1)
Returns:
- tuple containing:
- - CommandResult object containing the results of the build
- - boolean indicating whether 'make config' is still needed
+ tuple:
+ list of str: Arguments to pass to make
+ str: Current working directory, or None if no commit
+ str: Source directory (typically the work directory)
"""
- # Create a default result - it will be overwritte by the call to
- # self.Make() below, in the event that we do a build.
- result = command.CommandResult()
- result.return_code = 0
- if work_in_output or self.builder.in_tree:
- out_dir = work_dir
- else:
- if self.per_board_out_dir:
- out_rel_dir = os.path.join('..', brd.target)
+ args = []
+ cwd = work_dir
+ src_dir = os.path.realpath(work_dir)
+ if not self.builder.in_tree:
+ if commit_upto is None:
+ # In this case we are building in the original source directory
+ # (i.e. the current directory where buildman is invoked). The
+ # output directory is set to this thread's selected work
+ # directory.
+ #
+ # Symlinks can confuse U-Boot's Makefile since we may use '..'
+ # in our path, so remove them.
+ real_dir = os.path.realpath(out_dir)
+ args.append(f'O={real_dir}')
+ cwd = None
+ src_dir = os.getcwd()
else:
- out_rel_dir = 'build'
- out_dir = os.path.join(work_dir, out_rel_dir)
+ args.append(f'O={out_rel_dir}')
+ if self.builder.verbose_build:
+ args.append('V=1')
+ else:
+ args.append('-s')
+ if self.builder.num_jobs is not None:
+ args.extend(['-j', str(self.builder.num_jobs)])
+ if self.builder.warnings_as_errors:
+ args.append('KCFLAGS=-Werror')
+ args.append('HOSTCFLAGS=-Werror')
+ if self.builder.allow_missing:
+ args.append('BINMAN_ALLOW_MISSING=1')
+ if self.builder.no_lto:
+ args.append('NO_LTO=1')
+ if self.builder.reproducible_builds:
+ args.append('SOURCE_DATE_EPOCH=0')
+ args.extend(self.builder.toolchains.GetMakeArguments(brd))
+ args.extend(self.toolchain.MakeArgs())
+ return args, cwd, src_dir
+
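Condensed, the flag accumulation for an out-of-tree build of a numbered commit behaves like this sketch (toolchain arguments omitted, values illustrative):

    def expected_make_args(verbose_build=False, num_jobs=8, no_lto=True):
        args = ['O=build']                     # build in work_dir/build
        args.append('V=1' if verbose_build else '-s')
        args.extend(['-j', str(num_jobs)])
        if no_lto:
            args.append('NO_LTO=1')
        return args

    print(expected_make_args())  # ['O=build', '-s', '-j', '8', 'NO_LTO=1']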
+ def _reconfigure(self, commit, brd, cwd, args, env, config_args, config_out,
+ cmd_list):
+ """Reconfigure the build
- # Check if the job was already completed last time
- done_file = self.builder.GetDoneFile(commit_upto, brd.target)
+ Args:
+ commit (Commit): Commit being built
+ brd (Board): Board being built
+ cwd (str): Current working directory
+ args (list of str): Arguments to pass to make
+ env (dict): Environment strings
+ config_args (list of str): defconfig arg for this board
+ cmd_list (list of str): List to add the commands to, for logging
+
+ Returns:
+ CommandResult object
+ """
+ if self.mrproper:
+ result = self.make(commit, brd, 'mrproper', cwd, 'mrproper', *args,
+ env=env)
+ config_out.write(result.combined)
+ cmd_list.append([self.builder.gnu_make, 'mrproper', *args])
+ result = self.make(commit, brd, 'config', cwd, *(args + config_args),
+ env=env)
+ cmd_list.append([self.builder.gnu_make] + args + config_args)
+ config_out.write(result.combined)
+ return result
+
+ def _build(self, commit, brd, cwd, args, env, cmd_list, config_only):
+ """Perform the build
+
+ Args:
+ commit (Commit): Commit being built
+ brd (Board): Board being built
+ cwd (str): Current working directory
+ args (list of str): Arguments to pass to make
+ env (dict): Environment strings
+ cmd_list (list of str): List to add the commands to, for logging
+ config_only (bool): True if this is a config-only build (using the
+ 'make cfg' target)
+
+ Returns:
+ CommandResult object
+ """
+ if config_only:
+ args.append('cfg')
+ result = self.make(commit, brd, 'build', cwd, *args, env=env)
+ cmd_list.append([self.builder.gnu_make] + args)
+ if (result.return_code == 2 and
+ ('Some images are invalid' in result.stderr)):
+ # This is handled later by the check for output in stderr
+ result.return_code = 0
+ return result
+
+ def _read_done_file(self, commit_upto, brd, force_build,
+ force_build_failures):
+ """Check the 'done' file and see if this commit should be built
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ brd (Board): Board being built
+ force_build (bool): Force a build even if one was previously done
+ force_build_failures (bool): Force a build if the previous result
+ showed failure
+
+ Returns:
+ tuple:
+ bool: True if the commit should be built
+ CommandResult: if there was a previous run:
+ - already_done set to True
+ - return_code set to return code
+ - result.stderr set to 'bad' if stderr output was recorded
+ """
+ result = command.CommandResult()
+ done_file = self.builder.get_done_file(commit_upto, brd.target)
result.already_done = os.path.exists(done_file)
will_build = (force_build or force_build_failures or
not result.already_done)
if result.already_done:
- # Get the return code from that build and use it
- with open(done_file, 'r') as fd:
+ with open(done_file, 'r', encoding='utf-8') as outf:
try:
- result.return_code = int(fd.readline())
+ result.return_code = int(outf.readline())
except ValueError:
# The file may be empty due to running out of disk space.
# Try a rebuild
@@ -194,12 +326,155 @@ class BuilderThread(threading.Thread):
if result.return_code == RETURN_CODE_RETRY:
will_build = True
elif will_build:
- err_file = self.builder.GetErrFile(commit_upto, brd.target)
+ err_file = self.builder.get_err_file(commit_upto, brd.target)
if os.path.exists(err_file) and os.stat(err_file).st_size:
result.stderr = 'bad'
elif not force_build:
# The build passed, so no need to build it again
will_build = False
+ return will_build, result
+
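A condensed sketch of the decision _read_done_file() implements (the err-file check is omitted; RETURN_CODE_RETRY marks a build that may have been interrupted):

    import os

    RETURN_CODE_RETRY = -1

    def should_build(done_file, force_build=False,
                     force_build_failures=False):
        if not os.path.exists(done_file):
            return True               # never built before
        try:
            with open(done_file, encoding='utf-8') as inf:
                return_code = int(inf.readline())
        except ValueError:
            return True               # empty file (e.g. disk full): retry
        if return_code == RETURN_CODE_RETRY:
            return True               # earlier run may have been aborted
        if return_code and force_build_failures:
            return True               # rebuild known failures
        return force_build            # otherwise only when forced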
+ def _decide_dirs(self, brd, work_dir, work_in_output):
+ """Decide the output directory to use
+
+ Args:
+ brd (Board): Board being built
+ work_dir (str): Directory to which the source will be checked out
+ work_in_output (bool): Use the output directory as the work
+ directory and don't write to a separate output directory.
+
+ Returns:
+ tuple:
+ out_dir (str): Output directory for the build
+ out_rel_dir (str): Output directory relative to the current dir
+ """
+ if work_in_output or self.builder.in_tree:
+ out_rel_dir = None
+ out_dir = work_dir
+ else:
+ if self.per_board_out_dir:
+ out_rel_dir = os.path.join('..', brd.target)
+ else:
+ out_rel_dir = 'build'
+ out_dir = os.path.join(work_dir, out_rel_dir)
+ return out_dir, out_rel_dir
+
+ def _checkout(self, commit_upto, work_dir):
+ """Checkout the right commit
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ work_dir (str): Directory to which the source will be checked out
+
+ Returns:
+ Commit: Commit being built, or 'current' for current source
+ """
+ if self.builder.commits:
+ commit = self.builder.commits[commit_upto]
+ if self.builder.checkout:
+ git_dir = os.path.join(work_dir, '.git')
+ gitutil.checkout(commit.hash, git_dir, work_dir, force=True)
+ else:
+ commit = 'current'
+ return commit
+
+ def _config_and_build(self, commit_upto, brd, work_dir, do_config,
+ config_only, adjust_cfg, commit, out_dir, out_rel_dir,
+ result):
+ """Do the build, configuring first if necessary
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ brd (Board): Board to create arguments for
+ work_dir (str): Directory to which the source will be checked out
+ do_config (bool): True to run a make <board>_defconfig on the source
+ config_only (bool): Only configure the source, do not build it
+ adjust_cfg (list of str): See the cfgutil module and run_commit()
+ commit (Commit): Commit being built
+ out_dir (str): Output directory for the build
+ out_rel_dir (str): Output directory relative to the current dir
+ result (CommandResult): Previous result
+
+ Returns:
+ tuple:
+ result (CommandResult): Result of the build
+ do_config (bool): indicates whether 'make config' is needed on
+ the next incremental build
+ """
+ # Set up the environment and command line
+ env = self.toolchain.MakeEnvironment(self.builder.full_path)
+ mkdir(out_dir)
+
+ args, cwd, src_dir = self._build_args(brd, out_dir, out_rel_dir,
+ work_dir, commit_upto)
+ config_args = [f'{brd.target}_defconfig']
+ config_out = io.StringIO()
+
+ _remove_old_outputs(out_dir)
+
+ # If we need to reconfigure, do that now
+ cfg_file = os.path.join(out_dir, '.config')
+ cmd_list = []
+ if do_config or adjust_cfg:
+ result = self._reconfigure(
+ commit, brd, cwd, args, env, config_args, config_out, cmd_list)
+ do_config = False # No need to configure next time
+ if adjust_cfg:
+ cfgutil.adjust_cfg_file(cfg_file, adjust_cfg)
+
+ # Now do the build, if everything looks OK
+ if result.return_code == 0:
+ result = self._build(commit, brd, cwd, args, env, cmd_list,
+ config_only)
+ if adjust_cfg:
+ errs = cfgutil.check_cfg_file(cfg_file, adjust_cfg)
+ if errs:
+ result.stderr += errs
+ result.return_code = 1
+ result.stderr = result.stderr.replace(src_dir + '/', '')
+ if self.builder.verbose_build:
+ result.stdout = config_out.getvalue() + result.stdout
+ result.cmd_list = cmd_list
+ return result, do_config
+
+ def run_commit(self, commit_upto, brd, work_dir, do_config, config_only,
+ force_build, force_build_failures, work_in_output,
+ adjust_cfg):
+ """Build a particular commit.
+
+ If the build is already done, and we are not forcing a build, we skip
+ the build and just return the previously-saved results.
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ brd (Board): Board to build
+ work_dir (str): Directory to which the source will be checked out
+ do_config (bool): True to run a make <board>_defconfig on the source
+ config_only (bool): Only configure the source, do not build it
+ force_build (bool): Force a build even if one was previously done
+ force_build_failures (bool): Force a build if the previous result
+ showed failure
+ work_in_output (bool): Use the output directory as the work
+ directory and don't write to a separate output directory.
+ adjust_cfg (list of str): List of changes to make to .config file
+ before building. Each is one of (where C is either CONFIG_xxx
+ or just xxx):
+ C to enable C
+ ~C to disable C
+ C=val to set the value of C (val must have quotes if C is
+ a string Kconfig option)
+
+ Returns:
+ tuple containing:
+ - CommandResult object containing the results of the build
+ - boolean indicating whether 'make config' is still needed
+ """
+ # Create a default result - it will be overwritten by the call to
+ # self.make() below, in the event that we do a build.
+ out_dir, out_rel_dir = self._decide_dirs(brd, work_dir, work_in_output)
+
+ # Check if the job was already completed last time
+ will_build, result = self._read_done_file(commit_upto, brd, force_build,
+ force_build_failures)
if will_build:
# We are going to have to build it. First, get a toolchain
@@ -209,115 +484,13 @@ class BuilderThread(threading.Thread):
except ValueError as err:
result.return_code = 10
result.stdout = ''
- result.stderr = str(err)
- # TODO(sjg@chromium.org): This gets swallowed, but needs
- # to be reported.
+ result.stderr = f'Tool chain error for {brd.arch}: {str(err)}'
if self.toolchain:
- # Checkout the right commit
- if self.builder.commits:
- commit = self.builder.commits[commit_upto]
- if self.builder.checkout:
- git_dir = os.path.join(work_dir, '.git')
- gitutil.checkout(commit.hash, git_dir, work_dir,
- force=True)
- else:
- commit = 'current'
-
- # Set up the environment and command line
- env = self.toolchain.MakeEnvironment(self.builder.full_path)
- Mkdir(out_dir)
- args = []
- cwd = work_dir
- src_dir = os.path.realpath(work_dir)
- if not self.builder.in_tree:
- if commit_upto is None:
- # In this case we are building in the original source
- # directory (i.e. the current directory where buildman
- # is invoked. The output directory is set to this
- # thread's selected work directory.
- #
- # Symlinks can confuse U-Boot's Makefile since
- # we may use '..' in our path, so remove them.
- out_dir = os.path.realpath(out_dir)
- args.append('O=%s' % out_dir)
- cwd = None
- src_dir = os.getcwd()
- else:
- args.append('O=%s' % out_rel_dir)
- if self.builder.verbose_build:
- args.append('V=1')
- else:
- args.append('-s')
- if self.builder.num_jobs is not None:
- args.extend(['-j', str(self.builder.num_jobs)])
- if self.builder.warnings_as_errors:
- args.append('KCFLAGS=-Werror')
- args.append('HOSTCFLAGS=-Werror')
- if self.builder.allow_missing:
- args.append('BINMAN_ALLOW_MISSING=1')
- if self.builder.no_lto:
- args.append('NO_LTO=1')
- if self.builder.reproducible_builds:
- args.append('SOURCE_DATE_EPOCH=0')
- config_args = ['%s_defconfig' % brd.target]
- config_out = ''
- args.extend(self.builder.toolchains.GetMakeArguments(brd))
- args.extend(self.toolchain.MakeArgs())
-
- # Remove any output targets. Since we use a build directory that
- # was previously used by another board, it may have produced an
- # SPL image. If we don't remove it (i.e. see do_config and
- # self.mrproper below) then it will appear to be the output of
- # this build, even if it does not produce SPL images.
- build_dir = self.builder.GetBuildDir(commit_upto, brd.target)
- for elf in BASE_ELF_FILENAMES:
- fname = os.path.join(out_dir, elf)
- if os.path.exists(fname):
- os.remove(fname)
-
- # If we need to reconfigure, do that now
- cfg_file = os.path.join(out_dir, '.config')
- cmd_list = []
- if do_config or adjust_cfg:
- config_out = ''
- if self.mrproper:
- result = self.Make(commit, brd, 'mrproper', cwd,
- 'mrproper', *args, env=env)
- config_out += result.combined
- cmd_list.append([self.builder.gnu_make, 'mrproper',
- *args])
- result = self.Make(commit, brd, 'config', cwd,
- *(args + config_args), env=env)
- cmd_list.append([self.builder.gnu_make] + args +
- config_args)
- config_out += result.combined
- do_config = False # No need to configure next time
- if adjust_cfg:
- cfgutil.adjust_cfg_file(cfg_file, adjust_cfg)
- if result.return_code == 0:
- if config_only:
- args.append('cfg')
- result = self.Make(commit, brd, 'build', cwd, *args,
- env=env)
- cmd_list.append([self.builder.gnu_make] + args)
- if (result.return_code == 2 and
- ('Some images are invalid' in result.stderr)):
- # This is handled later by the check for output in
- # stderr
- result.return_code = 0
- if adjust_cfg:
- errs = cfgutil.check_cfg_file(cfg_file, adjust_cfg)
- if errs:
- result.stderr += errs
- result.return_code = 1
- result.stderr = result.stderr.replace(src_dir + '/', '')
- if self.builder.verbose_build:
- result.stdout = config_out + result.stdout
- result.cmd_list = cmd_list
- else:
- result.return_code = 1
- result.stderr = 'No tool chain for %s\n' % brd.arch
+ commit = self._checkout(commit_upto, work_dir)
+ result, do_config = self._config_and_build(
+ commit_upto, brd, work_dir, do_config, config_only,
+ adjust_cfg, commit, out_dir, out_rel_dir, result)
result.already_done = False
result.toolchain = self.toolchain
@@ -326,15 +499,15 @@ class BuilderThread(threading.Thread):
result.out_dir = out_dir
return result, do_config
- def _WriteResult(self, result, keep_outputs, work_in_output):
+ def _write_result(self, result, keep_outputs, work_in_output):
"""Write a built result to the output directory.
Args:
- result: CommandResult object containing result to write
- keep_outputs: True to store the output binaries, False
+ result (CommandResult): result to write
+ keep_outputs (bool): True to store the output binaries, False
to delete them
- work_in_output: Use the output directory as the work directory and
- don't write to a separate output directory.
+ work_in_output (bool): Use the output directory as the work
+ directory and don't write to a separate output directory.
"""
# If we think this might have been aborted with Ctrl-C, record the
# failure but not that we are 'done' with this board. A retry may fix
@@ -345,22 +518,22 @@ class BuilderThread(threading.Thread):
return
# Write the output and stderr
- output_dir = self.builder._GetOutputDir(result.commit_upto)
- Mkdir(output_dir)
- build_dir = self.builder.GetBuildDir(result.commit_upto,
+ output_dir = self.builder.get_output_dir(result.commit_upto)
+ mkdir(output_dir)
+ build_dir = self.builder.get_build_dir(result.commit_upto,
result.brd.target)
- Mkdir(build_dir)
+ mkdir(build_dir)
outfile = os.path.join(build_dir, 'log')
- with open(outfile, 'w') as fd:
+ with open(outfile, 'w', encoding='utf-8') as outf:
if result.stdout:
- fd.write(result.stdout)
+ outf.write(result.stdout)
- errfile = self.builder.GetErrFile(result.commit_upto,
+ errfile = self.builder.get_err_file(result.commit_upto,
result.brd.target)
if result.stderr:
- with open(errfile, 'w') as fd:
- fd.write(result.stderr)
+ with open(errfile, 'w', encoding='utf-8') as outf:
+ outf.write(result.stderr)
elif os.path.exists(errfile):
os.remove(errfile)
@@ -370,60 +543,61 @@ class BuilderThread(threading.Thread):
if result.toolchain:
# Write the build result and toolchain information.
- done_file = self.builder.GetDoneFile(result.commit_upto,
+ done_file = self.builder.get_done_file(result.commit_upto,
result.brd.target)
- with open(done_file, 'w') as fd:
+ with open(done_file, 'w', encoding='utf-8') as outf:
if maybe_aborted:
# Special code to indicate we need to retry
- fd.write('%s' % RETURN_CODE_RETRY)
+ outf.write(f'{RETURN_CODE_RETRY}')
else:
- fd.write('%s' % result.return_code)
- with open(os.path.join(build_dir, 'toolchain'), 'w') as fd:
- print('gcc', result.toolchain.gcc, file=fd)
- print('path', result.toolchain.path, file=fd)
- print('cross', result.toolchain.cross, file=fd)
- print('arch', result.toolchain.arch, file=fd)
- fd.write('%s' % result.return_code)
+ outf.write(f'{result.return_code}')
+ with open(os.path.join(build_dir, 'toolchain'), 'w',
+ encoding='utf-8') as outf:
+ print('gcc', result.toolchain.gcc, file=outf)
+ print('path', result.toolchain.path, file=outf)
+ print('cross', result.toolchain.cross, file=outf)
+ print('arch', result.toolchain.arch, file=outf)
+ outf.write(f'{result.return_code}')
# Write out the image and function size information and an objdump
env = result.toolchain.MakeEnvironment(self.builder.full_path)
- with open(os.path.join(build_dir, 'out-env'), 'wb') as fd:
+ with open(os.path.join(build_dir, 'out-env'), 'wb') as outf:
for var in sorted(env.keys()):
- fd.write(b'%s="%s"' % (var, env[var]))
+ outf.write(b'%s="%s"' % (var, env[var]))
with open(os.path.join(build_dir, 'out-cmd'), 'w',
- encoding='utf-8') as fd:
+ encoding='utf-8') as outf:
for cmd in result.cmd_list:
- print(' '.join(cmd), file=fd)
+ print(' '.join(cmd), file=outf)
lines = []
for fname in BASE_ELF_FILENAMES:
- cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
+ cmd = [f'{self.toolchain.cross}nm', '--size-sort', fname]
nm_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
if nm_result.stdout:
- nm = self.builder.GetFuncSizesFile(result.commit_upto,
- result.brd.target, fname)
- with open(nm, 'w') as fd:
- print(nm_result.stdout, end=' ', file=fd)
+ nm_fname = self.builder.get_func_sizes_file(
+ result.commit_upto, result.brd.target, fname)
+ with open(nm_fname, 'w', encoding='utf-8') as outf:
+ print(nm_result.stdout, end=' ', file=outf)
- cmd = ['%sobjdump' % self.toolchain.cross, '-h', fname]
+ cmd = [f'{self.toolchain.cross}objdump', '-h', fname]
dump_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
rodata_size = ''
if dump_result.stdout:
- objdump = self.builder.GetObjdumpFile(result.commit_upto,
+ objdump = self.builder.get_objdump_file(result.commit_upto,
result.brd.target, fname)
- with open(objdump, 'w') as fd:
- print(dump_result.stdout, end=' ', file=fd)
+ with open(objdump, 'w', encoding='utf-8') as outf:
+ print(dump_result.stdout, end=' ', file=outf)
for line in dump_result.stdout.splitlines():
fields = line.split()
if len(fields) > 5 and fields[1] == '.rodata':
rodata_size = fields[2]
- cmd = ['%ssize' % self.toolchain.cross, fname]
+ cmd = [f'{self.toolchain.cross}size', fname]
size_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
@@ -432,30 +606,29 @@ class BuilderThread(threading.Thread):
rodata_size)
# Extract the environment from U-Boot and dump it out
- cmd = ['%sobjcopy' % self.toolchain.cross, '-O', 'binary',
+ cmd = [f'{self.toolchain.cross}objcopy', '-O', 'binary',
'-j', '.rodata.default_environment',
'env/built-in.o', 'uboot.env']
command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
- ubootenv = os.path.join(result.out_dir, 'uboot.env')
if not work_in_output:
- self.CopyFiles(result.out_dir, build_dir, '', ['uboot.env'])
+ copy_files(result.out_dir, build_dir, '', ['uboot.env'])
# Write out the image sizes file. This is similar to the output
# of binutil's 'size' utility, but it omits the header line and
# adds an additional hex value at the end of each line for the
# rodata size
- if len(lines):
- sizes = self.builder.GetSizesFile(result.commit_upto,
+ if lines:
+ sizes = self.builder.get_sizes_file(result.commit_upto,
result.brd.target)
- with open(sizes, 'w') as fd:
- print('\n'.join(lines), file=fd)
+ with open(sizes, 'w', encoding='utf-8') as outf:
+ print('\n'.join(lines), file=outf)
if not work_in_output:
# Write out the configuration files, with a special case for SPL
for dirname in ['', 'spl', 'tpl']:
- self.CopyFiles(
+ copy_files(
result.out_dir, build_dir, dirname,
['u-boot.cfg', 'spl/u-boot-spl.cfg', 'tpl/u-boot-tpl.cfg',
'.config', 'include/autoconf.mk',
@@ -463,60 +636,40 @@ class BuilderThread(threading.Thread):
# Now write the actual build output
if keep_outputs:
- self.CopyFiles(
+ copy_files(
result.out_dir, build_dir, '',
['u-boot*', '*.bin', '*.map', '*.img', 'MLO', 'SPL',
'include/autoconf.mk', 'spl/u-boot-spl*'])
- def CopyFiles(self, out_dir, build_dir, dirname, patterns):
- """Copy files from the build directory to the output.
-
- Args:
- out_dir: Path to output directory containing the files
- build_dir: Place to copy the files
- dirname: Source directory, '' for normal U-Boot, 'spl' for SPL
- patterns: A list of filenames (strings) to copy, each relative
- to the build directory
- """
- for pattern in patterns:
- file_list = glob.glob(os.path.join(out_dir, dirname, pattern))
- for fname in file_list:
- target = os.path.basename(fname)
- if dirname:
- base, ext = os.path.splitext(target)
- if ext:
- target = '%s-%s%s' % (base, dirname, ext)
- shutil.copy(fname, os.path.join(build_dir, target))
-
- def _SendResult(self, result):
+ def _send_result(self, result):
"""Send a result to the builder for processing
Args:
- result: CommandResult object containing the results of the build
+ result (CommandResult): results of the build
Raises:
- ValueError if self.test_exception is true (for testing)
+ ValueError: self.test_exception is true (for testing)
"""
if self.test_exception:
raise ValueError('test exception')
if self.thread_num != -1:
self.builder.out_queue.put(result)
else:
- self.builder.ProcessResult(result)
+ self.builder.process_result(result)
- def RunJob(self, job):
+ def run_job(self, job):
"""Run a single job
A job consists of building a list of commits for a particular board.
Args:
- job: Job to build
+ job (Job): Job to build
- Returns:
- List of Result objects
+ Raises:
+ ValueError: Thread was interrupted
"""
brd = job.brd
- work_dir = self.builder.GetThreadDir(self.thread_num)
+ work_dir = self.builder.get_thread_dir(self.thread_num)
self.toolchain = None
if job.commits:
# Run 'make board_defconfig' on the first commit
@@ -524,7 +677,7 @@ class BuilderThread(threading.Thread):
commit_upto = 0
force_build = False
for commit_upto in range(0, len(job.commits), job.step):
- result, request_config = self.RunCommit(commit_upto, brd,
+ result, request_config = self.run_commit(commit_upto, brd,
work_dir, do_config, self.builder.config_only,
force_build or self.builder.force_build,
self.builder.force_build_failures,
@@ -535,7 +688,7 @@ class BuilderThread(threading.Thread):
# If our incremental build failed, try building again
# with a reconfig.
if self.builder.force_config_on_failure:
- result, request_config = self.RunCommit(commit_upto,
+ result, request_config = self.run_commit(commit_upto,
brd, work_dir, True, False, True, False,
job.work_in_output, job.adjust_cfg)
did_config = True
@@ -576,17 +729,17 @@ class BuilderThread(threading.Thread):
raise ValueError('Interrupt')
# We have the build results, so output the result
- self._WriteResult(result, job.keep_outputs, job.work_in_output)
- self._SendResult(result)
+ self._write_result(result, job.keep_outputs, job.work_in_output)
+ self._send_result(result)
else:
# Just build the currently checked-out build
- result, request_config = self.RunCommit(None, brd, work_dir, True,
+ result, request_config = self.run_commit(None, brd, work_dir, True,
self.builder.config_only, True,
self.builder.force_build_failures, job.work_in_output,
job.adjust_cfg)
result.commit_upto = 0
- self._WriteResult(result, job.keep_outputs, job.work_in_output)
- self._SendResult(result)
+ self._write_result(result, job.keep_outputs, job.work_in_output)
+ self._send_result(result)
def run(self):
"""Our thread's run function
@@ -597,8 +750,9 @@ class BuilderThread(threading.Thread):
while True:
job = self.builder.queue.get()
try:
- self.RunJob(job)
- except Exception as e:
- print('Thread exception (use -T0 to run without threads):', e)
- self.builder.thread_exceptions.append(e)
+ self.run_job(job)
+ except Exception as exc:
+ print('Thread exception (use -T0 to run without threads):',
+ exc)
+ self.builder.thread_exceptions.append(exc)
self.builder.queue.task_done()
diff --git a/tools/buildman/buildman.rst b/tools/buildman/buildman.rst
index 6808727..aae2477 100644
--- a/tools/buildman/buildman.rst
+++ b/tools/buildman/buildman.rst
@@ -159,7 +159,7 @@ on the command line:
.. code-block:: bash
- buildman --boards sandbox,snow --boards
+ buildman --boards sandbox,snow --boards firefly-rk3399
It is convenient to use the -n option to see what will be built based on
the subset given. Use -v as well to get an actual list of boards.
@@ -1062,9 +1062,9 @@ same as 'am335x_evm_usbspl'/
The -K option uses the u-boot.cfg, spl/u-boot-spl.cfg and tpl/u-boot-tpl.cfg
files which are produced by a build. If all you want is to check the
-configuration you can in fact avoid doing a full build, using -D. This tells
-buildman to configuration U-Boot and create the .cfg files, but not actually
-build the source. This is 5-10 times faster than doing a full build.
+configuration you can in fact avoid doing a full build, using --config-only.
+This tells buildman to configure U-Boot and create the .cfg files, but not
+actually build the source. This is 5-10 times faster than doing a full build.
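+
+For example, this (illustrative) invocation just configures sandbox::
+
+    buildman --config-only sandbox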
By default buildman considers the following two configuration methods
equivalent::
@@ -1303,14 +1303,32 @@ Using boards.cfg
This file is no-longer needed by buildman but it is still generated in the
working directory. This helps avoid a delay on every build, since scanning all
-the Kconfig files takes a few seconds. Use the -R flag to force regeneration
-of the file - in that case buildman exits after writing the file. with exit code
-2 if there was an error in the maintainer files.
+the Kconfig files takes a few seconds. Use the `-R <filename>` flag to force
+regeneration of the file - in that case buildman exits after writing the file,
+with exit code 2 if there was an error in the maintainer files. To use the
+default filename, use a hyphen, i.e. `-R -`.
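+
+For example, to write the list to an illustrative path of /tmp/boards.cfg::
+
+    buildman -R /tmp/boards.cfg
+
+or, using the default filename in the output directory::
+
+    buildman -R -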
You should use 'buildman -nv <criteria>' instead of grepping the boards.cfg file,
since it may be dropped altogether in future.
+Checking maintainers
+--------------------
+
+Sometimes a board is added without a corresponding entry in a MAINTAINERS file.
+Use the `--maintainer-check` option to check this::
+
+ $ buildman --maintainer-check
+ WARNING: board/mikrotik/crs3xx-98dx3236/MAINTAINERS: missing defconfig ending at line 7
+ WARNING: no maintainers for 'clearfog_spi'
+
+Buildman returns with an exit code of 2 if there are any warnings.
+
+An experimental `--full-check` option also checks for boards which don't have a
+CONFIG_TARGET_xxx where xxx corresponds to their defconfig filename. This is
+not strictly necessary, but may be useful information.
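+
+For example::
+
+    $ buildman --full-check
+
+This runs the same checks as `--maintainer-check`, with the TARGET check
+added.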
+
+
Checking the command
--------------------
diff --git a/tools/buildman/cmdline.py b/tools/buildman/cmdline.py
index a9cda24..03211bd 100644
--- a/tools/buildman/cmdline.py
+++ b/tools/buildman/cmdline.py
@@ -2,148 +2,190 @@
# Copyright (c) 2014 Google, Inc
#
-from optparse import OptionParser
+"""Handles parsing of buildman arguments
+
+This creates the argument parser and uses it to parse the arguments passed in
+"""
+
+import argparse
import os
import pathlib
BUILDMAN_DIR = pathlib.Path(__file__).parent
HAS_TESTS = os.path.exists(BUILDMAN_DIR / "test.py")
-def ParseArgs():
- """Parse command line arguments from sys.argv[]
+def add_upto_m(parser):
+ """Add arguments up to 'M'
- Returns:
- tuple containing:
- options: command line options
- args: command lin arguments
+ Args:
+ parser (ArgumentParser): Parser to add to
+
+ This is split out to avoid having too many statements in one function
"""
- parser = OptionParser()
- parser.add_option('-a', '--adjust-cfg', type=str, action='append',
+ parser.add_argument('-a', '--adjust-cfg', type=str, action='append',
help='Adjust the Kconfig settings in .config before building')
- parser.add_option('-A', '--print-prefix', action='store_true',
+ parser.add_argument('-A', '--print-prefix', action='store_true',
help='Print the tool-chain prefix for a board (CROSS_COMPILE=)')
- parser.add_option('-b', '--branch', type='string',
+ parser.add_argument('-b', '--branch', type=str,
help='Branch name to build, or range of commits to build')
- parser.add_option('-B', '--bloat', dest='show_bloat',
+ parser.add_argument('-B', '--bloat', dest='show_bloat',
action='store_true', default=False,
help='Show changes in function code size for each board')
- parser.add_option('--boards', type='string', action='append',
+ parser.add_argument('--boards', type=str, action='append',
help='List of board names to build separated by comma')
- parser.add_option('-c', '--count', dest='count', type='int',
+ parser.add_argument('-c', '--count', dest='count', type=int,
default=-1, help='Run build on the top n commits')
- parser.add_option('-C', '--force-reconfig', dest='force_reconfig',
+ parser.add_argument('-C', '--force-reconfig', dest='force_reconfig',
action='store_true', default=False,
help='Reconfigure for every commit (disable incremental build)')
- parser.add_option('-d', '--detail', dest='show_detail',
+ parser.add_argument('--config-only', action='store_true',
+ default=False,
+ help="Don't build, just configure each commit")
+ parser.add_argument('-d', '--detail', dest='show_detail',
action='store_true', default=False,
help='Show detailed size delta for each board in the -S summary')
- parser.add_option('-D', '--config-only', action='store_true', default=False,
- help="Don't build, just configure each commit")
- parser.add_option('--debug', action='store_true',
+ parser.add_argument('-D', '--debug', action='store_true',
help='Enable debugging (provides a full traceback on error)')
- parser.add_option('-e', '--show_errors', action='store_true',
+ parser.add_argument('-e', '--show_errors', action='store_true',
default=False, help='Show errors and warnings')
- parser.add_option('-E', '--warnings-as-errors', action='store_true',
+ parser.add_argument('-E', '--warnings-as-errors', action='store_true',
default=False, help='Treat all compiler warnings as errors')
- parser.add_option('-f', '--force-build', dest='force_build',
+ parser.add_argument('-f', '--force-build', dest='force_build',
action='store_true', default=False,
help='Force build of boards even if already built')
- parser.add_option('-F', '--force-build-failures', dest='force_build_failures',
+ parser.add_argument('-F', '--force-build-failures', dest='force_build_failures',
action='store_true', default=False,
help='Force build of previously-failed build')
- parser.add_option('--fetch-arch', type='string',
+ parser.add_argument('--fetch-arch', type=str,
help="Fetch a toolchain for architecture FETCH_ARCH ('list' to list)."
' You can also fetch several toolchains separated by comma, or'
" 'all' to download all")
- parser.add_option('-g', '--git', type='string',
+ parser.add_argument(
+ '--full-check', action='store_true',
+ help='Check maintainer entries and TARGET configs')
+ parser.add_argument('-g', '--git', type=str,
help='Git repo containing branch to build', default='.')
- parser.add_option('-G', '--config-file', type='string',
+ parser.add_argument('-G', '--config-file', type=str,
help='Path to buildman config file', default='')
- parser.add_option('-H', '--full-help', action='store_true', dest='full_help',
+ parser.add_argument('-H', '--full-help', action='store_true', dest='full_help',
default=False, help='Display the README file')
- parser.add_option('-i', '--in-tree', dest='in_tree',
+ parser.add_argument('-i', '--in-tree', dest='in_tree',
action='store_true', default=False,
help='Build in the source tree instead of a separate directory')
- parser.add_option('-I', '--ide', action='store_true', default=False,
+ parser.add_argument('-I', '--ide', action='store_true', default=False,
help='Create build output that can be parsed by an IDE')
- parser.add_option('-j', '--jobs', dest='jobs', type='int',
+ parser.add_argument('-j', '--jobs', dest='jobs', type=int,
default=None, help='Number of jobs to run at once (passed to make)')
- parser.add_option('-k', '--keep-outputs', action='store_true',
+ parser.add_argument('-k', '--keep-outputs', action='store_true',
default=False, help='Keep all build output files (e.g. binaries)')
- parser.add_option('-K', '--show-config', action='store_true',
- default=False, help='Show configuration changes in summary (both board config files and Kconfig)')
- parser.add_option('--preserve-config-y', action='store_true',
+ parser.add_argument('-K', '--show-config', action='store_true',
+ default=False,
+ help='Show configuration changes in summary (both board config files and Kconfig)')
+ parser.add_argument('--preserve-config-y', action='store_true',
default=False, help="Don't convert y to 1 in configs")
- parser.add_option('-l', '--list-error-boards', action='store_true',
+ parser.add_argument('-l', '--list-error-boards', action='store_true',
default=False, help='Show a list of boards next to each error/warning')
- parser.add_option('-L', '--no-lto', action='store_true',
+ parser.add_argument('-L', '--no-lto', action='store_true',
default=False, help='Disable Link-time Optimisation (LTO) for builds')
- parser.add_option('--list-tool-chains', action='store_true', default=False,
+ parser.add_argument('--list-tool-chains', action='store_true', default=False,
help='List available tool chains (use -v to see probing detail)')
- parser.add_option('-m', '--mrproper', action='store_true',
+ parser.add_argument('-m', '--mrproper', action='store_true',
default=False, help="Run 'make mrproper before reconfiguring")
- parser.add_option(
+ parser.add_argument(
'-M', '--allow-missing', action='store_true', default=False,
- help='Tell binman to allow missing blobs and generate fake ones as needed'),
- parser.add_option(
+ help='Tell binman to allow missing blobs and generate fake ones as needed')
+ parser.add_argument(
+ '--maintainer-check', action='store_true',
+ help='Check that maintainer entries exist for each board')
+ parser.add_argument(
'--no-allow-missing', action='store_true', default=False,
- help='Disable telling binman to allow missing blobs'),
- parser.add_option('-n', '--dry-run', action='store_true', dest='dry_run',
+ help='Disable telling binman to allow missing blobs')
+ parser.add_argument('-n', '--dry-run', action='store_true', dest='dry_run',
default=False, help="Do a dry run (describe actions, but do nothing)")
- parser.add_option('-N', '--no-subdirs', action='store_true', dest='no_subdirs',
- default=False, help="Don't create subdirectories when building current source for a single board")
- parser.add_option('-o', '--output-dir', type='string', dest='output_dir',
+ parser.add_argument('-N', '--no-subdirs', action='store_true', dest='no_subdirs',
+ default=False,
+ help="Don't create subdirectories when building current source for a single board")
+
+
+def add_after_m(parser):
+ """Add arguments after 'M'
+
+ Args:
+ parser (ArgumentParser): Parser to add to
+
+ This is split out to avoid having too many statements in one function
+ """
+ parser.add_argument('-o', '--output-dir', type=str, dest='output_dir',
help='Directory where all builds happen and buildman has its workspace (default is ../)')
- parser.add_option('-O', '--override-toolchain', type='string',
+ parser.add_argument('-O', '--override-toolchain', type=str,
help="Override host toochain to use for sandbox (e.g. 'clang-7')")
- parser.add_option('-Q', '--quick', action='store_true',
+ parser.add_argument('-Q', '--quick', action='store_true',
default=False, help='Do a rough build, with limited warning resolution')
- parser.add_option('-p', '--full-path', action='store_true',
+ parser.add_argument('-p', '--full-path', action='store_true',
default=False, help="Use full toolchain path in CROSS_COMPILE")
- parser.add_option('-P', '--per-board-out-dir', action='store_true',
+ parser.add_argument('-P', '--per-board-out-dir', action='store_true',
default=False, help="Use an O= (output) directory per board rather than per thread")
- parser.add_option('-r', '--reproducible-builds', action='store_true',
+ parser.add_argument('--print-arch', action='store_true',
+ default=False, help="Print the architecture for a board (ARCH=)")
+ parser.add_argument('-r', '--reproducible-builds', action='store_true',
help='Set SOURCE_DATE_EPOCH=0 to support a reproducible build')
- parser.add_option('-R', '--regen-board-list', action='store_true',
+ parser.add_argument('-R', '--regen-board-list', type=str,
help='Force regeneration of the list of boards, like the old boards.cfg file')
- parser.add_option('-s', '--summary', action='store_true',
+ parser.add_argument('-s', '--summary', action='store_true',
default=False, help='Show a build summary')
- parser.add_option('-S', '--show-sizes', action='store_true',
+ parser.add_argument('-S', '--show-sizes', action='store_true',
default=False, help='Show image size variation in summary')
- parser.add_option('--step', type='int',
+ parser.add_argument('--step', type=int,
default=1, help='Only build every n commits (0=just first and last)')
if HAS_TESTS:
- parser.add_option('--skip-net-tests', action='store_true', default=False,
+ parser.add_argument('--skip-net-tests', action='store_true', default=False,
help='Skip tests which need the network')
- parser.add_option('-t', '--test', action='store_true', dest='test',
+ parser.add_argument('-t', '--test', action='store_true', dest='test',
default=False, help='run tests')
- parser.add_option('-T', '--threads', type='int',
+ parser.add_argument('--coverage', action='store_true',
+ help='Calculate test coverage')
+ parser.add_argument('-T', '--threads', type=int,
default=None,
help='Number of builder threads to use (0=single-thread)')
- parser.add_option('-u', '--show_unknown', action='store_true',
+ parser.add_argument('-u', '--show_unknown', action='store_true',
default=False, help='Show boards with unknown build result')
- parser.add_option('-U', '--show-environment', action='store_true',
+ parser.add_argument('-U', '--show-environment', action='store_true',
default=False, help='Show environment changes in summary')
- parser.add_option('-v', '--verbose', action='store_true',
+ parser.add_argument('-v', '--verbose', action='store_true',
default=False, help='Show build results while the build progresses')
- parser.add_option('-V', '--verbose-build', action='store_true',
+ parser.add_argument('-V', '--verbose-build', action='store_true',
default=False, help='Run make with V=1, logging all output')
- parser.add_option('-w', '--work-in-output', action='store_true',
+ parser.add_argument('-w', '--work-in-output', action='store_true',
default=False, help='Use the output directory as the work directory')
- parser.add_option('-W', '--ignore-warnings', action='store_true',
+ parser.add_argument('-W', '--ignore-warnings', action='store_true',
default=False, help='Return success even if there are warnings')
- parser.add_option('-x', '--exclude', dest='exclude',
- type='string', action='append',
+ parser.add_argument('-x', '--exclude', dest='exclude',
+ type=str, action='append',
help='Specify a list of boards to exclude, separated by comma')
- parser.add_option('-y', '--filter-dtb-warnings', action='store_true',
+ parser.add_argument('-y', '--filter-dtb-warnings', action='store_true',
default=False,
help='Filter out device-tree-compiler warnings from output')
- parser.add_option('-Y', '--filter-migration-warnings', action='store_true',
+ parser.add_argument('-Y', '--filter-migration-warnings', action='store_true',
default=False,
help='Filter out migration warnings from output')
- parser.usage += """ [list of target/arch/cpu/board/vendor/soc to build]
+
+def parse_args():
+ """Parse command line arguments from sys.argv[]
+
+ Returns:
+ Namespace: parsed command line arguments, including 'terms' for the
+ positional arguments
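+
+ For example, `buildman -b mybranch -c 5 arm` gives a Namespace with
+ branch='mybranch', count=5 and terms=['arm']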
+ """
+ epilog = """ [list of target/arch/cpu/board/vendor/soc to build]
Build U-Boot for all commits in a branch. Use -n to do a dry run"""
+ parser = argparse.ArgumentParser(epilog=epilog)
+ add_upto_m(parser)
+ add_after_m(parser)
+ parser.add_argument('terms', type=str, nargs='*',
+ help='Board / SoC names to build')
+
return parser.parse_args()
diff --git a/tools/buildman/control.py b/tools/buildman/control.py
index 09a11f2..5c57200 100644
--- a/tools/buildman/control.py
+++ b/tools/buildman/control.py
@@ -2,15 +2,14 @@
# Copyright (c) 2013 The Chromium OS Authors.
#
+"""Control module for buildman
+
+This holds the main control logic for buildman, when not running tests.
+"""
+
import multiprocessing
-try:
- import importlib.resources
-except ImportError:
- # for Python 3.6
- import importlib_resources
import os
import shutil
-import subprocess
import sys
from buildman import boards
@@ -22,34 +21,58 @@ from patman import gitutil
from patman import patchstream
from u_boot_pylib import command
from u_boot_pylib import terminal
-from u_boot_pylib import tools
from u_boot_pylib.terminal import tprint
-def GetPlural(count):
+TEST_BUILDER = None
+
+def get_plural(count):
"""Returns a plural 's' if count is not 1"""
return 's' if count != 1 else ''
-def GetActionSummary(is_summary, commits, selected, options):
- """Return a string summarising the intended action.
+
+def count_build_commits(commits, step):
+ """Calculate the number of commits to be built
+
+ Args:
+ commits (list of Commit): Commits to build or None
+ step (int): Step value for commits, typically 1
Returns:
- Summary string.
+ Number of commits that will be built
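+
+ For example, 10 commits with step 3 builds commits 0, 3, 6 and 9, so
+ this returns (10 + 3 - 1) // 3 = 4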
"""
if commits:
count = len(commits)
- count = (count + options.step - 1) // options.step
- commit_str = '%d commit%s' % (count, GetPlural(count))
+ return (count + step - 1) // step
+ return 0
+
+
+def get_action_summary(is_summary, commit_count, selected, threads, jobs):
+ """Return a string summarising the intended action.
+
+ Args:
+ is_summary (bool): True if this is a summary (otherwise it is building)
+ commit_count (int): Number of commits being built
+ selected (list of Board): List of Board objects that are marked
+ selected
+ threads (int): Number of processor threads being used
+ jobs (int): Number of jobs to build at once
+
+ Returns:
+ Summary string.
+ """
+ if commit_count:
+ commit_str = f'{commit_count} commit{get_plural(commit_count)}'
else:
commit_str = 'current source'
- str = '%s %s for %d boards' % (
- 'Summary of' if is_summary else 'Building', commit_str,
- len(selected))
- str += ' (%d thread%s, %d job%s per thread)' % (options.threads,
- GetPlural(options.threads), options.jobs, GetPlural(options.jobs))
- return str
-
-def ShowActions(series, why_selected, boards_selected, builder, options,
- board_warnings):
+ msg = (f"{'Summary of' if is_summary else 'Building'} "
+ f'{commit_str} for {len(selected)} boards')
+ msg += (f' ({threads} thread{get_plural(threads)}, '
+ f'{jobs} job{get_plural(jobs)} per thread)')
+ return msg
+
+# pylint: disable=R0913
+def show_actions(series, why_selected, boards_selected, output_dir,
+ board_warnings, step, threads, jobs, verbose):
"""Display a list of actions that we would take, if not a dry run.
Args:
@@ -61,9 +84,12 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
the value would be a list of board names.
boards_selected: Dict of selected boards, key is target name,
value is Board object
- builder: The builder that will be used to build the commits
- options: Command line options object
+ output_dir (str): Output directory for builder
board_warnings: List of warnings obtained from board selection
+ step (int): Step increment through commits
+ threads (int): Number of processor threads being used
+ jobs (int): Number of jobs to build at once
+ verbose (bool): True to indicate why each board was selected
"""
col = terminal.Color()
print('Dry run, so not doing much. But I would do this:')
@@ -72,27 +98,27 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
commits = series.commits
else:
commits = None
- print(GetActionSummary(False, commits, boards_selected,
- options))
- print('Build directory: %s' % builder.base_dir)
+ print(get_action_summary(False, count_build_commits(commits, step),
+ boards_selected, threads, jobs))
+ print(f'Build directory: {output_dir}')
if commits:
- for upto in range(0, len(series.commits), options.step):
+ for upto in range(0, len(series.commits), step):
commit = series.commits[upto]
print(' ', col.build(col.YELLOW, commit.hash[:8], bright=False), end=' ')
print(commit.subject)
print()
for arg in why_selected:
if arg != 'all':
- print(arg, ': %d boards' % len(why_selected[arg]))
- if options.verbose:
- print(' %s' % ' '.join(why_selected[arg]))
- print(('Total boards to build for each commit: %d\n' %
- len(why_selected['all'])))
+ print(arg, f': {len(why_selected[arg])} boards')
+ if verbose:
+ print(f" {' '.join(why_selected[arg])}")
+ print('Total boards to build for each '
+ f"commit: {len(why_selected['all'])}\n")
if board_warnings:
for warning in board_warnings:
print(col.build(col.YELLOW, warning))
-def ShowToolchainPrefix(brds, toolchains):
+def show_toolchain_prefix(brds, toolchains):
"""Show information about a the tool chain used by one or more boards
The function checks that all boards use the same toolchain, then prints
@@ -110,15 +136,48 @@ def ShowToolchainPrefix(brds, toolchains):
for brd in board_selected.values():
tc_set.add(toolchains.Select(brd.arch))
if len(tc_set) != 1:
- return 'Supplied boards must share one toolchain'
- return False
- tc = tc_set.pop()
- print(tc.GetEnvArgs(toolchain.VAR_CROSS_COMPILE))
- return None
+ sys.exit('Supplied boards must share one toolchain')
+ tchain = tc_set.pop()
+ print(tchain.GetEnvArgs(toolchain.VAR_CROSS_COMPILE))
+
+def show_arch(brds):
+ """Show information about a the architecture used by one or more boards
+
+ The function checks that all boards use the same architecture, then prints
+ the correct value for ARCH.
+
+ Args:
+ brds (Boards): Boards object containing selected boards
+
+ This exits with an error if the selected boards do not all share one
+ architecture
+ """
+ board_selected = brds.get_selected_dict()
+ arch_set = set()
+ for brd in board_selected.values():
+ arch_set.add(brd.arch)
+ if len(arch_set) != 1:
+ sys.exit('Supplied boards must share one arch')
+ print(arch_set.pop())
def get_allow_missing(opt_allow, opt_no_allow, num_selected, has_branch):
+ """Figure out whether to allow external blobs
+
+ Uses the allow-missing setting and the provided arguments to decide whether
+ missing external blobs should be allowed
+
+ Args:
+ opt_allow (bool): True if --allow-missing flag is set
+ opt_no_allow (bool): True if --no-allow-missing flag is set
+ num_selected (int): Number of selected boards
+ has_branch (bool): True if a git branch (to build) has been provided
+
+ Returns:
+ bool: True to allow missing external blobs, False to produce an error if
+ external blobs are used
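+
+ For example, if the setting contains 'branch', missing blobs are
+ allowed when a branch is being built (has_branch=True), unless
+ opt_no_allow overrides this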
+ """
allow_missing = False
- am_setting = bsettings.GetGlobalItemValue('allow-missing')
+ am_setting = bsettings.get_global_item_value('allow-missing')
if am_setting:
if am_setting == 'always':
allow_missing = True
@@ -133,142 +192,82 @@ def get_allow_missing(opt_allow, opt_no_allow, num_selected, has_branch):
allow_missing = False
return allow_missing
-def DoBuildman(options, args, toolchains=None, make_func=None, brds=None,
- clean_dir=False, test_thread_exceptions=False):
- """The main control code for buildman
-
- Args:
- options: Command line options object
- args: Command line arguments (list of strings)
- toolchains: Toolchains to use - this should be a Toolchains()
- object. If None, then it will be created and scanned
- make_func: Make function to use for the builder. This is called
- to execute 'make'. If this is None, the normal function
- will be used, which calls the 'make' tool with suitable
- arguments. This setting is useful for tests.
- brds: Boards() object to use, containing a list of available
- boards. If this is None it will be created and scanned.
- clean_dir: Used for tests only, indicates that the existing output_dir
- should be removed before starting the build
- test_thread_exceptions: Uses for tests only, True to make the threads
- raise an exception instead of reporting their result. This simulates
- a failure in the code somewhere
- """
- global builder
-
- if options.full_help:
- with importlib.resources.path('buildman', 'README.rst') as readme:
- tools.print_full_help(str(readme))
- return 0
- gitutil.setup()
- col = terminal.Color()
+def count_commits(branch, count, col, git_dir):
+ """Could the number of commits in the branch/ranch being built
- options.git_dir = os.path.join(options.git, '.git')
-
- no_toolchains = toolchains is None
- if no_toolchains:
- toolchains = toolchain.Toolchains(options.override_toolchain)
-
- if options.fetch_arch:
- if options.fetch_arch == 'list':
- sorted_list = toolchains.ListArchs()
- print(col.build(col.BLUE, 'Available architectures: %s\n' %
- ' '.join(sorted_list)))
- return 0
- else:
- fetch_arch = options.fetch_arch
- if fetch_arch == 'all':
- fetch_arch = ','.join(toolchains.ListArchs())
- print(col.build(col.CYAN, '\nDownloading toolchains: %s' %
- fetch_arch))
- for arch in fetch_arch.split(','):
- print()
- ret = toolchains.FetchAndInstall(arch)
- if ret:
- return ret
- return 0
-
- if no_toolchains:
- toolchains.GetSettings()
- toolchains.Scan(options.list_tool_chains and options.verbose)
- if options.list_tool_chains:
- toolchains.List()
- print()
- return 0
-
- if not options.output_dir:
- if options.work_in_output:
- sys.exit(col.build(col.RED, '-w requires that you specify -o'))
- options.output_dir = '..'
-
- # Work out what subset of the boards we are building
- if not brds:
- if not os.path.exists(options.output_dir):
- os.makedirs(options.output_dir)
- board_file = os.path.join(options.output_dir, 'boards.cfg')
-
- brds = boards.Boards()
- ok = brds.ensure_board_list(board_file,
- options.threads or multiprocessing.cpu_count(),
- force=options.regen_board_list,
- quiet=not options.verbose)
- if options.regen_board_list:
- return 0 if ok else 2
- brds.read_boards(board_file)
-
- exclude = []
- if options.exclude:
- for arg in options.exclude:
- exclude += arg.split(',')
-
- if options.boards:
- requested_boards = []
- for b in options.boards:
- requested_boards += b.split(',')
- else:
- requested_boards = None
- why_selected, board_warnings = brds.select_boards(args, exclude,
- requested_boards)
- selected = brds.get_selected()
- if not len(selected):
- sys.exit(col.build(col.RED, 'No matching boards found'))
-
- if options.print_prefix:
- err = ShowToolchainPrefix(brds, toolchains)
- if err:
- sys.exit(col.build(col.RED, err))
- return 0
+ Args:
+ branch (str): Name of branch to build, or None if none
+ count (int): Number of commits to build, or -1 for all
+ col (Terminal.Color): Color object to use
+ git_dir (str): Git directory to use, e.g. './.git'
- # Work out how many commits to build. We want to build everything on the
- # branch. We also build the upstream commit as a control so we can see
- # problems introduced by the first commit on the branch.
- count = options.count
- has_range = options.branch and '..' in options.branch
+ Returns:
+ tuple:
+ Number of commits being built
+ True if the 'branch' string contains a range rather than a simple
+ name
+ """
+ has_range = branch and '..' in branch
if count == -1:
- if not options.branch:
+ if not branch:
count = 1
else:
if has_range:
- count, msg = gitutil.count_commits_in_range(options.git_dir,
- options.branch)
+ count, msg = gitutil.count_commits_in_range(git_dir, branch)
else:
- count, msg = gitutil.count_commits_in_branch(options.git_dir,
- options.branch)
+ count, msg = gitutil.count_commits_in_branch(git_dir, branch)
if count is None:
sys.exit(col.build(col.RED, msg))
elif count == 0:
- sys.exit(col.build(col.RED, "Range '%s' has no commits" %
- options.branch))
+ sys.exit(col.build(col.RED,
+ f"Range '{branch}' has no commits"))
if msg:
print(col.build(col.YELLOW, msg))
count += 1 # Build upstream commit also
if not count:
- msg = ("No commits found to process in branch '%s': "
- "set branch's upstream or use -c flag" % options.branch)
+ msg = (f"No commits found to process in branch '{branch}': "
+ "set branch's upstream or use -c flag")
sys.exit(col.build(col.RED, msg))
- if options.work_in_output:
+ return count, has_range
+
+
+def determine_series(selected, col, git_dir, count, branch, work_in_output):
+ """Determine the series which is to be built, if any
+
+ If there is a series, the commits in that series are numbered by setting
+ their sequence value (starting from 0). This is used by tests.
+
+ Args:
+ selected (list of Board): List of Board objects that are marked
+ selected
+ col (Terminal.Color): Color object to use
+ git_dir (str): Git directory to use, e.g. './.git'
+ count (int): Number of commits in branch
+ branch (str): Name of branch to build, or None if none
+ work_in_output (bool): True to work in the output directory
+
+ Returns:
+ Series: Series to build, or None for none
+
+ Read the metadata from the commits. First look at the upstream commit,
+ then the ones in the branch. We would like to do something like
+ upstream/master~..branch but that isn't possible if upstream/master is
+ a merge commit (it will list all the commits that form part of the
+ merge)
+
+ Conflicting tags are not a problem for buildman, since it does not use
+ them. For example, Series-version is not useful for buildman. On the
+ other hand conflicting tags will cause an error. So allow later tags
+ to overwrite earlier ones by setting allow_overwrite=True
+ """
+
+ # Work out how many commits to build. We want to build everything on the
+ # branch. We also build the upstream commit as a control so we can see
+ # problems introduced by the first commit on the branch.
+ count, has_range = count_commits(branch, count, col, git_dir)
+ if work_in_output:
if len(selected) != 1:
sys.exit(col.build(col.RED,
'-w can only be used with a single board'))
@@ -276,141 +275,402 @@ def DoBuildman(options, args, toolchains=None, make_func=None, brds=None,
sys.exit(col.build(col.RED,
'-w can only be used with a single commit'))
- # Read the metadata from the commits. First look at the upstream commit,
- # then the ones in the branch. We would like to do something like
- # upstream/master~..branch but that isn't possible if upstream/master is
- # a merge commit (it will list all the commits that form part of the
- # merge)
- # Conflicting tags are not a problem for buildman, since it does not use
- # them. For example, Series-version is not useful for buildman. On the
- # other hand conflicting tags will cause an error. So allow later tags
- # to overwrite earlier ones by setting allow_overwrite=True
- if options.branch:
+ if branch:
if count == -1:
if has_range:
- range_expr = options.branch
+ range_expr = branch
else:
- range_expr = gitutil.get_range_in_branch(options.git_dir,
- options.branch)
- upstream_commit = gitutil.get_upstream(options.git_dir,
- options.branch)
+ range_expr = gitutil.get_range_in_branch(git_dir, branch)
+ upstream_commit = gitutil.get_upstream(git_dir, branch)
series = patchstream.get_metadata_for_list(upstream_commit,
- options.git_dir, 1, series=None, allow_overwrite=True)
+ git_dir, 1, series=None, allow_overwrite=True)
series = patchstream.get_metadata_for_list(range_expr,
- options.git_dir, None, series, allow_overwrite=True)
+ git_dir, None, series, allow_overwrite=True)
else:
# Honour the count
- series = patchstream.get_metadata_for_list(options.branch,
- options.git_dir, count, series=None, allow_overwrite=True)
+ series = patchstream.get_metadata_for_list(branch,
+ git_dir, count, series=None, allow_overwrite=True)
+
+ # Number the commits for test purposes
+ for i, commit in enumerate(series.commits):
+ commit.sequence = i
else:
series = None
- if not options.dry_run:
- options.verbose = True
- if not options.summary:
- options.show_errors = True
+ return series
+
+
+def do_fetch_arch(toolchains, col, fetch_arch):
+ """Handle the --fetch-arch option
+
+ Args:
+ toolchains (Toolchains): Tool chains to use
+ col (terminal.Color): Color object to build
+ fetch_arch (str): Argument passed to the --fetch-arch option
+
+ Returns:
+ int: Return code for buildman
+ """
+ if fetch_arch == 'list':
+ sorted_list = toolchains.ListArchs()
+ print(col.build(
+ col.BLUE,
+ f"Available architectures: {' '.join(sorted_list)}\n"))
+ return 0
+
+ if fetch_arch == 'all':
+ fetch_arch = ','.join(toolchains.ListArchs())
+ print(col.build(col.CYAN,
+ f'\nDownloading toolchains: {fetch_arch}'))
+ for arch in fetch_arch.split(','):
+ print()
+ ret = toolchains.FetchAndInstall(arch)
+ if ret:
+ return ret
+ return 0
+
+
+def get_toolchains(toolchains, col, override_toolchain, fetch_arch,
+ list_tool_chains, verbose):
+ """Get toolchains object to use
+
+ Args:
+ toolchains (Toolchains or None): Toolchains to use. If None, then a
+ Toolchains object will be created and scanned
+ col (Terminal.Color): Color object
+ override_toolchain (str or None): Override value for toolchain, or None
+ fetch_arch (str or None): Architecture(s) to fetch ('list' to list
+ them), or None
+ list_tool_chains (bool): True to list all tool chains
+ verbose (bool): True for verbose output when listing toolchains
+
+ Returns:
+ Either:
+ int: Operation completed and buildman should exit with exit code
+ Toolchains: Toolchains object to use
+ """
+ no_toolchains = toolchains is None
+ if no_toolchains:
+ toolchains = toolchain.Toolchains(override_toolchain)
+
+ if fetch_arch:
+ return do_fetch_arch(toolchains, col, fetch_arch)
+
+ if no_toolchains:
+ toolchains.GetSettings()
+ toolchains.Scan(list_tool_chains and verbose)
+ if list_tool_chains:
+ toolchains.List()
+ print()
+ return 0
+ return toolchains
+
+
+def get_boards_obj(output_dir, regen_board_list, maintainer_check, full_check,
+ threads, verbose):
+ """Object the Boards object to use
+
+ Creates the output directory and ensures there is a boards.cfg file, then
+ reads it in.
+
+ Args:
+ output_dir (str): Output directory to use
+ regen_board_list (str): Filename to write the regenerated board list
+ to ('-' for the default), or None to not regenerate it
+ maintainer_check (bool): True to just run a maintainer check
+ full_check (bool): True to just run a full check of Kconfig and
+ maintainers
+ threads (int or None): Number of threads to use to create boards file
+ verbose (bool): False to suppress output from boards-file generation
+
+ Returns:
+ Either:
+ int: Operation completed and buildman should exit with exit code
+ Boards: Boards object to use
+ """
+ brds = boards.Boards()
+ nr_cpus = threads or multiprocessing.cpu_count()
+ if maintainer_check or full_check:
+ warnings = brds.build_board_list(jobs=nr_cpus,
+ warn_targets=full_check)[1]
+ if warnings:
+ for warn in warnings:
+ print(warn, file=sys.stderr)
+ return 2
+ return 0
+
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ board_file = os.path.join(output_dir, 'boards.cfg')
+ if regen_board_list and regen_board_list != '-':
+ board_file = regen_board_list
+
+ okay = brds.ensure_board_list(board_file, nr_cpus, force=regen_board_list,
+ quiet=not verbose)
+ if regen_board_list:
+ return 0 if okay else 2
+ brds.read_boards(board_file)
+ return brds
+
+
+def determine_boards(brds, args, col, opt_boards, exclude_list):
+ """Determine which boards to build
+
+ Each element of args and exclude can refer to a board name, arch or SoC
+
+ Args:
+ brds (Boards): Boards object
+ args (list of str): Arguments describing boards to build
+ col (Terminal.Color): Color object
+ opt_boards (list of str): Specific boards to build, or None for all
+ exclude_list (list of str): Arguments describing boards to exclude
+
+ Returns:
+ tuple:
+ list of Board: List of Board objects that are marked selected
+ why_selected: Dictionary where each key is a buildman argument
+ provided by the user, and the value is the list of boards
+ brought in by that argument. For example, 'arm' might bring
+ in 400 boards, so in this case the key would be 'arm' and
+ the value would be a list of board names.
+ board_warnings: List of warnings obtained from board selection
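+
+ For example, args=['arm'] with exclude_list=['board1'] selects all
+ arm boards except board1 (illustrative names)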
+ """
+ exclude = []
+ if exclude_list:
+ for arg in exclude_list:
+ exclude += arg.split(',')
+
+ if opt_boards:
+ requested_boards = []
+ for brd in opt_boards:
+ requested_boards += brd.split(',')
+ else:
+ requested_boards = None
+ why_selected, board_warnings = brds.select_boards(args, exclude,
+ requested_boards)
+ selected = brds.get_selected()
+ if not selected:
+ sys.exit(col.build(col.RED, 'No matching boards found'))
+ return selected, why_selected, board_warnings
+
+
+def adjust_args(args, series, selected):
+ """Adjust arguments according to various constraints
+
+ Updates verbose, show_errors, threads, jobs and step
+
+ Args:
+ args (Namespace): Namespace object to adjust
+ series (Series): Series being built / summarised
+ selected (list of Board): List of Board objects that are marked
+ """
+ if not series and not args.dry_run:
+ args.verbose = True
+ if not args.summary:
+ args.show_errors = True
# By default we have one thread per CPU. But if there are not enough jobs
# we can have fewer threads and use a high '-j' value for make.
- if options.threads is None:
- options.threads = min(multiprocessing.cpu_count(), len(selected))
- if not options.jobs:
- options.jobs = max(1, (multiprocessing.cpu_count() +
+ if args.threads is None:
+ args.threads = min(multiprocessing.cpu_count(), len(selected))
+ if not args.jobs:
+ args.jobs = max(1, (multiprocessing.cpu_count() +
len(selected) - 1) // len(selected))
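+ # For example, 16 CPUs and 4 selected boards gives 4 threads, each
+ # with jobs = (16 + 4 - 1) // 4 = 4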
- if not options.step:
- options.step = len(series.commits) - 1
+ if not args.step:
+ args.step = len(series.commits) - 1
+
+ # We can't show function sizes without board details at present
+ if args.show_bloat:
+ args.show_detail = True
- gnu_make = command.output(os.path.join(options.git,
- 'scripts/show-gnu-make'), raise_on_error=False).rstrip()
- if not gnu_make:
- sys.exit('GNU Make not found')
- allow_missing = get_allow_missing(options.allow_missing,
- options.no_allow_missing, len(selected),
- options.branch)
+def setup_output_dir(output_dir, work_in_output, branch, no_subdirs, col,
+ clean_dir):
+ """Set up the output directory
- # Create a new builder with the selected options.
- output_dir = options.output_dir
- if options.branch:
- dirname = options.branch.replace('/', '_')
+ Args:
+ output_dir (str): Output directory provided by the user, or None if none
+ work_in_output (bool): True to work in the output directory
+ branch (str): Name of branch to build, or None if none
+ no_subdirs (bool): True to put the output in the top-level output dir
+ clean_dir: Used for tests only, indicates that the existing output_dir
+ should be removed before starting the build
+
+ Returns:
+ str: Updated output directory pathname
+ """
+ if not output_dir:
+ if work_in_output:
+ sys.exit(col.build(col.RED, '-w requires that you specify -o'))
+ output_dir = '..'
+ if branch and not no_subdirs:
# As a special case allow the board directory to be placed in the
# output directory itself rather than any subdirectory.
- if not options.no_subdirs:
- output_dir = os.path.join(options.output_dir, dirname)
+ dirname = branch.replace('/', '_')
+ output_dir = os.path.join(output_dir, dirname)
if clean_dir and os.path.exists(output_dir):
shutil.rmtree(output_dir)
- adjust_cfg = cfgutil.convert_list_to_dict(options.adjust_cfg)
+ return output_dir
+
+
+def run_builder(builder, commits, board_selected, args):
+ """Run the builder or show the summary
+
+ Args:
+ builder (Builder): Builder to use
+ commits (list of Commit): List of commits being built, None if no branch
+ board_selected (dict): Dict of selected boards:
+ key: target name
+ value: Board object
+ args (Namespace): Namespace to use
+
+ Returns:
+ int: Return code for buildman
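+ (0 if OK, 100 if there were build failures, 101 if there were
+ warnings and --ignore-warnings was not given, 102 if a builder
+ thread raised an exception)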
+ """
+ gnu_make = command.output(os.path.join(args.git,
+ 'scripts/show-gnu-make'), raise_on_error=False).rstrip()
+ if not gnu_make:
+ sys.exit('GNU Make not found')
+ builder.gnu_make = gnu_make
+
+ if not args.ide:
+ commit_count = count_build_commits(commits, args.step)
+ tprint(get_action_summary(args.summary, commit_count, board_selected,
+ args.threads, args.jobs))
+
+ builder.set_display_options(
+ args.show_errors, args.show_sizes, args.show_detail, args.show_bloat,
+ args.list_error_boards, args.show_config, args.show_environment,
+ args.filter_dtb_warnings, args.filter_migration_warnings, args.ide)
+ if args.summary:
+ builder.show_summary(commits, board_selected)
+ else:
+ fail, warned, excs = builder.build_boards(
+ commits, board_selected, args.keep_outputs, args.verbose)
+ if excs:
+ return 102
+ if fail:
+ return 100
+ if warned and not args.ignore_warnings:
+ return 101
+ return 0
+
+
+def calc_adjust_cfg(adjust_cfg, reproducible_builds):
+ """Calculate the value to use for adjust_cfg
+
+ Args:
+ adjust_cfg (list of str): List of configuration changes. See cfgutil for
+ details
+ reproducible_builds (bool): True to adjust the configuration to get
+ reproducible builds
+
+ Returns:
+ dict of str: Updated configuration changes, as a dict indexed by
+ option name
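+
+ For example, with reproducible_builds enabled and no LOCALVERSION
+ option given, LOCALVERSION_AUTO is set to '~' so that it is dropped
+ from the config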
+ """
+ adjust_cfg = cfgutil.convert_list_to_dict(adjust_cfg)
# Drop LOCALVERSION_AUTO since it changes the version string on every commit
- if options.reproducible_builds:
+ if reproducible_builds:
# If these are mentioned, leave the local version alone
if 'LOCALVERSION' in adjust_cfg or 'LOCALVERSION_AUTO' in adjust_cfg:
print('Not dropping LOCALVERSION_AUTO for reproducible build')
else:
adjust_cfg['LOCALVERSION_AUTO'] = '~'
+ return adjust_cfg
- builder = Builder(toolchains, output_dir, options.git_dir,
- options.threads, options.jobs, gnu_make=gnu_make, checkout=True,
- show_unknown=options.show_unknown, step=options.step,
- no_subdirs=options.no_subdirs, full_path=options.full_path,
- verbose_build=options.verbose_build,
- mrproper=options.mrproper,
- per_board_out_dir=options.per_board_out_dir,
- config_only=options.config_only,
- squash_config_y=not options.preserve_config_y,
- warnings_as_errors=options.warnings_as_errors,
- work_in_output=options.work_in_output,
- test_thread_exceptions=test_thread_exceptions,
- adjust_cfg=adjust_cfg,
- allow_missing=allow_missing, no_lto=options.no_lto,
- reproducible_builds=options.reproducible_builds)
- builder.force_config_on_failure = not options.quick
- if make_func:
- builder.do_make = make_func
+
+def do_buildman(args, toolchains=None, make_func=None, brds=None,
+ clean_dir=False, test_thread_exceptions=False):
+ """The main control code for buildman
+
+ Args:
+ args (Namespace): Parsed command line arguments
+ toolchains: Toolchains to use - this should be a Toolchains()
+ object. If None, then it will be created and scanned
+ make_func: Make function to use for the builder. This is called
+ to execute 'make'. If this is None, the normal function
+ will be used, which calls the 'make' tool with suitable
+ arguments. This setting is useful for tests.
+ brds: Boards() object to use, containing a list of available
+ boards. If this is None it will be created and scanned.
+ clean_dir: Used for tests only, indicates that the existing output_dir
+ should be removed before starting the build
+ test_thread_exceptions: Used for tests only, True to make the threads
+ raise an exception instead of reporting their result. This simulates
+ a failure in the code somewhere
+ """
+ # Used so testing can obtain the builder: pylint: disable=W0603
+ global TEST_BUILDER
+
+ gitutil.setup()
+ col = terminal.Color()
+
+ git_dir = os.path.join(args.git, '.git')
+
+ toolchains = get_toolchains(toolchains, col, args.override_toolchain,
+ args.fetch_arch, args.list_tool_chains,
+ args.verbose)
+ output_dir = setup_output_dir(
+ args.output_dir, args.work_in_output, args.branch,
+ args.no_subdirs, col, clean_dir)
+
+ # Work out what subset of the boards we are building
+ if not brds:
+ brds = get_boards_obj(output_dir, args.regen_board_list,
+ args.maintainer_check, args.full_check,
+ args.threads, args.verbose)
+ if isinstance(brds, int):
+ return brds
+
+ selected, why_selected, board_warnings = determine_boards(
+ brds, args.terms, col, args.boards, args.exclude)
+
+ if args.print_prefix:
+ show_toolchain_prefix(brds, toolchains)
+ return 0
+
+ if args.print_arch:
+ show_arch(brds)
+ return 0
+
+ series = determine_series(selected, col, git_dir, args.count,
+ args.branch, args.work_in_output)
+
+ adjust_args(args, series, selected)
# For a dry run, just show our actions as a sanity check
- if options.dry_run:
- ShowActions(series, why_selected, selected, builder, options,
- board_warnings)
- else:
- builder.force_build = options.force_build
- builder.force_build_failures = options.force_build_failures
- builder.force_reconfig = options.force_reconfig
- builder.in_tree = options.in_tree
-
- # Work out which boards to build
- board_selected = brds.get_selected_dict()
-
- if series:
- commits = series.commits
- # Number the commits for test purposes
- for commit in range(len(commits)):
- commits[commit].sequence = commit
- else:
- commits = None
-
- if not options.ide:
- tprint(GetActionSummary(options.summary, commits, board_selected,
- options))
-
- # We can't show function sizes without board details at present
- if options.show_bloat:
- options.show_detail = True
- builder.SetDisplayOptions(
- options.show_errors, options.show_sizes, options.show_detail,
- options.show_bloat, options.list_error_boards, options.show_config,
- options.show_environment, options.filter_dtb_warnings,
- options.filter_migration_warnings, options.ide)
- if options.summary:
- builder.ShowSummary(commits, board_selected)
- else:
- fail, warned, excs = builder.BuildBoards(
- commits, board_selected, options.keep_outputs, options.verbose)
- if excs:
- return 102
- elif fail:
- return 100
- elif warned and not options.ignore_warnings:
- return 101
- return 0
+ if args.dry_run:
+ show_actions(series, why_selected, selected, output_dir, board_warnings,
+ args.step, args.threads, args.jobs,
+ args.verbose)
+ return 0
+
+ # Create a new builder with the selected args
+ builder = Builder(toolchains, output_dir, git_dir,
+ args.threads, args.jobs, checkout=True,
+ show_unknown=args.show_unknown, step=args.step,
+ no_subdirs=args.no_subdirs, full_path=args.full_path,
+ verbose_build=args.verbose_build,
+ mrproper=args.mrproper,
+ per_board_out_dir=args.per_board_out_dir,
+ config_only=args.config_only,
+ squash_config_y=not args.preserve_config_y,
+ warnings_as_errors=args.warnings_as_errors,
+ work_in_output=args.work_in_output,
+ test_thread_exceptions=test_thread_exceptions,
+ adjust_cfg=calc_adjust_cfg(args.adjust_cfg,
+ args.reproducible_builds),
+ allow_missing=get_allow_missing(args.allow_missing,
+ args.no_allow_missing,
+ len(selected), args.branch),
+ no_lto=args.no_lto,
+ reproducible_builds=args.reproducible_builds,
+ force_build=args.force_build,
+ force_build_failures=args.force_build_failures,
+ force_reconfig=args.force_reconfig, in_tree=args.in_tree,
+ force_config_on_failure=not args.quick, make_func=make_func)
+
+ TEST_BUILDER = builder
+
+ return run_builder(builder, series.commits if series else None,
+ brds.get_selected_dict(), args)
diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py
index ebd78f2..58a9bf3 100644
--- a/tools/buildman/func_test.py
+++ b/tools/buildman/func_test.py
@@ -3,9 +3,11 @@
#
import os
+from pathlib import Path
import shutil
import sys
import tempfile
+import time
import unittest
from buildman import board
@@ -38,8 +40,8 @@ chromeos_peach=VBOOT=${chroot}/build/peach_pit/usr ${vboot}
'''
BOARDS = [
- ['Active', 'arm', 'armv7', '', 'Tester', 'ARM Board 1', 'board0', ''],
- ['Active', 'arm', 'armv7', '', 'Tester', 'ARM Board 2', 'board1', ''],
+ ['Active', 'arm', 'armv7', '', 'Tester', 'ARM Board 0', 'board0', ''],
+ ['Active', 'arm', 'armv7', '', 'Tester', 'ARM Board 1', 'board1', ''],
['Active', 'powerpc', 'powerpc', '', 'Tester', 'PowerPC board 1', 'board2', ''],
['Active', 'sandbox', 'sandbox', '', 'Tester', 'Sandbox board', 'board4', ''],
]
@@ -184,8 +186,8 @@ class TestFunctional(unittest.TestCase):
self._buildman_pathname = sys.argv[0]
self._buildman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
command.test_result = self._HandleCommand
- bsettings.Setup(None)
- bsettings.AddFile(settings_data)
+ bsettings.setup(None)
+ bsettings.add_file(settings_data)
self.setupToolchains()
self._toolchains.Add('arm-gcc', test=False)
self._toolchains.Add('powerpc-gcc', test=False)
@@ -209,6 +211,12 @@ class TestFunctional(unittest.TestCase):
# Set to True to report missing blobs
self._missing = False
+ self._buildman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
+ self._test_dir = os.path.join(self._buildman_dir, 'test')
+
+ # Set up some fake source files
+ shutil.copytree(self._test_dir, self._git_dir)
+
# Avoid sending any output and clear all terminal output
terminal.set_print_test_mode()
terminal.get_print_test_lines()
@@ -225,29 +233,34 @@ class TestFunctional(unittest.TestCase):
return command.run_pipe([[self._buildman_pathname] + list(args)],
capture=True, capture_stderr=True)
- def _RunControl(self, *args, brds=None, clean_dir=False,
- test_thread_exceptions=False):
+ def _RunControl(self, *args, brds=False, clean_dir=False,
+ test_thread_exceptions=False, get_builder=True):
"""Run buildman
Args:
args: List of arguments to pass
- brds: Boards object
+ brds: Boards object, or False to pass self._boards, or None to pass
+ None
clean_dir: Used for tests only, indicates that the existing output_dir
should be removed before starting the build
test_thread_exceptions: Uses for tests only, True to make the threads
raise an exception instead of reporting their result. This simulates
a failure in the code somewhere
+ get_builder (bool): Set self._builder to the resulting builder
Returns:
result code from buildman
"""
sys.argv = [sys.argv[0]] + list(args)
- options, args = cmdline.ParseArgs()
- result = control.DoBuildman(options, args, toolchains=self._toolchains,
- make_func=self._HandleMake, brds=brds or self._boards,
- clean_dir=clean_dir,
- test_thread_exceptions=test_thread_exceptions)
- self._builder = control.builder
+ args = cmdline.parse_args()
+ if brds is False:
+ brds = self._boards
+ result = control.do_buildman(
+ args, toolchains=self._toolchains, make_func=self._HandleMake,
+ brds=brds, clean_dir=clean_dir,
+ test_thread_exceptions=test_thread_exceptions)
+ if get_builder:
+ self._builder = control.TEST_BUILDER
return result
def testFullHelp(self):
@@ -496,10 +509,12 @@ Some images are invalid'''
for commit in range(self._commits):
for brd in self._boards.get_list():
if brd.arch != 'sandbox':
- errfile = self._builder.GetErrFile(commit, brd.target)
+ errfile = self._builder.get_err_file(commit, brd.target)
fd = open(errfile)
- self.assertEqual(fd.readlines(),
- ['No tool chain for %s\n' % brd.arch])
+ self.assertEqual(
+ fd.readlines(),
+ [f'Tool chain error for {brd.arch}: '
+ f"No tool chain found for arch '{brd.arch}'"])
fd.close()
def testBranch(self):
@@ -686,7 +701,7 @@ Some images are invalid'''
def testBlobSettingsAlways(self):
"""Test the 'always' policy"""
- bsettings.SetItem('global', 'allow-missing', 'always')
+ bsettings.set_item('global', 'allow-missing', 'always')
self.assertEqual(True,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(False,
@@ -694,7 +709,7 @@ Some images are invalid'''
def testBlobSettingsBranch(self):
"""Test the 'branch' policy"""
- bsettings.SetItem('global', 'allow-missing', 'branch')
+ bsettings.set_item('global', 'allow-missing', 'branch')
self.assertEqual(False,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(True,
@@ -704,7 +719,7 @@ Some images are invalid'''
def testBlobSettingsMultiple(self):
"""Test the 'multiple' policy"""
- bsettings.SetItem('global', 'allow-missing', 'multiple')
+ bsettings.set_item('global', 'allow-missing', 'multiple')
self.assertEqual(False,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(True,
@@ -714,7 +729,7 @@ Some images are invalid'''
def testBlobSettingsBranchMultiple(self):
"""Test the 'branch multiple' policy"""
- bsettings.SetItem('global', 'allow-missing', 'branch multiple')
+ bsettings.set_item('global', 'allow-missing', 'branch multiple')
self.assertEqual(False,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(True,
@@ -779,3 +794,276 @@ Some images are invalid'''
CONFIG_LOCALVERSION=y
''', cfg_data)
self.assertIn('Not dropping LOCALVERSION_AUTO', stdout.getvalue())
+
+ def test_scan_defconfigs(self):
+ """Test scanning the defconfigs to obtain all the boards"""
+ src = self._git_dir
+
+ # Scan the test directory which contains a Kconfig and some *_defconfig
+ # files
+ params, warnings = self._boards.scan_defconfigs(src, src)
+
+ # We should get two boards
+ self.assertEqual(2, len(params))
+ self.assertFalse(warnings)
+ first = 0 if params[0]['target'] == 'board0' else 1
+ board0 = params[first]
+ board2 = params[1 - first]
+
+ self.assertEqual('arm', board0['arch'])
+ self.assertEqual('armv7', board0['cpu'])
+ self.assertEqual('-', board0['soc'])
+ self.assertEqual('Tester', board0['vendor'])
+ self.assertEqual('ARM Board 0', board0['board'])
+ self.assertEqual('config0', board0['config'])
+ self.assertEqual('board0', board0['target'])
+
+ self.assertEqual('powerpc', board2['arch'])
+ self.assertEqual('ppc', board2['cpu'])
+ self.assertEqual('mpc85xx', board2['soc'])
+ self.assertEqual('Tester', board2['vendor'])
+ self.assertEqual('PowerPC board 1', board2['board'])
+ self.assertEqual('config2', board2['config'])
+ self.assertEqual('board2', board2['target'])
+
+ def test_output_is_new(self):
+ """Test detecting new changes to Kconfig"""
+ base = self._base_dir
+ src = self._git_dir
+ config_dir = os.path.join(src, 'configs')
+ delay = 0.02
+
+ # Create a boards.cfg file
+ boards_cfg = os.path.join(base, 'boards.cfg')
+ content = b'''#
+# List of boards
+# Automatically generated by buildman/boards.py: don't edit
+#
+# Status, Arch, CPU, SoC, Vendor, Board, Target, Config, Maintainers
+
+Active aarch64 armv8 - armltd corstone1000 board0
+Active aarch64 armv8 - armltd total_compute board2
+'''
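+ # The checks below rest on the (assumed) contract of output_is_new():
+ # boards.cfg counts as new only if it is at least as recent as every
+ # Kconfig*, MAINTAINERS and *_defconfig file under src, and it still
+ # lists a board for each defconfig that exists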
+ # Check missing file
+ self.assertFalse(boards.output_is_new(boards_cfg, config_dir, src))
+
+ # Check that the boards.cfg file is newer
+ time.sleep(delay)
+ tools.write_file(boards_cfg, content)
+ self.assertTrue(boards.output_is_new(boards_cfg, config_dir, src))
+
+ # Touch the Kconfig files after a short delay to avoid a race
+ time.sleep(delay)
+ Path(os.path.join(src, 'Kconfig')).touch()
+ self.assertFalse(boards.output_is_new(boards_cfg, config_dir, src))
+ Path(boards_cfg).touch()
+ self.assertTrue(boards.output_is_new(boards_cfg, config_dir, src))
+
+ # Touch a different Kconfig file
+ time.sleep(delay)
+ Path(os.path.join(src, 'Kconfig.something')).touch()
+ self.assertFalse(boards.output_is_new(boards_cfg, config_dir, src))
+ Path(boards_cfg).touch()
+ self.assertTrue(boards.output_is_new(boards_cfg, config_dir, src))
+
+ # Touch a MAINTAINERS file
+ time.sleep(delay)
+ Path(os.path.join(src, 'MAINTAINERS')).touch()
+ self.assertFalse(boards.output_is_new(boards_cfg, config_dir, src))
+
+ Path(boards_cfg).touch()
+ self.assertTrue(boards.output_is_new(boards_cfg, config_dir, src))
+
+ # Touch a defconfig file
+ time.sleep(delay)
+ Path(os.path.join(config_dir, 'board0_defconfig')).touch()
+ self.assertFalse(boards.output_is_new(boards_cfg, config_dir, src))
+ Path(boards_cfg).touch()
+ self.assertTrue(boards.output_is_new(boards_cfg, config_dir, src))
+
+ # Remove a board's defconfig and check that boards.cfg is no longer current
+ Path(os.path.join(config_dir, 'board0_defconfig')).unlink()
+ self.assertFalse(boards.output_is_new(boards_cfg, config_dir, src))
+
+ def test_maintainers(self):
+ """Test detecting boards without a MAINTAINERS entry"""
+ src = self._git_dir
+ main = os.path.join(src, 'boards', 'board0', 'MAINTAINERS')
+ other = os.path.join(src, 'boards', 'board2', 'MAINTAINERS')
+ kc_file = os.path.join(src, 'Kconfig')
+ config_dir = os.path.join(src, 'configs')
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+
+ # There should be two boards and no warnings
+ self.assertEqual(2, len(params_list))
+ self.assertFalse(warnings)
+
+ # Set an invalid status line in the file
+ orig_data = tools.read_file(main, binary=False)
+ lines = ['S: Other\n' if line.startswith('S:') else line
+ for line in orig_data.splitlines(keepends=True)]
+ tools.write_file(main, ''.join(lines), binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ params = params_list[0]
+ if params['target'] == 'board2':
+ params = params_list[1]
+ self.assertEqual('-', params['status'])
+ self.assertEqual(["WARNING: Other: unknown status for 'board0'"],
+ warnings)
+
+ # Remove the status line (S:) from a file
+ lines = [line for line in orig_data.splitlines(keepends=True)
+ if not line.startswith('S:')]
+ tools.write_file(main, ''.join(lines), binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(["WARNING: -: unknown status for 'board0'"], warnings)
+
+ # Remove the configs/ line (F:) from a file - this is the last line
+ data = ''.join(orig_data.splitlines(keepends=True)[:-1])
+ tools.write_file(main, data, binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(
+ ["WARNING: no maintainers for 'board0'",
+ 'WARNING: orphaned defconfig in boards/board0/MAINTAINERS ending at line 4',
+ ], warnings)
+
+ # Mark a board as orphaned - this should give a warning
+ lines = ['S: Orphaned' if line.startswith('S') else line
+ for line in orig_data.splitlines(keepends=True)]
+ tools.write_file(main, ''.join(lines), binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(["WARNING: no maintainers for 'board0'"], warnings)
+
+ # Change the maintainer to '-' - this should give a warning
+ lines = ['M: -' if line.startswith('M') else line
+ for line in orig_data.splitlines(keepends=True)]
+ tools.write_file(main, ''.join(lines), binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(["WARNING: -: unknown status for 'board0'"], warnings)
+
+ # Remove the maintainer line (M:) from a file
+ lines = [line for line in orig_data.splitlines(keepends=True)
+ if not line.startswith('M:')]
+ tools.write_file(main, ''.join(lines), binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(["WARNING: no maintainers for 'board0'"], warnings)
+
+ # Move the contents of the second file into this one, removing the
+ # second file, to check multiple records in a single file.
+ both_data = orig_data + tools.read_file(other, binary=False)
+ tools.write_file(main, both_data, binary=False)
+ os.remove(other)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertFalse(warnings)
+
+ # Add another record - this should be ignored, with a warning
+ extra = '\n\nAnother\nM: Fred\nF: configs/board9_defconfig\nS: other\n'
+ tools.write_file(main, both_data + extra, binary=False)
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(
+ ['WARNING: orphaned defconfig in boards/board0/MAINTAINERS ending at line 16'],
+ warnings)
+
+ # Add another TARGET to the Kconfig
+ tools.write_file(main, both_data, binary=False)
+ orig_kc_data = tools.read_file(kc_file)
+ extra = (b'''
+if TARGET_BOARD2
+config TARGET_OTHER
+\tbool "other"
+\tdefault y
+endif
+''')
+ tools.write_file(kc_file, orig_kc_data + extra)
+ params_list, warnings = self._boards.build_board_list(config_dir, src,
+ warn_targets=True)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(
+ ['WARNING: board2_defconfig: Duplicate TARGET_xxx: board2 and other'],
+ warnings)
+
+ # Remove the TARGET_BOARD2 Kconfig option
+ lines = [b'' if line == b'config TARGET_BOARD2\n' else line
+ for line in orig_kc_data.splitlines(keepends=True)]
+ tools.write_file(kc_file, b''.join(lines))
+ params_list, warnings = self._boards.build_board_list(config_dir, src,
+ warn_targets=True)
+ self.assertEqual(2, len(params_list))
+ self.assertEqual(
+ ['WARNING: board2_defconfig: No TARGET_BOARD2 enabled'],
+ warnings)
+ tools.write_file(kc_file, orig_kc_data)
+
+ # Replace the last F: line of board 2 with an N: line
+ data = ''.join(both_data.splitlines(keepends=True)[:-1])
+ tools.write_file(main, data + 'N: oa.*2\n', binary=False)
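+ # An N: entry is a regex rather than a literal path; 'oa.*2' is
+ # presumably matched against names such as configs/board2_defconfig
+ # (it hits 'oard2'), so board2 is still claimed and no warning results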
+ params_list, warnings = self._boards.build_board_list(config_dir, src)
+ self.assertEqual(2, len(params_list))
+ self.assertFalse(warnings)
+
+ def testRegenBoards(self):
+ """Test that we can regenerate the boards.cfg file"""
+ outfile = os.path.join(self._output_dir, 'test-boards.cfg')
+ if os.path.exists(outfile):
+ os.remove(outfile)
+ with test_util.capture_sys_output() as (stdout, stderr):
+ result = self._RunControl('-R', outfile, brds=None,
+ get_builder=False)
+ self.assertTrue(os.path.exists(outfile))
+
+ def test_print_prefix(self):
+ """Test that we can print the toolchain prefix"""
+ with test_util.capture_sys_output() as (stdout, stderr):
+ result = self._RunControl('-A', 'board0')
+ self.assertEqual('arm-\n', stdout.getvalue())
+ self.assertEqual('', stderr.getvalue())
+
+ def test_exclude_one(self):
+ """Test excluding a single board from an arch"""
+ self._RunControl('arm', '-x', 'board1')
+ self.assertEqual(['board0'],
+ [b.target for b in self._boards.get_selected()])
+
+ def test_exclude_arch(self):
+ """Test excluding an arch"""
+ self._RunControl('-x', 'arm')
+ self.assertEqual(['board2', 'board4'],
+ [b.target for b in self._boards.get_selected()])
+
+ def test_exclude_comma(self):
+ """Test excluding a comma-separated list of things"""
+ self._RunControl('-x', 'arm,powerpc')
+ self.assertEqual(['board4'],
+ [b.target for b in self._boards.get_selected()])
+
+ def test_exclude_list(self):
+ """Test excluding a list of things"""
+ self._RunControl('-x', 'board2', '-x', 'board4')
+ self.assertEqual(['board0', 'board1'],
+ [b.target for b in self._boards.get_selected()])
+
+ def test_single_boards(self):
+ """Test building single boards"""
+ self._RunControl('--boards', 'board1')
+ self.assertEqual(1, self._builder.count)
+
+ self._RunControl('--boards', 'board1', '--boards', 'board2')
+ self.assertEqual(2, self._builder.count)
+
+ self._RunControl('--boards', 'board1,board2', '--boards', 'board4')
+ self.assertEqual(3, self._builder.count)
+
+ def test_print_arch(self):
+ """Test that we can print the board architecture"""
+ with test_util.capture_sys_output() as (stdout, stderr):
+ result = self._RunControl('--print-arch', 'board0')
+ self.assertEqual('arm\n', stdout.getvalue())
+ self.assertEqual('', stderr.getvalue())
diff --git a/tools/buildman/main.py b/tools/buildman/main.py
index 5e1f68d..5f42a58 100755
--- a/tools/buildman/main.py
+++ b/tools/buildman/main.py
@@ -6,62 +6,91 @@
"""See README for more information"""
-import doctest
-import multiprocessing
+try:
+ from importlib.resources import files
+except ImportError:
+ # for Python 3.6
+ from importlib_resources import files
import os
-import re
import sys
# Bring in the patman libraries
+# pylint: disable=C0413
our_path = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, os.path.join(our_path, '..'))
# Our modules
-from buildman import board
from buildman import bsettings
-from buildman import builder
from buildman import cmdline
from buildman import control
-from buildman import toolchain
-from patman import patchstream
-from patman import gitutil
-from u_boot_pylib import terminal
from u_boot_pylib import test_util
+from u_boot_pylib import tools
-def RunTests(skip_net_tests, verboose, args):
+def run_tests(skip_net_tests, debug, verbose, args):
+ """Run the buildman tests
+
+ Args:
+ skip_net_tests (bool): True to skip tests which need the network
+ debug (bool): True to run in debugging mode (full traceback)
+ verbose (int): Verbosity level to use (0-4)
+ args (list of str): List of tests to run, empty to run all
+ """
+ # These imports are here since tests are not available when buildman is
+ # installed as a Python module
+ # pylint: disable=C0415
from buildman import func_test
from buildman import test
- import doctest
- test_name = args and args[0] or None
+ test_name = args.terms[0] if args.terms else None
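+ # e.g. a run like 'buildman --test testBranch' (assuming args.test
+ # comes from a --test flag) leaves args.terms == ['testBranch'], so
+ # only that one test is selected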
if skip_net_tests:
test.use_network = False
# Run the entry tests first, since these need to be the first to import the
# 'entry' module.
result = test_util.run_test_suites(
- 'buildman', False, verboose, False, None, test_name, [],
+ 'buildman', debug, verbose, False, args.threads, test_name, [],
[test.TestBuild, func_test.TestFunctional,
'buildman.toolchain', 'patman.gitutil'])
return (0 if result.wasSuccessful() else 1)
+def run_test_coverage():
+ """Run the tests and check that we get 100% coverage"""
+ test_util.run_test_coverage(
+ 'tools/buildman/buildman', None,
+ ['tools/patman/*.py', 'tools/u_boot_pylib/*', '*test_fdt.py',
+ 'tools/buildman/kconfiglib.py', 'tools/buildman/*test*.py',
+ 'tools/buildman/main.py'],
+ '/tmp/b', single_thread='-T1')
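+ # '-T1' is assumed here to be buildman's own single-thread flag; it
+ # replaces the '-P1' default that run_test_coverage would otherwise
+ # append (see the test_util.py change at the end of this diff)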
+
+
def run_buildman():
- options, args = cmdline.ParseArgs()
+ """Run bulidman
- if not options.debug:
+ This is the main program. It collects arguments and runs either the tests or
+ the control module.
+ """
+ args = cmdline.parse_args()
+
+ if not args.debug:
sys.tracebacklimit = 0
# Run our meagre tests
- if cmdline.HAS_TESTS and options.test:
- RunTests(options.skip_net_tests, options.verbose, args)
+ if cmdline.HAS_TESTS and args.test:
+ return run_tests(args.skip_net_tests, args.debug, args.verbose, args)
+
+ elif cmdline.HAS_TESTS and args.coverage:
+ run_test_coverage()
+
+ elif args.full_help:
+ tools.print_full_help(str(files('buildman').joinpath('README.rst')))
# Build selected commits for selected boards
else:
- bsettings.Setup(options.config_file)
- ret_code = control.DoBuildman(options, args)
- sys.exit(ret_code)
+ bsettings.setup(args.config_file)
+ ret_code = control.do_buildman(args)
+ return ret_code
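+ # Note: the coverage and full-help branches above fall through and
+ # return None, which sys.exit() below treats as success (exit code 0)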
if __name__ == "__main__":
- run_buildman()
+ sys.exit(run_buildman())
diff --git a/tools/buildman/test.py b/tools/buildman/test.py
index 9fa6445..bdd3d84 100644
--- a/tools/buildman/test.py
+++ b/tools/buildman/test.py
@@ -138,8 +138,8 @@ class TestBuild(unittest.TestCase):
self.brds.select_boards([])
# Add some test settings
- bsettings.Setup(None)
- bsettings.AddFile(settings_data)
+ bsettings.setup(None)
+ bsettings.add_file(settings_data)
# Set up the toolchains
self.toolchains = toolchain.Toolchains()
@@ -208,8 +208,8 @@ class TestBuild(unittest.TestCase):
# Build the boards for the pre-defined commits and warnings/errors
# associated with each. This calls our Make() to inject the fake output.
- build.BuildBoards(self.commits, board_selected, keep_outputs=False,
- verbose=False)
+ build.build_boards(self.commits, board_selected, keep_outputs=False,
+ verbose=False)
lines = terminal.get_print_test_lines()
count = 0
for line in lines:
@@ -219,8 +219,8 @@ class TestBuild(unittest.TestCase):
# We should get two starting messages, an update for every commit built
# and a summary message
self.assertEqual(count, len(commits) * len(BOARDS) + 3)
- build.SetDisplayOptions(**kwdisplay_args);
- build.ShowSummary(self.commits, board_selected)
+ build.set_display_options(**kwdisplay_args)
+ build.show_summary(self.commits, board_selected)
if echo_lines:
terminal.echo_print_test_lines()
return iter(terminal.get_print_test_lines())
@@ -465,7 +465,7 @@ class TestBuild(unittest.TestCase):
options.show_errors = False
options.keep_outputs = False
args = ['tegra20']
- control.DoBuildman(options, args)
+ control.do_buildman(options, args)
def testBoardSingle(self):
"""Test single board selection"""
@@ -528,17 +528,17 @@ class TestBuild(unittest.TestCase):
'sandbox']),
({'all': ['board4'], 'sandbox': ['board4']}, []))
def CheckDirs(self, build, dirname):
- self.assertEqual('base%s' % dirname, build._GetOutputDir(1))
+ self.assertEqual('base%s' % dirname, build.get_output_dir(1))
self.assertEqual('base%s/fred' % dirname,
- build.GetBuildDir(1, 'fred'))
+ build.get_build_dir(1, 'fred'))
self.assertEqual('base%s/fred/done' % dirname,
- build.GetDoneFile(1, 'fred'))
+ build.get_done_file(1, 'fred'))
self.assertEqual('base%s/fred/u-boot.sizes' % dirname,
- build.GetFuncSizesFile(1, 'fred', 'u-boot'))
+ build.get_func_sizes_file(1, 'fred', 'u-boot'))
self.assertEqual('base%s/fred/u-boot.objdump' % dirname,
- build.GetObjdumpFile(1, 'fred', 'u-boot'))
+ build.get_objdump_file(1, 'fred', 'u-boot'))
self.assertEqual('base%s/fred/err' % dirname,
- build.GetErrFile(1, 'fred'))
+ build.get_err_file(1, 'fred'))
def testOutputDir(self):
build = builder.Builder(self.toolchains, BASE_DIR, None, 1, 2,
@@ -622,7 +622,7 @@ class TestBuild(unittest.TestCase):
build = builder.Builder(self.toolchains, base_dir, None, 1, 2)
build.commits = self.commits
build.commit_count = len(commits)
- result = set(build._GetOutputSpaceRemovals())
+ result = set(build._get_output_space_removals())
expected = set([os.path.join(base_dir, f) for f in to_remove])
self.assertEqual(expected, result)
diff --git a/tools/buildman/test/Kconfig b/tools/buildman/test/Kconfig
new file mode 100644
index 0000000..a87660c
--- /dev/null
+++ b/tools/buildman/test/Kconfig
@@ -0,0 +1,72 @@
+# Board properties
+config SYS_ARCH
+ string
+
+config SYS_CPU
+ string
+
+config SYS_SOC
+ string
+
+config SYS_VENDOR
+ string
+
+config SYS_BOARD
+ string
+
+config SYS_CONFIG_NAME
+ string
+
+
+# Available targets
+config TARGET_BOARD0
+ bool "board 9"
+
+config TARGET_BOARD2
+ bool "board 2"
+
+
+# Settings for each board
+if TARGET_BOARD0
+
+config SYS_ARCH
+ default "arm"
+
+config SYS_CPU
+ default "armv7"
+
+#config SYS_SOC
+# string
+
+config SYS_VENDOR
+ default "Tester"
+
+config SYS_BOARD
+ default "ARM Board 0"
+
+config SYS_CONFIG_NAME
+ default "config0"
+
+endif
+
+if TARGET_BOARD2
+
+config SYS_ARCH
+ default "powerpc"
+
+config SYS_CPU
+ default "ppc"
+
+config SYS_SOC
+ default "mpc85xx"
+
+config SYS_VENDOR
+ default "Tester"
+
+config SYS_BOARD
+ default "PowerPC board 1"
+
+config SYS_CONFIG_NAME
+ default "config2"
+
+endif
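+
+# Note: buildman's scan presumably evaluates each *_defconfig against this
+# Kconfig and reads the resulting SYS_* defaults to fill in the arch, cpu,
+# soc, vendor, board and config fields checked by test_scan_defconfigs.
+# SYS_SOC is deliberately left unset for board0, which the scan reports
+# as '-'.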
diff --git a/tools/buildman/test/boards/board0/MAINTAINERS b/tools/buildman/test/boards/board0/MAINTAINERS
new file mode 100644
index 0000000..08207ff
--- /dev/null
+++ b/tools/buildman/test/boards/board0/MAINTAINERS
@@ -0,0 +1,5 @@
+ARM Board 0
+M: Mary Mary <quite@contrary.org>
+S: Maintained
+F: boards/board0
+F: configs/board0_defconfig
diff --git a/tools/buildman/test/boards/board2/MAINTAINERS b/tools/buildman/test/boards/board2/MAINTAINERS
new file mode 100644
index 0000000..c154782
--- /dev/null
+++ b/tools/buildman/test/boards/board2/MAINTAINERS
@@ -0,0 +1,5 @@
+ARM Board 2
+M: Old Mother <hubbard@cupboard.org>
+S: Maintained
+F: boards/board2
+F: configs/board2_defconfig
diff --git a/tools/buildman/test/configs/board0_defconfig b/tools/buildman/test/configs/board0_defconfig
new file mode 100644
index 0000000..50e562e
--- /dev/null
+++ b/tools/buildman/test/configs/board0_defconfig
@@ -0,0 +1 @@
+CONFIG_TARGET_BOARD0=y
diff --git a/tools/buildman/test/configs/board2_defconfig b/tools/buildman/test/configs/board2_defconfig
new file mode 100644
index 0000000..8b76c0a
--- /dev/null
+++ b/tools/buildman/test/configs/board2_defconfig
@@ -0,0 +1 @@
+CONFIG_TARGET_BOARD2=y
diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py
index 1001b61..b050011 100644
--- a/tools/buildman/toolchain.py
+++ b/tools/buildman/toolchain.py
@@ -139,7 +139,7 @@ class Toolchain:
"""Get toolchain wrapper from the setting file.
"""
value = ''
- for name, value in bsettings.GetItems('toolchain-wrapper'):
+ for name, value in bsettings.get_items('toolchain-wrapper'):
if not value:
print("Warning: Wrapper not found")
if value:
@@ -249,7 +249,7 @@ class Toolchains:
self.prefixes = {}
self.paths = []
self.override_toolchain = override_toolchain
- self._make_flags = dict(bsettings.GetItems('make-flags'))
+ self._make_flags = dict(bsettings.get_items('make-flags'))
def GetPathList(self, show_warning=True):
"""Get a list of available toolchain paths
@@ -261,7 +261,7 @@ class Toolchains:
List of strings, each a path to a toolchain mentioned in the
[toolchain] section of the settings file.
"""
- toolchains = bsettings.GetItems('toolchain')
+ toolchains = bsettings.get_items('toolchain')
if show_warning and not toolchains:
print(("Warning: No tool chains. Please run 'buildman "
"--fetch-arch all' to download all available toolchains, or "
@@ -283,7 +283,7 @@ class Toolchains:
Args:
show_warning: True to show a warning if there are no tool chains.
"""
- self.prefixes = bsettings.GetItems('toolchain-prefix')
+ self.prefixes = bsettings.get_items('toolchain-prefix')
self.paths += self.GetPathList(show_warning)
def Add(self, fname, test=True, verbose=False, priority=PRIORITY_CALC,
@@ -399,7 +399,7 @@ class Toolchains:
returns:
toolchain object, or None if none found
"""
- for tag, value in bsettings.GetItems('toolchain-alias'):
+ for tag, value in bsettings.get_items('toolchain-alias'):
if arch == tag:
for alias in value.split():
if alias in self.toolchains:
@@ -421,7 +421,7 @@ class Toolchains:
Returns:
Resolved string
- >>> bsettings.Setup(None)
+ >>> bsettings.setup(None)
>>> tcs = Toolchains()
>>> tcs.Add('fred', False)
>>> var_dict = {'oblique' : 'OBLIQUE', 'first' : 'fi${second}rst', \
@@ -598,5 +598,5 @@ class Toolchains:
if not self.TestSettingsHasPath(dirpath):
print(("Adding 'download' to config file '%s'" %
bsettings.config_fname))
- bsettings.SetItem('toolchain', 'download', '%s/*/*' % dest)
+ bsettings.set_item('toolchain', 'download', '%s/*/*' % dest)
return 0
diff --git a/tools/moveconfig.py b/tools/moveconfig.py
index c4d72ed..6cbecc3 100755
--- a/tools/moveconfig.py
+++ b/tools/moveconfig.py
@@ -2037,7 +2037,7 @@ doc/develop/moveconfig.rst for documentation.'''
if not args.cleanup_headers_only:
check_clean_directory()
- bsettings.Setup('')
+ bsettings.setup('')
toolchains = toolchain.Toolchains()
toolchains.GetSettings()
toolchains.Scan(verbose=False)
diff --git a/tools/u_boot_pylib/test_util.py b/tools/u_boot_pylib/test_util.py
index e7564e1..f18d385 100644
--- a/tools/u_boot_pylib/test_util.py
+++ b/tools/u_boot_pylib/test_util.py
@@ -24,7 +24,7 @@ except:
def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None,
- extra_args=None):
+ extra_args=None, single_thread='-P1'):
"""Run tests and check that we get 100% coverage
Args:
@@ -39,6 +39,9 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None
required: List of modules which must be in the coverage report
extra_args (str): Extra arguments to pass to the tool before the -t/test
arg
+ single_thread (str): Argument string to make the tests run
+ single-threaded. This is necessary to get proper coverage results.
+ The default is '-P1'
Raises:
ValueError if the code coverage is not 100%
@@ -58,8 +61,9 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None
if build_dir:
prefix = 'PYTHONPATH=$PYTHONPATH:%s/sandbox_spl/tools ' % build_dir
cmd = ('%spython3-coverage run '
- '--omit "%s" %s %s %s -P1' % (prefix, ','.join(glob_list),
- prog, extra_args or '', test_cmd))
+ '--omit "%s" %s %s %s %s' % (prefix, ','.join(glob_list),
+ prog, extra_args or '', test_cmd,
+ single_thread or '-P1'))
os.system(cmd)
stdout = command.output('python3-coverage', 'report')
lines = stdout.splitlines()