author     Tom Rini <trini@konsulko.com>  2023-07-12 13:10:37 -0400
committer  Tom Rini <trini@konsulko.com>  2023-07-12 13:10:37 -0400
commit     5da16b9ea1755712b70bb28d04add0f02c51407b (patch)
tree       044bc09f5671e799985135539935fcae1ecd2ba5
parent     bf5152d0108683bbaabf9d7a7988f61649fc33f4 (diff)
parent     45aa7ac492d90ddc2977ea6c9d161e7cc3333487 (diff)
Merge tag 'dm-pull-12jul23a' of https://source.denx.de/u-boot/custodians/u-boot-dm
Misc fixes; buildman refactoring (no functional change)
-rw-r--r--  .azure-pipelines.yml               2
-rw-r--r--  .gitlab-ci.yml                     2
-rw-r--r--  arch/sandbox/include/asm/sdl.h    23
-rw-r--r--  arch/sandbox/include/asm/test.h   25
-rw-r--r--  boot/bootmeth_script.c             5
-rw-r--r--  cmd/load.c                        16
-rw-r--r--  drivers/core/of_access.c           5
-rw-r--r--  drivers/core/read.c                5
-rw-r--r--  drivers/reset/reset-rockchip.c     2
-rw-r--r--  include/dm/read.h                 12
-rw-r--r--  test/dm/video.c                    1
-rw-r--r--  tools/buildman/bsettings.py       14
-rw-r--r--  tools/buildman/builder.py        262
-rw-r--r--  tools/buildman/builderthread.py  652
-rw-r--r--  tools/buildman/buildman.rst        9
-rw-r--r--  tools/buildman/cmdline.py        172
-rw-r--r--  tools/buildman/control.py        778
-rw-r--r--  tools/buildman/func_test.py       76
-rwxr-xr-x  tools/buildman/main.py            66
-rw-r--r--  tools/buildman/test.py            28
-rw-r--r--  tools/buildman/toolchain.py       14
-rwxr-xr-x  tools/moveconfig.py                2
-rw-r--r--  tools/u_boot_pylib/pyproject.toml  6
-rw-r--r--  tools/u_boot_pylib/test_util.py   10
24 files changed, 1360 insertions, 827 deletions
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
index 06c46b6..c27607a 100644
--- a/.azure-pipelines.yml
+++ b/.azure-pipelines.yml
@@ -123,7 +123,7 @@ stages:
options: $(container_option)
steps:
- script: |
- ./tools/buildman/buildman -R
+ ./tools/buildman/buildman -R -
- job: tools_only
displayName: 'Ensure host tools build'
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index cfd5851..9944a74 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -184,7 +184,7 @@ sloccount:
Check for configs without MAINTAINERS entry:
stage: testsuites
script:
- - ./tools/buildman/buildman -R
+ - ./tools/buildman/buildman -R -
# Ensure host tools build
Build tools-only:
diff --git a/arch/sandbox/include/asm/sdl.h b/arch/sandbox/include/asm/sdl.h
index 56dcb84..ee4991f 100644
--- a/arch/sandbox/include/asm/sdl.h
+++ b/arch/sandbox/include/asm/sdl.h
@@ -7,6 +7,7 @@
#define __SANDBOX_SDL_H
#include <errno.h>
+#include <video.h>
#ifdef CONFIG_SANDBOX_SDL
@@ -87,6 +88,22 @@ int sandbox_sdl_sound_stop(void);
*/
int sandbox_sdl_sound_init(int rate, int channels);
+/**
+ * sandbox_sdl_set_bpp() - Set the depth of the sandbox display
+ *
+ * The device must not be active when this function is called. It activates it
+ * before returning.
+ *
+ * This updates the depth value and adjusts a few other settings accordingly.
+ * It must be called before the display is probed.
+ *
+ * @dev: Device to adjust
+ * @l2bpp: depth to set
+ * Return: 0 if the device was already active, other error if it fails to probe
+ * after the change
+ */
+int sandbox_sdl_set_bpp(struct udevice *dev, enum video_log2_bpp l2bpp);
+
#else
static inline int sandbox_sdl_init_display(int width, int height, int log2_bpp,
bool double_size)
@@ -134,6 +151,12 @@ static inline int sandbox_sdl_sound_init(int rate, int channels)
return -ENODEV;
}
+static inline int sandbox_sdl_set_bpp(struct udevice *dev,
+ enum video_log2_bpp l2bpp)
+{
+ return -ENOSYS;
+}
+
#endif
#endif
diff --git a/arch/sandbox/include/asm/test.h b/arch/sandbox/include/asm/test.h
index e482271..17159f8 100644
--- a/arch/sandbox/include/asm/test.h
+++ b/arch/sandbox/include/asm/test.h
@@ -8,7 +8,6 @@
#ifndef __ASM_TEST_H
#define __ASM_TEST_H
-#include <video.h>
#include <pci_ids.h>
struct unit_test_state;
@@ -300,30 +299,6 @@ void sandbox_cros_ec_set_test_flags(struct udevice *dev, uint flags);
*/
int sandbox_cros_ec_get_pwm_duty(struct udevice *dev, uint index, uint *duty);
-#if IS_ENABLED(CONFIG_SANDBOX_SDL)
-/**
- * sandbox_sdl_set_bpp() - Set the depth of the sandbox display
- *
- * The device must not be active when this function is called. It activiates it
- * before returning.
- *
- * This updates the depth value and adjusts a few other settings accordingly.
- * It must be called before the display is probed.
- *
- * @dev: Device to adjust
- * @l2bpp: depth to set
- * Return: 0 if the device was already active, other error if it fails to probe
- * after the change
- */
-int sandbox_sdl_set_bpp(struct udevice *dev, enum video_log2_bpp l2bpp);
-#else
-static inline int sandbox_sdl_set_bpp(struct udevice *dev,
- enum video_log2_bpp l2bpp)
-{
- return -ENOSYS;
-}
-#endif
-
/**
* sandbox_set_fake_efi_mgr_dev() - Control EFI bootmgr producing valid bootflow
*
diff --git a/boot/bootmeth_script.c b/boot/bootmeth_script.c
index 225eb18..a4050c3 100644
--- a/boot/bootmeth_script.c
+++ b/boot/bootmeth_script.c
@@ -190,7 +190,10 @@ static int script_boot(struct udevice *dev, struct bootflow *bflow)
ulong addr;
int ret;
- ret = env_set("devtype", blk_get_devtype(bflow->blk));
+ if (desc->uclass_id == UCLASS_USB)
+ ret = env_set("devtype", "usb");
+ else
+ ret = env_set("devtype", blk_get_devtype(bflow->blk));
if (!ret)
ret = env_set_hex("devnum", desc->devnum);
if (!ret)
diff --git a/cmd/load.c b/cmd/load.c
index 5c4f347..2715cf5 100644
--- a/cmd/load.c
+++ b/cmd/load.c
@@ -181,13 +181,17 @@ static ulong load_serial(long offset)
} else
#endif
{
+ void *dst;
+
ret = lmb_reserve(&lmb, store_addr, binlen);
if (ret) {
printf("\nCannot overwrite reserved area (%08lx..%08lx)\n",
store_addr, store_addr + binlen);
return ret;
}
- memcpy((char *)(store_addr), binbuf, binlen);
+ dst = map_sysmem(store_addr, binlen);
+ memcpy(dst, binbuf, binlen);
+ unmap_sysmem(dst);
lmb_free(&lmb, store_addr, binlen);
}
if ((store_addr) < start_addr)
@@ -350,15 +354,19 @@ static int save_serial(ulong address, ulong count)
if(write_record(SREC3_START)) /* write the header */
return (-1);
do {
- if(count) { /* collect hex data in the buffer */
- c = *(volatile uchar*)(address + reclen); /* get one byte */
- checksum += c; /* accumulate checksum */
+ volatile uchar *src;
+
+ src = map_sysmem(address, count);
+ if (count) { /* collect hex data in the buffer */
+ c = src[reclen]; /* get one byte */
+ checksum += c; /* accumulate checksum */
data[2*reclen] = hex[(c>>4)&0x0f];
data[2*reclen+1] = hex[c & 0x0f];
data[2*reclen+2] = '\0';
++reclen;
--count;
}
+ unmap_sysmem((void *)src);
if(reclen == SREC_BYTES_PER_RECORD || count == 0) {
/* enough data collected for one record: dump it */
if(reclen) { /* build & write a data record: */
diff --git a/drivers/core/of_access.c b/drivers/core/of_access.c
index 81a3079..57f1044 100644
--- a/drivers/core/of_access.c
+++ b/drivers/core/of_access.c
@@ -593,11 +593,14 @@ int of_read_u64(const struct device_node *np, const char *propname, u64 *outp)
int of_property_match_string(const struct device_node *np, const char *propname,
const char *string)
{
- const struct property *prop = of_find_property(np, propname, NULL);
+ int len = 0;
+ const struct property *prop = of_find_property(np, propname, &len);
size_t l;
int i;
const char *p, *end;
+ if (!prop && len == -FDT_ERR_NOTFOUND)
+ return -ENOENT;
if (!prop)
return -EINVAL;
if (!prop->value)
diff --git a/drivers/core/read.c b/drivers/core/read.c
index 0289a2e..5749473 100644
--- a/drivers/core/read.c
+++ b/drivers/core/read.c
@@ -211,10 +211,9 @@ void *dev_remap_addr(const struct udevice *dev)
return dev_remap_addr_index(dev, 0);
}
-fdt_addr_t dev_read_addr_size(const struct udevice *dev, const char *property,
- fdt_size_t *sizep)
+fdt_addr_t dev_read_addr_size(const struct udevice *dev, fdt_size_t *sizep)
{
- return ofnode_get_addr_size(dev_ofnode(dev), property, sizep);
+ return dev_read_addr_size_index(dev, 0, sizep);
}
const char *dev_read_name(const struct udevice *dev)
diff --git a/drivers/reset/reset-rockchip.c b/drivers/reset/reset-rockchip.c
index 2ebe338..6cabaa1 100644
--- a/drivers/reset/reset-rockchip.c
+++ b/drivers/reset/reset-rockchip.c
@@ -97,7 +97,7 @@ static int rockchip_reset_probe(struct udevice *dev)
fdt_addr_t addr;
fdt_size_t size;
- addr = dev_read_addr_size(dev, "reg", &size);
+ addr = dev_read_addr_size(dev, &size);
if (addr == FDT_ADDR_T_NONE)
return -EINVAL;
diff --git a/include/dm/read.h b/include/dm/read.h
index 56ac076..137f2a5 100644
--- a/include/dm/read.h
+++ b/include/dm/read.h
@@ -347,18 +347,13 @@ fdt_addr_t dev_read_addr_pci(const struct udevice *dev);
void *dev_remap_addr(const struct udevice *dev);
/**
- * dev_read_addr_size() - get address and size from a device property
- *
- * This does no address translation. It simply reads an property that contains
- * an address and a size value, one after the other.
+ * dev_read_addr_size() - Get the reg property of a device
*
* @dev: Device to read from
- * @propname: property to read
* @sizep: place to put size value (on success)
* Return: address value, or FDT_ADDR_T_NONE on error
*/
-fdt_addr_t dev_read_addr_size(const struct udevice *dev, const char *propname,
- fdt_size_t *sizep);
+fdt_addr_t dev_read_addr_size(const struct udevice *dev, fdt_size_t *sizep);
/**
* dev_read_name() - get the name of a device's node
@@ -1002,10 +997,9 @@ static inline void *dev_remap_addr_name(const struct udevice *dev,
}
static inline fdt_addr_t dev_read_addr_size(const struct udevice *dev,
- const char *propname,
fdt_size_t *sizep)
{
- return ofnode_get_addr_size(dev_ofnode(dev), propname, sizep);
+ return dev_read_addr_size_index(dev, 0, sizep);
}
static inline const char *dev_read_name(const struct udevice *dev)
diff --git a/test/dm/video.c b/test/dm/video.c
index 3077815..1c63d16 100644
--- a/test/dm/video.c
+++ b/test/dm/video.c
@@ -15,6 +15,7 @@
#include <video.h>
#include <video_console.h>
#include <asm/test.h>
+#include <asm/sdl.h>
#include <dm/test.h>
#include <dm/uclass-internal.h>
#include <test/test.h>
diff --git a/tools/buildman/bsettings.py b/tools/buildman/bsettings.py
index 0eb894a..612ec0c 100644
--- a/tools/buildman/bsettings.py
+++ b/tools/buildman/bsettings.py
@@ -7,7 +7,7 @@ import io
config_fname = None
-def Setup(fname=''):
+def setup(fname=''):
"""Set up the buildman settings module by reading config files
Args:
@@ -23,15 +23,15 @@ def Setup(fname=''):
config_fname = '%s/.buildman' % os.getenv('HOME')
if not os.path.exists(config_fname):
print('No config file found ~/.buildman\nCreating one...\n')
- CreateBuildmanConfigFile(config_fname)
+ create_buildman_config_file(config_fname)
print('To install tool chains, please use the --fetch-arch option')
if config_fname:
settings.read(config_fname)
-def AddFile(data):
+def add_file(data):
settings.readfp(io.StringIO(data))
-def GetItems(section):
+def get_items(section):
"""Get the items from a section of the config.
Args:
@@ -47,7 +47,7 @@ def GetItems(section):
except:
raise
-def GetGlobalItemValue(name):
+def get_global_item_value(name):
"""Get an item from the 'global' section of the config.
Args:
@@ -58,7 +58,7 @@ def GetGlobalItemValue(name):
"""
return settings.get('global', name, fallback=None)
-def SetItem(section, tag, value):
+def set_item(section, tag, value):
"""Set an item and write it back to the settings file"""
global settings
global config_fname
@@ -68,7 +68,7 @@ def SetItem(section, tag, value):
with open(config_fname, 'w') as fd:
settings.write(fd)
-def CreateBuildmanConfigFile(config_fname):
+def create_buildman_config_file(config_fname):
"""Creates a new config file with no tool chain information.
Args:
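A minimal usage sketch (not part of this commit) of the renamed snake_case bsettings helpers above; the import path and the 'allow-missing' setting name are assumptions, not taken from this diff:

    # Hypothetical caller; assumes tools/buildman is on sys.path
    from buildman import bsettings

    bsettings.setup()                              # read ~/.buildman, creating it if missing
    toolchains = bsettings.get_items('toolchain')  # list of (name, value) pairs
    bsettings.set_item('global', 'allow-missing', 'always')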
diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py
index d81752e..ecbd368 100644
--- a/tools/buildman/builder.py
+++ b/tools/buildman/builder.py
@@ -134,7 +134,7 @@ class Config:
for fname in config_filename:
self.config[fname] = {}
- def Add(self, fname, key, value):
+ def add(self, fname, key, value):
self.config[fname][key] = value
def __hash__(self):
@@ -151,7 +151,7 @@ class Environment:
self.target = target
self.environment = {}
- def Add(self, key, value):
+ def add(self, key, value):
self.environment[key] = value
class Builder:
@@ -163,7 +163,8 @@ class Builder:
checkout: True to check out source, False to skip that step.
This is used for testing.
col: terminal.Color() object
- count: Number of commits to build
+ count: Total number of commits to build, which is the number of commits
+ multiplied by the number of boards
do_make: Method to call to invoke Make
fail: Number of builds that failed due to error
force_build: Force building even if a build already exists
@@ -255,7 +256,10 @@ class Builder:
config_only=False, squash_config_y=False,
warnings_as_errors=False, work_in_output=False,
test_thread_exceptions=False, adjust_cfg=None,
- allow_missing=False, no_lto=False, reproducible_builds=False):
+ allow_missing=False, no_lto=False, reproducible_builds=False,
+ force_build=False, force_build_failures=False,
+ force_reconfig=False, in_tree=False,
+ force_config_on_failure=False, make_func=None):
"""Create a new Builder object
Args:
@@ -295,7 +299,14 @@ class Builder:
a string Kconfig
allow_missing: Run build with BINMAN_ALLOW_MISSING=1
no_lto (bool): True to set the NO_LTO flag when building
-
+ force_build (bool): Rebuild even commits that are already built
+ force_build_failures (bool): Rebuild commits that have not been
+ built, or failed to build
+ force_reconfig (bool): Reconfigure on each commit
+ in_tree (bool): Build in-tree instead of out-of-tree
+ force_config_on_failure (bool): Reconfigure the build before
+ retrying a failed build
+ make_func (function): Function to call to run 'make'
"""
self.toolchains = toolchains
self.base_dir = base_dir
@@ -304,7 +315,7 @@ class Builder:
else:
self._working_dir = os.path.join(base_dir, '.bm-work')
self.threads = []
- self.do_make = self.Make
+ self.do_make = make_func or self.make
self.gnu_make = gnu_make
self.checkout = checkout
self.num_threads = num_threads
@@ -318,11 +329,7 @@ class Builder:
self._complete_delay = None
self._next_delay_update = datetime.now()
self._start_time = datetime.now()
- self.force_config_on_failure = True
- self.force_build_failures = False
- self.force_reconfig = False
self._step = step
- self.in_tree = False
self._error_lines = 0
self.no_subdirs = no_subdirs
self.full_path = full_path
@@ -336,6 +343,11 @@ class Builder:
self._ide = False
self.no_lto = no_lto
self.reproducible_builds = reproducible_builds
+ self.force_build = force_build
+ self.force_build_failures = force_build_failures
+ self.force_reconfig = force_reconfig
+ self.in_tree = in_tree
+ self.force_config_on_failure = force_config_on_failure
if not self.squash_config_y:
self.config_filenames += EXTRA_CONFIG_FILENAMES
@@ -389,7 +401,7 @@ class Builder:
def signal_handler(self, signal, frame):
sys.exit(1)
- def SetDisplayOptions(self, show_errors=False, show_sizes=False,
+ def set_display_options(self, show_errors=False, show_sizes=False,
show_detail=False, show_bloat=False,
list_error_boards=False, show_config=False,
show_environment=False, filter_dtb_warnings=False,
@@ -422,7 +434,7 @@ class Builder:
self._filter_migration_warnings = filter_migration_warnings
self._ide = ide
- def _AddTimestamp(self):
+ def _add_timestamp(self):
"""Add a new timestamp to the list and record the build period.
The build period is the length of time taken to perform a single
@@ -451,14 +463,14 @@ class Builder:
self._timestamps.popleft()
count -= 1
- def SelectCommit(self, commit, checkout=True):
+ def select_commit(self, commit, checkout=True):
"""Checkout the selected commit for this build
"""
self.commit = commit
if checkout and self.checkout:
gitutil.checkout(commit.hash)
- def Make(self, commit, brd, stage, cwd, *args, **kwargs):
+ def make(self, commit, brd, stage, cwd, *args, **kwargs):
"""Run make
Args:
@@ -503,7 +515,7 @@ class Builder:
result.combined = '%s\n' % (' '.join(cmd)) + result.combined
return result
- def ProcessResult(self, result):
+ def process_result(self, result):
"""Process the result of a build, showing progress information
Args:
@@ -524,8 +536,8 @@ class Builder:
if self._verbose:
terminal.print_clear()
boards_selected = {target : result.brd}
- self.ResetResultSummary(boards_selected)
- self.ProduceResultSummary(result.commit_upto, self.commits,
+ self.reset_result_summary(boards_selected)
+ self.produce_result_summary(result.commit_upto, self.commits,
boards_selected)
else:
target = '(starting)'
@@ -544,7 +556,7 @@ class Builder:
line += ' ' * 8
# Add our current completion time estimate
- self._AddTimestamp()
+ self._add_timestamp()
if self._complete_delay:
line += '%s : ' % self._complete_delay
@@ -553,7 +565,7 @@ class Builder:
terminal.print_clear()
tprint(line, newline=False, limit_to_line=True)
- def _GetOutputDir(self, commit_upto):
+ def get_output_dir(self, commit_upto):
"""Get the name of the output directory for a commit number
The output directory is typically .../<branch>/<commit>.
@@ -568,7 +580,7 @@ class Builder:
if self.commits:
commit = self.commits[commit_upto]
subject = commit.subject.translate(trans_valid_chars)
- # See _GetOutputSpaceRemovals() which parses this name
+ # See _get_output_space_removals() which parses this name
commit_dir = ('%02d_g%s_%s' % (commit_upto + 1,
commit.hash, subject[:20]))
elif not self.no_subdirs:
@@ -577,7 +589,7 @@ class Builder:
return self.base_dir
return os.path.join(self.base_dir, commit_dir)
- def GetBuildDir(self, commit_upto, target):
+ def get_build_dir(self, commit_upto, target):
"""Get the name of the build directory for a commit number
The build directory is typically .../<branch>/<commit>/<target>.
@@ -586,30 +598,30 @@ class Builder:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- output_dir = self._GetOutputDir(commit_upto)
+ output_dir = self.get_output_dir(commit_upto)
if self.work_in_output:
return output_dir
return os.path.join(output_dir, target)
- def GetDoneFile(self, commit_upto, target):
+ def get_done_file(self, commit_upto, target):
"""Get the name of the done file for a commit number
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- return os.path.join(self.GetBuildDir(commit_upto, target), 'done')
+ return os.path.join(self.get_build_dir(commit_upto, target), 'done')
- def GetSizesFile(self, commit_upto, target):
+ def get_sizes_file(self, commit_upto, target):
"""Get the name of the sizes file for a commit number
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- return os.path.join(self.GetBuildDir(commit_upto, target), 'sizes')
+ return os.path.join(self.get_build_dir(commit_upto, target), 'sizes')
- def GetFuncSizesFile(self, commit_upto, target, elf_fname):
+ def get_func_sizes_file(self, commit_upto, target, elf_fname):
"""Get the name of the funcsizes file for a commit number and ELF file
Args:
@@ -617,10 +629,10 @@ class Builder:
target: Target name
elf_fname: Filename of elf image
"""
- return os.path.join(self.GetBuildDir(commit_upto, target),
+ return os.path.join(self.get_build_dir(commit_upto, target),
'%s.sizes' % elf_fname.replace('/', '-'))
- def GetObjdumpFile(self, commit_upto, target, elf_fname):
+ def get_objdump_file(self, commit_upto, target, elf_fname):
"""Get the name of the objdump file for a commit number and ELF file
Args:
@@ -628,20 +640,20 @@ class Builder:
target: Target name
elf_fname: Filename of elf image
"""
- return os.path.join(self.GetBuildDir(commit_upto, target),
+ return os.path.join(self.get_build_dir(commit_upto, target),
'%s.objdump' % elf_fname.replace('/', '-'))
- def GetErrFile(self, commit_upto, target):
+ def get_err_file(self, commit_upto, target):
"""Get the name of the err file for a commit number
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
"""
- output_dir = self.GetBuildDir(commit_upto, target)
+ output_dir = self.get_build_dir(commit_upto, target)
return os.path.join(output_dir, 'err')
- def FilterErrors(self, lines):
+ def filter_errors(self, lines):
"""Filter out errors in which we have no interest
We should probably use map().
@@ -664,7 +676,7 @@ class Builder:
out_lines.append(line)
return out_lines
- def ReadFuncSizes(self, fname, fd):
+ def read_func_sizes(self, fname, fd):
"""Read function sizes from the output of 'nm'
Args:
@@ -688,7 +700,7 @@ class Builder:
sym[name] = sym.get(name, 0) + int(size, 16)
return sym
- def _ProcessConfig(self, fname):
+ def _process_config(self, fname):
"""Read in a .config, autoconf.mk or autoconf.h file
This function handles all config file types. It ignores comments and
@@ -725,7 +737,7 @@ class Builder:
config[key] = value
return config
- def _ProcessEnvironment(self, fname):
+ def _process_environment(self, fname):
"""Read in a uboot.env file
This function reads in environment variables from a file.
@@ -750,7 +762,7 @@ class Builder:
pass
return environment
- def GetBuildOutcome(self, commit_upto, target, read_func_sizes,
+ def get_build_outcome(self, commit_upto, target, read_func_sizes,
read_config, read_environment):
"""Work out the outcome of a build.
@@ -764,8 +776,8 @@ class Builder:
Returns:
Outcome object
"""
- done_file = self.GetDoneFile(commit_upto, target)
- sizes_file = self.GetSizesFile(commit_upto, target)
+ done_file = self.get_done_file(commit_upto, target)
+ sizes_file = self.get_sizes_file(commit_upto, target)
sizes = {}
func_sizes = {}
config = {}
@@ -779,10 +791,10 @@ class Builder:
# Try a rebuild
return_code = 1
err_lines = []
- err_file = self.GetErrFile(commit_upto, target)
+ err_file = self.get_err_file(commit_upto, target)
if os.path.exists(err_file):
with open(err_file, 'r') as fd:
- err_lines = self.FilterErrors(fd.readlines())
+ err_lines = self.filter_errors(fd.readlines())
# Decide whether the build was ok, failed or created warnings
if return_code:
@@ -811,30 +823,30 @@ class Builder:
sizes[values[5]] = size_dict
if read_func_sizes:
- pattern = self.GetFuncSizesFile(commit_upto, target, '*')
+ pattern = self.get_func_sizes_file(commit_upto, target, '*')
for fname in glob.glob(pattern):
with open(fname, 'r') as fd:
dict_name = os.path.basename(fname).replace('.sizes',
'')
- func_sizes[dict_name] = self.ReadFuncSizes(fname, fd)
+ func_sizes[dict_name] = self.read_func_sizes(fname, fd)
if read_config:
- output_dir = self.GetBuildDir(commit_upto, target)
+ output_dir = self.get_build_dir(commit_upto, target)
for name in self.config_filenames:
fname = os.path.join(output_dir, name)
- config[name] = self._ProcessConfig(fname)
+ config[name] = self._process_config(fname)
if read_environment:
- output_dir = self.GetBuildDir(commit_upto, target)
+ output_dir = self.get_build_dir(commit_upto, target)
fname = os.path.join(output_dir, 'uboot.env')
- environment = self._ProcessEnvironment(fname)
+ environment = self._process_environment(fname)
return Builder.Outcome(rc, err_lines, sizes, func_sizes, config,
environment)
return Builder.Outcome(OUTCOME_UNKNOWN, [], {}, {}, {}, {})
- def GetResultSummary(self, boards_selected, commit_upto, read_func_sizes,
+ def get_result_summary(self, boards_selected, commit_upto, read_func_sizes,
read_config, read_environment):
"""Calculate a summary of the results of building a commit.
@@ -865,7 +877,7 @@ class Builder:
key: environment variable
value: value of environment variable
"""
- def AddLine(lines_summary, lines_boards, line, board):
+ def add_line(lines_summary, lines_boards, line, board):
line = line.rstrip()
if line in lines_boards:
lines_boards[line].append(board)
@@ -882,7 +894,7 @@ class Builder:
environment = {}
for brd in boards_selected.values():
- outcome = self.GetBuildOutcome(commit_upto, brd.target,
+ outcome = self.get_build_outcome(commit_upto, brd.target,
read_func_sizes, read_config,
read_environment)
board_dict[brd.target] = outcome
@@ -899,15 +911,15 @@ class Builder:
is_note = self._re_note.match(line)
if is_warning or (last_was_warning and is_note):
if last_func:
- AddLine(warn_lines_summary, warn_lines_boards,
+ add_line(warn_lines_summary, warn_lines_boards,
last_func, brd)
- AddLine(warn_lines_summary, warn_lines_boards,
+ add_line(warn_lines_summary, warn_lines_boards,
line, brd)
else:
if last_func:
- AddLine(err_lines_summary, err_lines_boards,
+ add_line(err_lines_summary, err_lines_boards,
last_func, brd)
- AddLine(err_lines_summary, err_lines_boards,
+ add_line(err_lines_summary, err_lines_boards,
line, brd)
last_was_warning = is_warning
last_func = None
@@ -915,19 +927,19 @@ class Builder:
for fname in self.config_filenames:
if outcome.config:
for key, value in outcome.config[fname].items():
- tconfig.Add(fname, key, value)
+ tconfig.add(fname, key, value)
config[brd.target] = tconfig
tenvironment = Environment(brd.target)
if outcome.environment:
for key, value in outcome.environment.items():
- tenvironment.Add(key, value)
+ tenvironment.add(key, value)
environment[brd.target] = tenvironment
return (board_dict, err_lines_summary, err_lines_boards,
warn_lines_summary, warn_lines_boards, config, environment)
- def AddOutcome(self, board_dict, arch_list, changes, char, color):
+ def add_outcome(self, board_dict, arch_list, changes, char, color):
"""Add an output to our list of outcomes for each architecture
This simple function adds failing boards (changes) to the
@@ -957,19 +969,19 @@ class Builder:
arch_list[arch] += str
- def ColourNum(self, num):
+ def colour_num(self, num):
color = self.col.RED if num > 0 else self.col.GREEN
if num == 0:
return '0'
return self.col.build(color, str(num))
- def ResetResultSummary(self, board_selected):
+ def reset_result_summary(self, board_selected):
"""Reset the results summary ready for use.
Set up the base board list to be all those selected, and set the
error lines to empty.
- Following this, calls to PrintResultSummary() will use this
+ Following this, calls to print_result_summary() will use this
information to work out what has changed.
Args:
@@ -986,7 +998,7 @@ class Builder:
self._base_config = None
self._base_environment = None
- def PrintFuncSizeDetail(self, fname, old, new):
+ def print_func_size_detail(self, fname, old, new):
grow, shrink, add, remove, up, down = 0, 0, 0, 0, 0, 0
delta, common = [], {}
@@ -1020,7 +1032,7 @@ class Builder:
args = [add, -remove, grow, -shrink, up, -down, up - down]
if max(args) == 0 and min(args) == 0:
return
- args = [self.ColourNum(x) for x in args]
+ args = [self.colour_num(x) for x in args]
indent = ' ' * 15
tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' %
tuple([indent, self.col.build(self.col.YELLOW, fname)] + args))
@@ -1034,7 +1046,7 @@ class Builder:
tprint(msg, colour=color)
- def PrintSizeDetail(self, target_list, show_bloat):
+ def print_size_detail(self, target_list, show_bloat):
"""Show details size information for each board
Args:
@@ -1067,12 +1079,12 @@ class Builder:
outcome = result['_outcome']
base_outcome = self._base_board_dict[target]
for fname in outcome.func_sizes:
- self.PrintFuncSizeDetail(fname,
+ self.print_func_size_detail(fname,
base_outcome.func_sizes[fname],
outcome.func_sizes[fname])
- def PrintSizeSummary(self, board_selected, board_dict, show_detail,
+ def print_size_summary(self, board_selected, board_dict, show_detail,
show_bloat):
"""Print a summary of image sizes broken down by section.
@@ -1173,10 +1185,10 @@ class Builder:
if printed_arch:
tprint()
if show_detail:
- self.PrintSizeDetail(target_list, show_bloat)
+ self.print_size_detail(target_list, show_bloat)
- def PrintResultSummary(self, board_selected, board_dict, err_lines,
+ def print_result_summary(self, board_selected, board_dict, err_lines,
err_line_boards, warn_lines, warn_line_boards,
config, environment, show_sizes, show_detail,
show_bloat, show_config, show_environment):
@@ -1212,7 +1224,7 @@ class Builder:
show_config: Show config changes
show_environment: Show environment changes
"""
- def _BoardList(line, line_boards):
+ def _board_list(line, line_boards):
"""Helper function to get a line of boards containing a line
Args:
@@ -1231,7 +1243,7 @@ class Builder:
board_set.add(brd)
return brds
- def _CalcErrorDelta(base_lines, base_line_boards, lines, line_boards,
+ def _calc_error_delta(base_lines, base_line_boards, lines, line_boards,
char):
"""Calculate the required output based on changes in errors
@@ -1255,17 +1267,17 @@ class Builder:
worse_lines = []
for line in lines:
if line not in base_lines:
- errline = ErrLine(char + '+', _BoardList(line, line_boards),
+ errline = ErrLine(char + '+', _board_list(line, line_boards),
line)
worse_lines.append(errline)
for line in base_lines:
if line not in lines:
errline = ErrLine(char + '-',
- _BoardList(line, base_line_boards), line)
+ _board_list(line, base_line_boards), line)
better_lines.append(errline)
return better_lines, worse_lines
- def _CalcConfig(delta, name, config):
+ def _calc_config(delta, name, config):
"""Calculate configuration changes
Args:
@@ -1283,7 +1295,7 @@ class Builder:
out += '%s=%s ' % (key, config[key])
return '%s %s: %s' % (delta, name, out)
- def _AddConfig(lines, name, config_plus, config_minus, config_change):
+ def _add_config(lines, name, config_plus, config_minus, config_change):
"""Add changes in configuration to a list
Args:
@@ -1300,13 +1312,13 @@ class Builder:
value: config value
"""
if config_plus:
- lines.append(_CalcConfig('+', name, config_plus))
+ lines.append(_calc_config('+', name, config_plus))
if config_minus:
- lines.append(_CalcConfig('-', name, config_minus))
+ lines.append(_calc_config('-', name, config_minus))
if config_change:
- lines.append(_CalcConfig('c', name, config_change))
+ lines.append(_calc_config('c', name, config_change))
- def _OutputConfigInfo(lines):
+ def _output_config_info(lines):
for line in lines:
if not line:
continue
@@ -1318,7 +1330,7 @@ class Builder:
col = self.col.YELLOW
tprint(' ' + line, newline=True, colour=col)
- def _OutputErrLines(err_lines, colour):
+ def _output_err_lines(err_lines, colour):
"""Output the line of error/warning lines, if not empty
Also increments self._error_lines if err_lines not empty
@@ -1376,9 +1388,9 @@ class Builder:
new_boards.append(target)
# Get a list of errors and warnings that have appeared, and disappeared
- better_err, worse_err = _CalcErrorDelta(self._base_err_lines,
+ better_err, worse_err = _calc_error_delta(self._base_err_lines,
self._base_err_line_boards, err_lines, err_line_boards, '')
- better_warn, worse_warn = _CalcErrorDelta(self._base_warn_lines,
+ better_warn, worse_warn = _calc_error_delta(self._base_warn_lines,
self._base_warn_line_boards, warn_lines, warn_line_boards, 'w')
# For the IDE mode, print out all the output
@@ -1391,26 +1403,26 @@ class Builder:
elif any((ok_boards, warn_boards, err_boards, unknown_boards, new_boards,
worse_err, better_err, worse_warn, better_warn)):
arch_list = {}
- self.AddOutcome(board_selected, arch_list, ok_boards, '',
+ self.add_outcome(board_selected, arch_list, ok_boards, '',
self.col.GREEN)
- self.AddOutcome(board_selected, arch_list, warn_boards, 'w+',
+ self.add_outcome(board_selected, arch_list, warn_boards, 'w+',
self.col.YELLOW)
- self.AddOutcome(board_selected, arch_list, err_boards, '+',
+ self.add_outcome(board_selected, arch_list, err_boards, '+',
self.col.RED)
- self.AddOutcome(board_selected, arch_list, new_boards, '*', self.col.BLUE)
+ self.add_outcome(board_selected, arch_list, new_boards, '*', self.col.BLUE)
if self._show_unknown:
- self.AddOutcome(board_selected, arch_list, unknown_boards, '?',
+ self.add_outcome(board_selected, arch_list, unknown_boards, '?',
self.col.MAGENTA)
for arch, target_list in arch_list.items():
tprint('%10s: %s' % (arch, target_list))
self._error_lines += 1
- _OutputErrLines(better_err, colour=self.col.GREEN)
- _OutputErrLines(worse_err, colour=self.col.RED)
- _OutputErrLines(better_warn, colour=self.col.CYAN)
- _OutputErrLines(worse_warn, colour=self.col.YELLOW)
+ _output_err_lines(better_err, colour=self.col.GREEN)
+ _output_err_lines(worse_err, colour=self.col.RED)
+ _output_err_lines(better_warn, colour=self.col.CYAN)
+ _output_err_lines(worse_warn, colour=self.col.YELLOW)
if show_sizes:
- self.PrintSizeSummary(board_selected, board_dict, show_detail,
+ self.print_size_summary(board_selected, board_dict, show_detail,
show_bloat)
if show_environment and self._base_environment:
@@ -1438,10 +1450,10 @@ class Builder:
desc = '%s -> %s' % (value, new_value)
environment_change[key] = desc
- _AddConfig(lines, target, environment_plus, environment_minus,
+ _add_config(lines, target, environment_plus, environment_minus,
environment_change)
- _OutputConfigInfo(lines)
+ _output_config_info(lines)
if show_config and self._base_config:
summary = {}
@@ -1504,9 +1516,9 @@ class Builder:
arch_config_minus[arch][name].update(config_minus)
arch_config_change[arch][name].update(config_change)
- _AddConfig(lines, name, config_plus, config_minus,
+ _add_config(lines, name, config_plus, config_minus,
config_change)
- _AddConfig(lines, 'all', all_config_plus, all_config_minus,
+ _add_config(lines, 'all', all_config_plus, all_config_minus,
all_config_change)
summary[target] = '\n'.join(lines)
@@ -1526,20 +1538,20 @@ class Builder:
all_plus.update(arch_config_plus[arch][name])
all_minus.update(arch_config_minus[arch][name])
all_change.update(arch_config_change[arch][name])
- _AddConfig(lines, name, arch_config_plus[arch][name],
+ _add_config(lines, name, arch_config_plus[arch][name],
arch_config_minus[arch][name],
arch_config_change[arch][name])
- _AddConfig(lines, 'all', all_plus, all_minus, all_change)
+ _add_config(lines, 'all', all_plus, all_minus, all_change)
#arch_summary[target] = '\n'.join(lines)
if lines:
tprint('%s:' % arch)
- _OutputConfigInfo(lines)
+ _output_config_info(lines)
for lines, targets in lines_by_target.items():
if not lines:
continue
tprint('%s :' % ' '.join(sorted(targets)))
- _OutputConfigInfo(lines.split('\n'))
+ _output_config_info(lines.split('\n'))
# Save our updated information for the next call to this function
@@ -1560,9 +1572,9 @@ class Builder:
tprint("Boards not built (%d): %s" % (len(not_built),
', '.join(not_built)))
- def ProduceResultSummary(self, commit_upto, commits, board_selected):
+ def produce_result_summary(self, commit_upto, commits, board_selected):
(board_dict, err_lines, err_line_boards, warn_lines,
- warn_line_boards, config, environment) = self.GetResultSummary(
+ warn_line_boards, config, environment) = self.get_result_summary(
board_selected, commit_upto,
read_func_sizes=self._show_bloat,
read_config=self._show_config,
@@ -1571,13 +1583,13 @@ class Builder:
msg = '%02d: %s' % (commit_upto + 1,
commits[commit_upto].subject)
tprint(msg, colour=self.col.BLUE)
- self.PrintResultSummary(board_selected, board_dict,
+ self.print_result_summary(board_selected, board_dict,
err_lines if self._show_errors else [], err_line_boards,
warn_lines if self._show_errors else [], warn_line_boards,
config, environment, self._show_sizes, self._show_detail,
self._show_bloat, self._show_config, self._show_environment)
- def ShowSummary(self, commits, board_selected):
+ def show_summary(self, commits, board_selected):
"""Show a build summary for U-Boot for a given board list.
Reset the result summary, then repeatedly call GetResultSummary on
@@ -1589,16 +1601,16 @@ class Builder:
"""
self.commit_count = len(commits) if commits else 1
self.commits = commits
- self.ResetResultSummary(board_selected)
+ self.reset_result_summary(board_selected)
self._error_lines = 0
for commit_upto in range(0, self.commit_count, self._step):
- self.ProduceResultSummary(commit_upto, commits, board_selected)
+ self.produce_result_summary(commit_upto, commits, board_selected)
if not self._error_lines:
tprint('(no errors to report)', colour=self.col.GREEN)
- def SetupBuild(self, board_selected, commits):
+ def setup_build(self, board_selected, commits):
"""Set up ready to start a build.
Args:
@@ -1611,7 +1623,7 @@ class Builder:
self.upto = self.warned = self.fail = 0
self._timestamps = collections.deque()
- def GetThreadDir(self, thread_num):
+ def get_thread_dir(self, thread_num):
"""Get the directory path to the working dir for a thread.
Args:
@@ -1622,7 +1634,7 @@ class Builder:
return self._working_dir
return os.path.join(self._working_dir, '%02d' % max(thread_num, 0))
- def _PrepareThread(self, thread_num, setup_git):
+ def _prepare_thread(self, thread_num, setup_git):
"""Prepare the working directory for a thread.
This clones or fetches the repo into the thread's work directory.
@@ -1635,8 +1647,8 @@ class Builder:
'clone' to set up a git clone
'worktree' to set up a git worktree
"""
- thread_dir = self.GetThreadDir(thread_num)
- builderthread.Mkdir(thread_dir)
+ thread_dir = self.get_thread_dir(thread_num)
+ builderthread.mkdir(thread_dir)
git_dir = os.path.join(thread_dir, '.git')
# Create a worktree or a git repo clone for this thread if it
@@ -1672,7 +1684,7 @@ class Builder:
else:
raise ValueError("Can't setup git repo with %s." % setup_git)
- def _PrepareWorkingSpace(self, max_threads, setup_git):
+ def _prepare_working_space(self, max_threads, setup_git):
"""Prepare the working directory for use.
Set up the git repo for each thread. Creates a linked working tree
@@ -1684,7 +1696,7 @@ class Builder:
work
setup_git: True to set up a git worktree or a git clone
"""
- builderthread.Mkdir(self._working_dir)
+ builderthread.mkdir(self._working_dir)
if setup_git and self.git_dir:
src_dir = os.path.abspath(self.git_dir)
if gitutil.check_worktree_is_available(src_dir):
@@ -1698,14 +1710,14 @@ class Builder:
# Always do at least one thread
for thread in range(max(max_threads, 1)):
- self._PrepareThread(thread, setup_git)
+ self._prepare_thread(thread, setup_git)
- def _GetOutputSpaceRemovals(self):
+ def _get_output_space_removals(self):
"""Get the output directories ready to receive files.
Figure out what needs to be deleted in the output directory before it
can be used. We only delete old buildman directories which have the
- expected name pattern. See _GetOutputDir().
+ expected name pattern. See get_output_dir().
Returns:
List of full paths of directories to remove
@@ -1714,7 +1726,7 @@ class Builder:
return
dir_list = []
for commit_upto in range(self.commit_count):
- dir_list.append(self._GetOutputDir(commit_upto))
+ dir_list.append(self.get_output_dir(commit_upto))
to_remove = []
for dirname in glob.glob(os.path.join(self.base_dir, '*')):
@@ -1725,14 +1737,14 @@ class Builder:
to_remove.append(dirname)
return to_remove
- def _PrepareOutputSpace(self):
+ def _prepare_output_space(self):
"""Get the output directories ready to receive files.
We delete any output directories which look like ones we need to
create. Having left over directories is confusing when the user wants
to check the output manually.
"""
- to_remove = self._GetOutputSpaceRemovals()
+ to_remove = self._get_output_space_removals()
if to_remove:
tprint('Removing %d old build directories...' % len(to_remove),
newline=False)
@@ -1740,7 +1752,7 @@ class Builder:
shutil.rmtree(dirname)
terminal.print_clear()
- def BuildBoards(self, commits, board_selected, keep_outputs, verbose):
+ def build_boards(self, commits, board_selected, keep_outputs, verbose):
"""Build all commits for a list of boards
Args:
@@ -1759,15 +1771,15 @@ class Builder:
self.commits = commits
self._verbose = verbose
- self.ResetResultSummary(board_selected)
- builderthread.Mkdir(self.base_dir, parents = True)
- self._PrepareWorkingSpace(min(self.num_threads, len(board_selected)),
+ self.reset_result_summary(board_selected)
+ builderthread.mkdir(self.base_dir, parents = True)
+ self._prepare_working_space(min(self.num_threads, len(board_selected)),
commits is not None)
- self._PrepareOutputSpace()
+ self._prepare_output_space()
if not self._ide:
tprint('\rStarting build...', newline=False)
- self.SetupBuild(board_selected, commits)
- self.ProcessResult(None)
+ self.setup_build(board_selected, commits)
+ self.process_result(None)
self.thread_exceptions = []
# Create jobs to build all commits for each board
for brd in board_selected.values():
@@ -1781,7 +1793,7 @@ class Builder:
if self.num_threads:
self.queue.put(job)
else:
- self._single_builder.RunJob(job)
+ self._single_builder.run_job(job)
if self.num_threads:
term = threading.Thread(target=self.queue.join)
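A rough sketch (not part of this commit) of how a caller such as control.py drives the renamed snake_case Builder methods; the surrounding setup is elided and the variable names (builder, commits, boards_selected) are assumptions:

    # Hypothetical driver code using only methods shown in this diff
    builder.set_display_options(show_errors=True, show_sizes=True)
    builder.build_boards(commits, boards_selected,
                         keep_outputs=False, verbose=False)
    builder.show_summary(commits, boards_selected)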
diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py
index 635865c..25f460c 100644
--- a/tools/buildman/builderthread.py
+++ b/tools/buildman/builderthread.py
@@ -2,8 +2,15 @@
# Copyright (c) 2014 Google, Inc
#
+"""Implementation the bulider threads
+
+This module provides the BuilderThread class, which handles calling the builder
+based on the jobs provided.
+"""
+
import errno
import glob
+import io
import os
import shutil
import sys
@@ -16,11 +23,15 @@ from u_boot_pylib import command
RETURN_CODE_RETRY = -1
BASE_ELF_FILENAMES = ['u-boot', 'spl/u-boot-spl', 'tpl/u-boot-tpl']
-def Mkdir(dirname, parents = False):
+def mkdir(dirname, parents=False):
"""Make a directory if it doesn't already exist.
Args:
- dirname: Directory to create
+ dirname (str): Directory to create
+ parents (bool): True to also make parent directories
+
+ Raises:
+ OSError: File already exists
"""
try:
if parents:
@@ -30,12 +41,51 @@ def Mkdir(dirname, parents = False):
except OSError as err:
if err.errno == errno.EEXIST:
if os.path.realpath('.') == os.path.realpath(dirname):
- print("Cannot create the current working directory '%s'!" % dirname)
+ print(f"Cannot create the current working directory '{dirname}'!")
sys.exit(1)
- pass
else:
raise
+
+def _remove_old_outputs(out_dir):
+ """Remove any old output-target files
+
+ Args:
+ out_dir (str): Output directory for the build
+
+ Since we use a build directory that was previously used by another
+ board, it may have produced an SPL image. If we don't remove it (i.e.
+ see do_config and self.mrproper below) then it will appear to be the
+ output of this build, even if it does not produce SPL images.
+ """
+ for elf in BASE_ELF_FILENAMES:
+ fname = os.path.join(out_dir, elf)
+ if os.path.exists(fname):
+ os.remove(fname)
+
+
+def copy_files(out_dir, build_dir, dirname, patterns):
+ """Copy files from the build directory to the output.
+
+ Args:
+ out_dir (str): Path to output directory containing the files
+ build_dir (str): Place to copy the files
+ dirname (str): Source directory, '' for normal U-Boot, 'spl' for SPL
+ patterns (list of str): A list of filenames to copy, each relative
+ to the build directory
+ """
+ for pattern in patterns:
+ file_list = glob.glob(os.path.join(out_dir, dirname, pattern))
+ for fname in file_list:
+ target = os.path.basename(fname)
+ if dirname:
+ base, ext = os.path.splitext(target)
+ if ext:
+ target = f'{base}-{dirname}{ext}'
+ shutil.copy(fname, os.path.join(build_dir, target))
+
+
+# pylint: disable=R0903
class BuilderJob:
"""Holds information about a job to be performed by a thread
@@ -77,7 +127,7 @@ class ResultThread(threading.Thread):
"""
while True:
result = self.builder.out_queue.get()
- self.builder.ProcessResult(result)
+ self.builder.process_result(result)
self.builder.out_queue.task_done()
@@ -107,22 +157,25 @@ class BuilderThread(threading.Thread):
self.mrproper = mrproper
self.per_board_out_dir = per_board_out_dir
self.test_exception = test_exception
+ self.toolchain = None
- def Make(self, commit, brd, stage, cwd, *args, **kwargs):
+ def make(self, commit, brd, stage, cwd, *args, **kwargs):
"""Run 'make' on a particular commit and board.
The source code will already be checked out, so the 'commit'
argument is only for information.
Args:
- commit: Commit object that is being built
- brd: Board object that is being built
- stage: Stage of the build. Valid stages are:
+ commit (Commit): Commit that is being built
+ brd (Board): Board that is being built
+ stage (str): Stage of the build. Valid stages are:
mrproper - can be called to clean source
config - called to configure for a board
build - the main make invocation - it does the build
- args: A list of arguments to pass to 'make'
- kwargs: A list of keyword arguments to pass to command.run_pipe()
+ cwd (str): Working directory to set, or None to leave it alone
+ *args (list of str): Arguments to pass to 'make'
+ **kwargs (dict): A list of keyword arguments to pass to
+ command.run_pipe()
Returns:
CommandResult object
@@ -130,61 +183,140 @@ class BuilderThread(threading.Thread):
return self.builder.do_make(commit, brd, stage, cwd, *args,
**kwargs)
- def RunCommit(self, commit_upto, brd, work_dir, do_config, config_only,
- force_build, force_build_failures, work_in_output,
- adjust_cfg):
- """Build a particular commit.
-
- If the build is already done, and we are not forcing a build, we skip
- the build and just return the previously-saved results.
+ def _build_args(self, brd, out_dir, out_rel_dir, work_dir, commit_upto):
+ """Set up arguments to the args list based on the settings
Args:
- commit_upto: Commit number to build (0...n-1)
- brd: Board object to build
- work_dir: Directory to which the source will be checked out
- do_config: True to run a make <board>_defconfig on the source
- config_only: Only configure the source, do not build it
- force_build: Force a build even if one was previously done
- force_build_failures: Force a bulid if the previous result showed
- failure
- work_in_output: Use the output directory as the work directory and
- don't write to a separate output directory.
- adjust_cfg (list of str): List of changes to make to .config file
- before building. Each is one of (where C is either CONFIG_xxx
- or just xxx):
- C to enable C
- ~C to disable C
- C=val to set the value of C (val must have quotes if C is
- a string Kconfig
+ brd (Board): Board to create arguments for
+ out_dir (str): Path to output directory containing the files
+ out_rel_dir (str): Output directory relative to the current dir
+ work_dir (str): Directory to which the source will be checked out
+ commit_upto (int): Commit number to build (0...n-1)
Returns:
- tuple containing:
- - CommandResult object containing the results of the build
- - boolean indicating whether 'make config' is still needed
+ tuple:
+ list of str: Arguments to pass to make
+ str: Current working directory, or None if no commit
+ str: Source directory (typically the work directory)
"""
- # Create a default result - it will be overwritte by the call to
- # self.Make() below, in the event that we do a build.
- result = command.CommandResult()
- result.return_code = 0
- if work_in_output or self.builder.in_tree:
- out_dir = work_dir
- else:
- if self.per_board_out_dir:
- out_rel_dir = os.path.join('..', brd.target)
+ args = []
+ cwd = work_dir
+ src_dir = os.path.realpath(work_dir)
+ if not self.builder.in_tree:
+ if commit_upto is None:
+ # In this case we are building in the original source directory
+ # (i.e. the current directory where buildman is invoked). The
+ # output directory is set to this thread's selected work
+ # directory.
+ #
+ # Symlinks can confuse U-Boot's Makefile since we may use '..'
+ # in our path, so remove them.
+ real_dir = os.path.realpath(out_dir)
+ args.append(f'O={real_dir}')
+ cwd = None
+ src_dir = os.getcwd()
else:
- out_rel_dir = 'build'
- out_dir = os.path.join(work_dir, out_rel_dir)
+ args.append(f'O={out_rel_dir}')
+ if self.builder.verbose_build:
+ args.append('V=1')
+ else:
+ args.append('-s')
+ if self.builder.num_jobs is not None:
+ args.extend(['-j', str(self.builder.num_jobs)])
+ if self.builder.warnings_as_errors:
+ args.append('KCFLAGS=-Werror')
+ args.append('HOSTCFLAGS=-Werror')
+ if self.builder.allow_missing:
+ args.append('BINMAN_ALLOW_MISSING=1')
+ if self.builder.no_lto:
+ args.append('NO_LTO=1')
+ if self.builder.reproducible_builds:
+ args.append('SOURCE_DATE_EPOCH=0')
+ args.extend(self.builder.toolchains.GetMakeArguments(brd))
+ args.extend(self.toolchain.MakeArgs())
+ return args, cwd, src_dir
+
+ def _reconfigure(self, commit, brd, cwd, args, env, config_args, config_out,
+ cmd_list):
+ """Reconfigure the build
- # Check if the job was already completed last time
- done_file = self.builder.GetDoneFile(commit_upto, brd.target)
+ Args:
+ commit (Commit): Commit only being built
+ brd (Board): Board being built
+ cwd (str): Current working directory
+ args (list of str): Arguments to pass to make
+ env (dict): Environment strings
+ config_args (list of str): defconfig arg for this board
+ cmd_list (list of str): List to add the commands to, for logging
+
+ Returns:
+ CommandResult object
+ """
+ if self.mrproper:
+ result = self.make(commit, brd, 'mrproper', cwd, 'mrproper', *args,
+ env=env)
+ config_out.write(result.combined)
+ cmd_list.append([self.builder.gnu_make, 'mrproper', *args])
+ result = self.make(commit, brd, 'config', cwd, *(args + config_args),
+ env=env)
+ cmd_list.append([self.builder.gnu_make] + args + config_args)
+ config_out.write(result.combined)
+ return result
+
+ def _build(self, commit, brd, cwd, args, env, cmd_list, config_only):
+ """Perform the build
+
+ Args:
+ commit (Commit): Commit only being built
+ brd (Board): Board being built
+ cwd (str): Current working directory
+ args (list of str): Arguments to pass to make
+ env (dict): Environment strings
+ cmd_list (list of str): List to add the commands to, for logging
+ config_only (bool): True if this is a config-only build (using the
+ 'make cfg' target)
+
+ Returns:
+ CommandResult object
+ """
+ if config_only:
+ args.append('cfg')
+ result = self.make(commit, brd, 'build', cwd, *args, env=env)
+ cmd_list.append([self.builder.gnu_make] + args)
+ if (result.return_code == 2 and
+ ('Some images are invalid' in result.stderr)):
+ # This is handled later by the check for output in stderr
+ result.return_code = 0
+ return result
+
+ def _read_done_file(self, commit_upto, brd, force_build,
+ force_build_failures):
+ """Check the 'done' file and see if this commit should be built
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ brd (Board): Board being built
+ force_build (bool): Force a build even if one was previously done
+ force_build_failures (bool): Force a build if the previous result
+ showed failure
+
+ Returns:
+ tuple:
+ bool: True if build should be built
+ CommandResult: if there was a previous run:
+ - already_done set to True
+ - return_code set to return code
+ - result.stderr set to 'bad' if stderr output was recorded
+ """
+ result = command.CommandResult()
+ done_file = self.builder.get_done_file(commit_upto, brd.target)
result.already_done = os.path.exists(done_file)
will_build = (force_build or force_build_failures or
not result.already_done)
if result.already_done:
- # Get the return code from that build and use it
- with open(done_file, 'r') as fd:
+ with open(done_file, 'r', encoding='utf-8') as outf:
try:
- result.return_code = int(fd.readline())
+ result.return_code = int(outf.readline())
except ValueError:
# The file may be empty due to running out of disk space.
# Try a rebuild
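A small illustration (not part of the diff) of the make argument list that the new _build_args() helper above assembles; the specific values are assumptions:

    # Hypothetical output of _build_args() for an out-of-tree build with
    # 8 jobs, warnings treated as errors and LTO disabled; toolchain
    # arguments from GetMakeArguments()/MakeArgs() are appended last
    args = ['O=build', '-s', '-j', '8',
            'KCFLAGS=-Werror', 'HOSTCFLAGS=-Werror', 'NO_LTO=1']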
@@ -194,12 +326,155 @@ class BuilderThread(threading.Thread):
if result.return_code == RETURN_CODE_RETRY:
will_build = True
elif will_build:
- err_file = self.builder.GetErrFile(commit_upto, brd.target)
+ err_file = self.builder.get_err_file(commit_upto, brd.target)
if os.path.exists(err_file) and os.stat(err_file).st_size:
result.stderr = 'bad'
elif not force_build:
# The build passed, so no need to build it again
will_build = False
+ return will_build, result
+
+ def _decide_dirs(self, brd, work_dir, work_in_output):
+ """Decide the output directory to use
+
+ Args:
+ work_dir (str): Directory to which the source will be checked out
+ work_in_output (bool): Use the output directory as the work
+ directory and don't write to a separate output directory.
+
+ Returns:
+ tuple:
+ out_dir (str): Output directory for the build
+ out_rel_dir (str): Output directory relative to the current dir
+ """
+ if work_in_output or self.builder.in_tree:
+ out_rel_dir = None
+ out_dir = work_dir
+ else:
+ if self.per_board_out_dir:
+ out_rel_dir = os.path.join('..', brd.target)
+ else:
+ out_rel_dir = 'build'
+ out_dir = os.path.join(work_dir, out_rel_dir)
+ return out_dir, out_rel_dir
+
+ def _checkout(self, commit_upto, work_dir):
+ """Checkout the right commit
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ work_dir (str): Directory to which the source will be checked out
+
+ Returns:
+ Commit: Commit being built, or 'current' for current source
+ """
+ if self.builder.commits:
+ commit = self.builder.commits[commit_upto]
+ if self.builder.checkout:
+ git_dir = os.path.join(work_dir, '.git')
+ gitutil.checkout(commit.hash, git_dir, work_dir, force=True)
+ else:
+ commit = 'current'
+ return commit
+
+ def _config_and_build(self, commit_upto, brd, work_dir, do_config,
+ config_only, adjust_cfg, commit, out_dir, out_rel_dir,
+ result):
+ """Do the build, configuring first if necessary
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ brd (Board): Board to create arguments for
+ work_dir (str): Directory to which the source will be checked out
+ do_config (bool): True to run a make <board>_defconfig on the source
+ config_only (bool): Only configure the source, do not build it
+ adjust_cfg (list of str): See the cfgutil module and run_commit()
+ commit (Commit): Commit only being built
+ out_dir (str): Output directory for the build
+ out_rel_dir (str): Output directory relative to the current dir
+ result (CommandResult): Previous result
+
+ Returns:
+ tuple:
+ result (CommandResult): Result of the build
+ do_config (bool): indicates whether 'make config' is needed on
+ the next incremental build
+ """
+ # Set up the environment and command line
+ env = self.toolchain.MakeEnvironment(self.builder.full_path)
+ mkdir(out_dir)
+
+ args, cwd, src_dir = self._build_args(brd, out_dir, out_rel_dir,
+ work_dir, commit_upto)
+ config_args = [f'{brd.target}_defconfig']
+ config_out = io.StringIO()
+
+ _remove_old_outputs(out_dir)
+
+ # If we need to reconfigure, do that now
+ cfg_file = os.path.join(out_dir, '.config')
+ cmd_list = []
+ if do_config or adjust_cfg:
+ result = self._reconfigure(
+ commit, brd, cwd, args, env, config_args, config_out, cmd_list)
+ do_config = False # No need to configure next time
+ if adjust_cfg:
+ cfgutil.adjust_cfg_file(cfg_file, adjust_cfg)
+
+ # Now do the build, if everything looks OK
+ if result.return_code == 0:
+ result = self._build(commit, brd, cwd, args, env, cmd_list,
+ config_only)
+ if adjust_cfg:
+ errs = cfgutil.check_cfg_file(cfg_file, adjust_cfg)
+ if errs:
+ result.stderr += errs
+ result.return_code = 1
+ result.stderr = result.stderr.replace(src_dir + '/', '')
+ if self.builder.verbose_build:
+ result.stdout = config_out.getvalue() + result.stdout
+ result.cmd_list = cmd_list
+ return result, do_config
+
+ def run_commit(self, commit_upto, brd, work_dir, do_config, config_only,
+ force_build, force_build_failures, work_in_output,
+ adjust_cfg):
+ """Build a particular commit.
+
+ If the build is already done, and we are not forcing a build, we skip
+ the build and just return the previously-saved results.
+
+ Args:
+ commit_upto (int): Commit number to build (0...n-1)
+ brd (Board): Board to build
+ work_dir (str): Directory to which the source will be checked out
+ do_config (bool): True to run a make <board>_defconfig on the source
+ config_only (bool): Only configure the source, do not build it
+ force_build (bool): Force a build even if one was previously done
+ force_build_failures (bool): Force a build if the previous result
+ showed failure
+ work_in_output (bool) : Use the output directory as the work
+ directory and don't write to a separate output directory.
+ adjust_cfg (list of str): List of changes to make to .config file
+ before building. Each is one of (where C is either CONFIG_xxx
+ or just xxx):
+ C to enable C
+ ~C to disable C
+ C=val to set the value of C (val must have quotes if C is
+ a string Kconfig
+
+ Returns:
+ tuple containing:
+ - CommandResult object containing the results of the build
+ - boolean indicating whether 'make config' is still needed
+ """
+ # Create a default result - it will be overwritten by the call to
+ # self.make() below, in the event that we do a build.
+ out_dir, out_rel_dir = self._decide_dirs(brd, work_dir, work_in_output)
+
+ # Check if the job was already completed last time
+ will_build, result = self._read_done_file(commit_upto, brd, force_build,
+ force_build_failures)
if will_build:
# We are going to have to build it. First, get a toolchain
@@ -209,115 +484,13 @@ class BuilderThread(threading.Thread):
except ValueError as err:
result.return_code = 10
result.stdout = ''
- result.stderr = str(err)
- # TODO(sjg@chromium.org): This gets swallowed, but needs
- # to be reported.
+ result.stderr = f'Tool chain error for {brd.arch}: {str(err)}'
if self.toolchain:
- # Checkout the right commit
- if self.builder.commits:
- commit = self.builder.commits[commit_upto]
- if self.builder.checkout:
- git_dir = os.path.join(work_dir, '.git')
- gitutil.checkout(commit.hash, git_dir, work_dir,
- force=True)
- else:
- commit = 'current'
-
- # Set up the environment and command line
- env = self.toolchain.MakeEnvironment(self.builder.full_path)
- Mkdir(out_dir)
- args = []
- cwd = work_dir
- src_dir = os.path.realpath(work_dir)
- if not self.builder.in_tree:
- if commit_upto is None:
- # In this case we are building in the original source
- # directory (i.e. the current directory where buildman
- # is invoked. The output directory is set to this
- # thread's selected work directory.
- #
- # Symlinks can confuse U-Boot's Makefile since
- # we may use '..' in our path, so remove them.
- out_dir = os.path.realpath(out_dir)
- args.append('O=%s' % out_dir)
- cwd = None
- src_dir = os.getcwd()
- else:
- args.append('O=%s' % out_rel_dir)
- if self.builder.verbose_build:
- args.append('V=1')
- else:
- args.append('-s')
- if self.builder.num_jobs is not None:
- args.extend(['-j', str(self.builder.num_jobs)])
- if self.builder.warnings_as_errors:
- args.append('KCFLAGS=-Werror')
- args.append('HOSTCFLAGS=-Werror')
- if self.builder.allow_missing:
- args.append('BINMAN_ALLOW_MISSING=1')
- if self.builder.no_lto:
- args.append('NO_LTO=1')
- if self.builder.reproducible_builds:
- args.append('SOURCE_DATE_EPOCH=0')
- config_args = ['%s_defconfig' % brd.target]
- config_out = ''
- args.extend(self.builder.toolchains.GetMakeArguments(brd))
- args.extend(self.toolchain.MakeArgs())
-
- # Remove any output targets. Since we use a build directory that
- # was previously used by another board, it may have produced an
- # SPL image. If we don't remove it (i.e. see do_config and
- # self.mrproper below) then it will appear to be the output of
- # this build, even if it does not produce SPL images.
- build_dir = self.builder.GetBuildDir(commit_upto, brd.target)
- for elf in BASE_ELF_FILENAMES:
- fname = os.path.join(out_dir, elf)
- if os.path.exists(fname):
- os.remove(fname)
-
- # If we need to reconfigure, do that now
- cfg_file = os.path.join(out_dir, '.config')
- cmd_list = []
- if do_config or adjust_cfg:
- config_out = ''
- if self.mrproper:
- result = self.Make(commit, brd, 'mrproper', cwd,
- 'mrproper', *args, env=env)
- config_out += result.combined
- cmd_list.append([self.builder.gnu_make, 'mrproper',
- *args])
- result = self.Make(commit, brd, 'config', cwd,
- *(args + config_args), env=env)
- cmd_list.append([self.builder.gnu_make] + args +
- config_args)
- config_out += result.combined
- do_config = False # No need to configure next time
- if adjust_cfg:
- cfgutil.adjust_cfg_file(cfg_file, adjust_cfg)
- if result.return_code == 0:
- if config_only:
- args.append('cfg')
- result = self.Make(commit, brd, 'build', cwd, *args,
- env=env)
- cmd_list.append([self.builder.gnu_make] + args)
- if (result.return_code == 2 and
- ('Some images are invalid' in result.stderr)):
- # This is handled later by the check for output in
- # stderr
- result.return_code = 0
- if adjust_cfg:
- errs = cfgutil.check_cfg_file(cfg_file, adjust_cfg)
- if errs:
- result.stderr += errs
- result.return_code = 1
- result.stderr = result.stderr.replace(src_dir + '/', '')
- if self.builder.verbose_build:
- result.stdout = config_out + result.stdout
- result.cmd_list = cmd_list
- else:
- result.return_code = 1
- result.stderr = 'No tool chain for %s\n' % brd.arch
+ commit = self._checkout(commit_upto, work_dir)
+ result, do_config = self._config_and_build(
+ commit_upto, brd, work_dir, do_config, config_only,
+ adjust_cfg, commit, out_dir, out_rel_dir, result)
result.already_done = False
result.toolchain = self.toolchain
@@ -326,15 +499,15 @@ class BuilderThread(threading.Thread):
result.out_dir = out_dir
return result, do_config
- def _WriteResult(self, result, keep_outputs, work_in_output):
+ def _write_result(self, result, keep_outputs, work_in_output):
"""Write a built result to the output directory.
Args:
- result: CommandResult object containing result to write
- keep_outputs: True to store the output binaries, False
+ result (CommandResult): result to write
+ keep_outputs (bool): True to store the output binaries, False
to delete them
- work_in_output: Use the output directory as the work directory and
- don't write to a separate output directory.
+ work_in_output (bool): Use the output directory as the work
+ directory and don't write to a separate output directory.
"""
# If we think this might have been aborted with Ctrl-C, record the
# failure but not that we are 'done' with this board. A retry may fix
@@ -345,22 +518,22 @@ class BuilderThread(threading.Thread):
return
# Write the output and stderr
- output_dir = self.builder._GetOutputDir(result.commit_upto)
- Mkdir(output_dir)
- build_dir = self.builder.GetBuildDir(result.commit_upto,
+ output_dir = self.builder.get_output_dir(result.commit_upto)
+ mkdir(output_dir)
+ build_dir = self.builder.get_build_dir(result.commit_upto,
result.brd.target)
- Mkdir(build_dir)
+ mkdir(build_dir)
outfile = os.path.join(build_dir, 'log')
- with open(outfile, 'w') as fd:
+ with open(outfile, 'w', encoding='utf-8') as outf:
if result.stdout:
- fd.write(result.stdout)
+ outf.write(result.stdout)
- errfile = self.builder.GetErrFile(result.commit_upto,
+ errfile = self.builder.get_err_file(result.commit_upto,
result.brd.target)
if result.stderr:
- with open(errfile, 'w') as fd:
- fd.write(result.stderr)
+ with open(errfile, 'w', encoding='utf-8') as outf:
+ outf.write(result.stderr)
elif os.path.exists(errfile):
os.remove(errfile)
@@ -370,60 +543,61 @@ class BuilderThread(threading.Thread):
if result.toolchain:
# Write the build result and toolchain information.
- done_file = self.builder.GetDoneFile(result.commit_upto,
+ done_file = self.builder.get_done_file(result.commit_upto,
result.brd.target)
- with open(done_file, 'w') as fd:
+ with open(done_file, 'w', encoding='utf-8') as outf:
if maybe_aborted:
# Special code to indicate we need to retry
- fd.write('%s' % RETURN_CODE_RETRY)
+ outf.write(f'{RETURN_CODE_RETRY}')
else:
- fd.write('%s' % result.return_code)
- with open(os.path.join(build_dir, 'toolchain'), 'w') as fd:
- print('gcc', result.toolchain.gcc, file=fd)
- print('path', result.toolchain.path, file=fd)
- print('cross', result.toolchain.cross, file=fd)
- print('arch', result.toolchain.arch, file=fd)
- fd.write('%s' % result.return_code)
+ outf.write(f'{result.return_code}')
+ with open(os.path.join(build_dir, 'toolchain'), 'w',
+ encoding='utf-8') as outf:
+ print('gcc', result.toolchain.gcc, file=outf)
+ print('path', result.toolchain.path, file=outf)
+ print('cross', result.toolchain.cross, file=outf)
+ print('arch', result.toolchain.arch, file=outf)
+ outf.write(f'{result.return_code}')
# Write out the image and function size information and an objdump
env = result.toolchain.MakeEnvironment(self.builder.full_path)
- with open(os.path.join(build_dir, 'out-env'), 'wb') as fd:
+ with open(os.path.join(build_dir, 'out-env'), 'wb') as outf:
for var in sorted(env.keys()):
- fd.write(b'%s="%s"' % (var, env[var]))
+ outf.write(b'%s="%s"' % (var, env[var]))
with open(os.path.join(build_dir, 'out-cmd'), 'w',
- encoding='utf-8') as fd:
+ encoding='utf-8') as outf:
for cmd in result.cmd_list:
- print(' '.join(cmd), file=fd)
+ print(' '.join(cmd), file=outf)
lines = []
for fname in BASE_ELF_FILENAMES:
- cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
+ cmd = [f'{self.toolchain.cross}nm', '--size-sort', fname]
nm_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
if nm_result.stdout:
- nm = self.builder.GetFuncSizesFile(result.commit_upto,
- result.brd.target, fname)
- with open(nm, 'w') as fd:
- print(nm_result.stdout, end=' ', file=fd)
+ nm_fname = self.builder.get_func_sizes_file(
+ result.commit_upto, result.brd.target, fname)
+ with open(nm_fname, 'w', encoding='utf-8') as outf:
+ print(nm_result.stdout, end=' ', file=outf)
- cmd = ['%sobjdump' % self.toolchain.cross, '-h', fname]
+ cmd = [f'{self.toolchain.cross}objdump', '-h', fname]
dump_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
rodata_size = ''
if dump_result.stdout:
- objdump = self.builder.GetObjdumpFile(result.commit_upto,
+ objdump = self.builder.get_objdump_file(result.commit_upto,
result.brd.target, fname)
- with open(objdump, 'w') as fd:
- print(dump_result.stdout, end=' ', file=fd)
+ with open(objdump, 'w', encoding='utf-8') as outf:
+ print(dump_result.stdout, end=' ', file=outf)
for line in dump_result.stdout.splitlines():
fields = line.split()
if len(fields) > 5 and fields[1] == '.rodata':
rodata_size = fields[2]
- cmd = ['%ssize' % self.toolchain.cross, fname]
+ cmd = [f'{self.toolchain.cross}size', fname]
size_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
@@ -432,30 +606,29 @@ class BuilderThread(threading.Thread):
rodata_size)
# Extract the environment from U-Boot and dump it out
- cmd = ['%sobjcopy' % self.toolchain.cross, '-O', 'binary',
+ cmd = [f'{self.toolchain.cross}objcopy', '-O', 'binary',
'-j', '.rodata.default_environment',
'env/built-in.o', 'uboot.env']
command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
- ubootenv = os.path.join(result.out_dir, 'uboot.env')
if not work_in_output:
- self.CopyFiles(result.out_dir, build_dir, '', ['uboot.env'])
+ copy_files(result.out_dir, build_dir, '', ['uboot.env'])
# Write out the image sizes file. This is similar to the output
# of binutil's 'size' utility, but it omits the header line and
# adds an additional hex value at the end of each line for the
# rodata size
- if len(lines):
- sizes = self.builder.GetSizesFile(result.commit_upto,
+ if lines:
+ sizes = self.builder.get_sizes_file(result.commit_upto,
result.brd.target)
- with open(sizes, 'w') as fd:
- print('\n'.join(lines), file=fd)
+ with open(sizes, 'w', encoding='utf-8') as outf:
+ print('\n'.join(lines), file=outf)
if not work_in_output:
# Write out the configuration files, with a special case for SPL
for dirname in ['', 'spl', 'tpl']:
- self.CopyFiles(
+ copy_files(
result.out_dir, build_dir, dirname,
['u-boot.cfg', 'spl/u-boot-spl.cfg', 'tpl/u-boot-tpl.cfg',
'.config', 'include/autoconf.mk',
@@ -463,60 +636,40 @@ class BuilderThread(threading.Thread):
# Now write the actual build output
if keep_outputs:
- self.CopyFiles(
+ copy_files(
result.out_dir, build_dir, '',
['u-boot*', '*.bin', '*.map', '*.img', 'MLO', 'SPL',
'include/autoconf.mk', 'spl/u-boot-spl*'])
- def CopyFiles(self, out_dir, build_dir, dirname, patterns):
- """Copy files from the build directory to the output.
-
- Args:
- out_dir: Path to output directory containing the files
- build_dir: Place to copy the files
- dirname: Source directory, '' for normal U-Boot, 'spl' for SPL
- patterns: A list of filenames (strings) to copy, each relative
- to the build directory
- """
- for pattern in patterns:
- file_list = glob.glob(os.path.join(out_dir, dirname, pattern))
- for fname in file_list:
- target = os.path.basename(fname)
- if dirname:
- base, ext = os.path.splitext(target)
- if ext:
- target = '%s-%s%s' % (base, dirname, ext)
- shutil.copy(fname, os.path.join(build_dir, target))
-
- def _SendResult(self, result):
+ def _send_result(self, result):
"""Send a result to the builder for processing
Args:
- result: CommandResult object containing the results of the build
+ result (CommandResult): results of the build
Raises:
- ValueError if self.test_exception is true (for testing)
+ ValueError: self.test_exception is true (for testing)
"""
if self.test_exception:
raise ValueError('test exception')
if self.thread_num != -1:
self.builder.out_queue.put(result)
else:
- self.builder.ProcessResult(result)
+ self.builder.process_result(result)
- def RunJob(self, job):
+ def run_job(self, job):
"""Run a single job
A job consists of a building a list of commits for a particular board.
Args:
- job: Job to build
+ job (Job): Job to build
- Returns:
- List of Result objects
+ Raises:
+ ValueError: Thread was interrupted
"""
brd = job.brd
- work_dir = self.builder.GetThreadDir(self.thread_num)
+ work_dir = self.builder.get_thread_dir(self.thread_num)
self.toolchain = None
if job.commits:
# Run 'make board_defconfig' on the first commit
@@ -524,7 +677,7 @@ class BuilderThread(threading.Thread):
commit_upto = 0
force_build = False
for commit_upto in range(0, len(job.commits), job.step):
- result, request_config = self.RunCommit(commit_upto, brd,
+ result, request_config = self.run_commit(commit_upto, brd,
work_dir, do_config, self.builder.config_only,
force_build or self.builder.force_build,
self.builder.force_build_failures,
@@ -535,7 +688,7 @@ class BuilderThread(threading.Thread):
# If our incremental build failed, try building again
# with a reconfig.
if self.builder.force_config_on_failure:
- result, request_config = self.RunCommit(commit_upto,
+ result, request_config = self.run_commit(commit_upto,
brd, work_dir, True, False, True, False,
job.work_in_output, job.adjust_cfg)
did_config = True
@@ -576,17 +729,17 @@ class BuilderThread(threading.Thread):
raise ValueError('Interrupt')
# We have the build results, so output the result
- self._WriteResult(result, job.keep_outputs, job.work_in_output)
- self._SendResult(result)
+ self._write_result(result, job.keep_outputs, job.work_in_output)
+ self._send_result(result)
else:
# Just build the currently checked-out build
- result, request_config = self.RunCommit(None, brd, work_dir, True,
+ result, request_config = self.run_commit(None, brd, work_dir, True,
self.builder.config_only, True,
self.builder.force_build_failures, job.work_in_output,
job.adjust_cfg)
result.commit_upto = 0
- self._WriteResult(result, job.keep_outputs, job.work_in_output)
- self._SendResult(result)
+ self._write_result(result, job.keep_outputs, job.work_in_output)
+ self._send_result(result)
def run(self):
"""Our thread's run function
@@ -597,8 +750,9 @@ class BuilderThread(threading.Thread):
while True:
job = self.builder.queue.get()
try:
- self.RunJob(job)
- except Exception as e:
- print('Thread exception (use -T0 to run without threads):', e)
- self.builder.thread_exceptions.append(e)
+ self.run_job(job)
+ except Exception as exc:
+ print('Thread exception (use -T0 to run without threads):',
+ exc)
+ self.builder.thread_exceptions.append(exc)
self.builder.queue.task_done()
diff --git a/tools/buildman/buildman.rst b/tools/buildman/buildman.rst
index c8b0db3..d8c3957 100644
--- a/tools/buildman/buildman.rst
+++ b/tools/buildman/buildman.rst
@@ -159,7 +159,7 @@ on the command line:
.. code-block:: bash
- buildman --boards sandbox,snow --boards
+ buildman --boards sandbox,snow --boards firefly-rk3399
It is convenient to use the -n option to see what will be built based on
the subset given. Use -v as well to get an actual list of boards.
@@ -1307,9 +1307,10 @@ Using boards.cfg
This file is no-longer needed by buildman but it is still generated in the
working directory. This helps avoid a delay on every build, since scanning all
-the Kconfig files takes a few seconds. Use the -R flag to force regeneration
-of the file - in that case buildman exits after writing the file. with exit code
-2 if there was an error in the maintainer files.
+the Kconfig files takes a few seconds. Use the `-R <filename>` flag to force
+regeneration of the file - in that case buildman exits after writing the file
+with exit code 2 if there was an error in the maintainer files. To use the
+default filename, use a hyphen, i.e. `-R -`.
You should use 'buildman -nv <criteria>' instead of grepping the boards.cfg file,
since it may be dropped altogether in future.
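As a rough sketch of how the `-R <filename>` / `-R -` behaviour described above is resolved, mirroring the get_boards_obj() hunk in control.py further down in this diff (the helper name resolve_board_file is invented for illustration):

    import os

    def resolve_board_file(output_dir, regen_board_list):
        """Pick the boards.cfg path to write, honouring -R <filename> and -R -."""
        board_file = os.path.join(output_dir, 'boards.cfg')    # default location
        if regen_board_list and regen_board_list != '-':
            board_file = regen_board_list                      # explicit filename given
        return board_file

    print(resolve_board_file('..', '-'))              # default: ../boards.cfg
    print(resolve_board_file('..', 'my-boards.cfg'))  # custom output file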
diff --git a/tools/buildman/cmdline.py b/tools/buildman/cmdline.py
index a9cda24..9831655 100644
--- a/tools/buildman/cmdline.py
+++ b/tools/buildman/cmdline.py
@@ -2,148 +2,184 @@
# Copyright (c) 2014 Google, Inc
#
-from optparse import OptionParser
+"""Handles parsing of buildman arguments
+
+This creates the argument parser and uses it to parse the arguments passed in
+"""
+
+import argparse
import os
import pathlib
BUILDMAN_DIR = pathlib.Path(__file__).parent
HAS_TESTS = os.path.exists(BUILDMAN_DIR / "test.py")
-def ParseArgs():
- """Parse command line arguments from sys.argv[]
+def add_upto_m(parser):
+ """Add arguments up to 'M'
- Returns:
- tuple containing:
- options: command line options
- args: command lin arguments
+ Args:
+ parser (ArgumentParser): Parser to add to
+
+ This is split out to avoid having too many statements in one function
"""
- parser = OptionParser()
- parser.add_option('-a', '--adjust-cfg', type=str, action='append',
+ parser.add_argument('-a', '--adjust-cfg', type=str, action='append',
help='Adjust the Kconfig settings in .config before building')
- parser.add_option('-A', '--print-prefix', action='store_true',
+ parser.add_argument('-A', '--print-prefix', action='store_true',
help='Print the tool-chain prefix for a board (CROSS_COMPILE=)')
- parser.add_option('-b', '--branch', type='string',
+ parser.add_argument('-b', '--branch', type=str,
help='Branch name to build, or range of commits to build')
- parser.add_option('-B', '--bloat', dest='show_bloat',
+ parser.add_argument('-B', '--bloat', dest='show_bloat',
action='store_true', default=False,
help='Show changes in function code size for each board')
- parser.add_option('--boards', type='string', action='append',
+ parser.add_argument('--boards', type=str, action='append',
help='List of board names to build separated by comma')
- parser.add_option('-c', '--count', dest='count', type='int',
+ parser.add_argument('-c', '--count', dest='count', type=int,
default=-1, help='Run build on the top n commits')
- parser.add_option('-C', '--force-reconfig', dest='force_reconfig',
+ parser.add_argument('-C', '--force-reconfig', dest='force_reconfig',
action='store_true', default=False,
help='Reconfigure for every commit (disable incremental build)')
- parser.add_option('-d', '--detail', dest='show_detail',
+ parser.add_argument('-d', '--detail', dest='show_detail',
action='store_true', default=False,
help='Show detailed size delta for each board in the -S summary')
- parser.add_option('-D', '--config-only', action='store_true', default=False,
+ parser.add_argument('-D', '--config-only', action='store_true',
+ default=False,
help="Don't build, just configure each commit")
- parser.add_option('--debug', action='store_true',
+ parser.add_argument('--debug', action='store_true',
help='Enabling debugging (provides a full traceback on error)')
- parser.add_option('-e', '--show_errors', action='store_true',
+ parser.add_argument('-e', '--show_errors', action='store_true',
default=False, help='Show errors and warnings')
- parser.add_option('-E', '--warnings-as-errors', action='store_true',
+ parser.add_argument('-E', '--warnings-as-errors', action='store_true',
default=False, help='Treat all compiler warnings as errors')
- parser.add_option('-f', '--force-build', dest='force_build',
+ parser.add_argument('-f', '--force-build', dest='force_build',
action='store_true', default=False,
help='Force build of boards even if already built')
- parser.add_option('-F', '--force-build-failures', dest='force_build_failures',
+ parser.add_argument('-F', '--force-build-failures', dest='force_build_failures',
action='store_true', default=False,
help='Force build of previously-failed build')
- parser.add_option('--fetch-arch', type='string',
+ parser.add_argument('--fetch-arch', type=str,
help="Fetch a toolchain for architecture FETCH_ARCH ('list' to list)."
' You can also fetch several toolchains separate by comma, or'
" 'all' to download all")
- parser.add_option('-g', '--git', type='string',
+ parser.add_argument('-g', '--git', type=str,
help='Git repo containing branch to build', default='.')
- parser.add_option('-G', '--config-file', type='string',
+ parser.add_argument('-G', '--config-file', type=str,
help='Path to buildman config file', default='')
- parser.add_option('-H', '--full-help', action='store_true', dest='full_help',
+ parser.add_argument('-H', '--full-help', action='store_true', dest='full_help',
default=False, help='Display the README file')
- parser.add_option('-i', '--in-tree', dest='in_tree',
+ parser.add_argument('-i', '--in-tree', dest='in_tree',
action='store_true', default=False,
help='Build in the source tree instead of a separate directory')
- parser.add_option('-I', '--ide', action='store_true', default=False,
+ parser.add_argument('-I', '--ide', action='store_true', default=False,
help='Create build output that can be parsed by an IDE')
- parser.add_option('-j', '--jobs', dest='jobs', type='int',
+ parser.add_argument('-j', '--jobs', dest='jobs', type=int,
default=None, help='Number of jobs to run at once (passed to make)')
- parser.add_option('-k', '--keep-outputs', action='store_true',
+ parser.add_argument('-k', '--keep-outputs', action='store_true',
default=False, help='Keep all build output files (e.g. binaries)')
- parser.add_option('-K', '--show-config', action='store_true',
- default=False, help='Show configuration changes in summary (both board config files and Kconfig)')
- parser.add_option('--preserve-config-y', action='store_true',
+ parser.add_argument('-K', '--show-config', action='store_true',
+ default=False,
+ help='Show configuration changes in summary (both board config files and Kconfig)')
+ parser.add_argument('--preserve-config-y', action='store_true',
default=False, help="Don't convert y to 1 in configs")
- parser.add_option('-l', '--list-error-boards', action='store_true',
+ parser.add_argument('-l', '--list-error-boards', action='store_true',
default=False, help='Show a list of boards next to each error/warning')
- parser.add_option('-L', '--no-lto', action='store_true',
+ parser.add_argument('-L', '--no-lto', action='store_true',
default=False, help='Disable Link-time Optimisation (LTO) for builds')
- parser.add_option('--list-tool-chains', action='store_true', default=False,
+ parser.add_argument('--list-tool-chains', action='store_true', default=False,
help='List available tool chains (use -v to see probing detail)')
- parser.add_option('-m', '--mrproper', action='store_true',
+ parser.add_argument('-m', '--mrproper', action='store_true',
default=False, help="Run 'make mrproper' before reconfiguring")
- parser.add_option(
+ parser.add_argument(
'-M', '--allow-missing', action='store_true', default=False,
- help='Tell binman to allow missing blobs and generate fake ones as needed'),
- parser.add_option(
+ help='Tell binman to allow missing blobs and generate fake ones as needed')
+ parser.add_argument(
'--no-allow-missing', action='store_true', default=False,
- help='Disable telling binman to allow missing blobs'),
- parser.add_option('-n', '--dry-run', action='store_true', dest='dry_run',
+ help='Disable telling binman to allow missing blobs')
+ parser.add_argument('-n', '--dry-run', action='store_true', dest='dry_run',
default=False, help="Do a dry run (describe actions, but do nothing)")
- parser.add_option('-N', '--no-subdirs', action='store_true', dest='no_subdirs',
- default=False, help="Don't create subdirectories when building current source for a single board")
- parser.add_option('-o', '--output-dir', type='string', dest='output_dir',
+ parser.add_argument('-N', '--no-subdirs', action='store_true', dest='no_subdirs',
+ default=False,
+ help="Don't create subdirectories when building current source for a single board")
+
+
+def add_after_m(parser):
+ """Add arguments after 'M'
+
+ Args:
+ parser (ArgumentParser): Parser to add to
+
+ This is split out to avoid having too many statements in one function
+ """
+ parser.add_argument('-o', '--output-dir', type=str, dest='output_dir',
help='Directory where all builds happen and buildman has its workspace (default is ../)')
- parser.add_option('-O', '--override-toolchain', type='string',
+ parser.add_argument('-O', '--override-toolchain', type=str,
help="Override host toolchain to use for sandbox (e.g. 'clang-7')")
- parser.add_option('-Q', '--quick', action='store_true',
+ parser.add_argument('-Q', '--quick', action='store_true',
default=False, help='Do a rough build, with limited warning resolution')
- parser.add_option('-p', '--full-path', action='store_true',
+ parser.add_argument('-p', '--full-path', action='store_true',
default=False, help="Use full toolchain path in CROSS_COMPILE")
- parser.add_option('-P', '--per-board-out-dir', action='store_true',
+ parser.add_argument('-P', '--per-board-out-dir', action='store_true',
default=False, help="Use an O= (output) directory per board rather than per thread")
- parser.add_option('-r', '--reproducible-builds', action='store_true',
+ parser.add_argument('--print-arch', action='store_true',
+ default=False, help="Print the architecture for a board (ARCH=)")
+ parser.add_argument('-r', '--reproducible-builds', action='store_true',
help='Set SOURCE_DATE_EPOCH=0 to support a reproducible build')
- parser.add_option('-R', '--regen-board-list', action='store_true',
+ parser.add_argument('-R', '--regen-board-list', type=str,
help='Force regeneration of the list of boards, like the old boards.cfg file')
- parser.add_option('-s', '--summary', action='store_true',
+ parser.add_argument('-s', '--summary', action='store_true',
default=False, help='Show a build summary')
- parser.add_option('-S', '--show-sizes', action='store_true',
+ parser.add_argument('-S', '--show-sizes', action='store_true',
default=False, help='Show image size variation in summary')
- parser.add_option('--step', type='int',
+ parser.add_argument('--step', type=int,
default=1, help='Only build every n commits (0=just first and last)')
if HAS_TESTS:
- parser.add_option('--skip-net-tests', action='store_true', default=False,
+ parser.add_argument('--skip-net-tests', action='store_true', default=False,
help='Skip tests which need the network')
- parser.add_option('-t', '--test', action='store_true', dest='test',
+ parser.add_argument('-t', '--test', action='store_true', dest='test',
default=False, help='run tests')
- parser.add_option('-T', '--threads', type='int',
+ parser.add_argument('--coverage', action='store_true',
+ help='Calculate test coverage')
+ parser.add_argument('-T', '--threads', type=int,
default=None,
help='Number of builder threads to use (0=single-thread)')
- parser.add_option('-u', '--show_unknown', action='store_true',
+ parser.add_argument('-u', '--show_unknown', action='store_true',
default=False, help='Show boards with unknown build result')
- parser.add_option('-U', '--show-environment', action='store_true',
+ parser.add_argument('-U', '--show-environment', action='store_true',
default=False, help='Show environment changes in summary')
- parser.add_option('-v', '--verbose', action='store_true',
+ parser.add_argument('-v', '--verbose', action='store_true',
default=False, help='Show build results while the build progresses')
- parser.add_option('-V', '--verbose-build', action='store_true',
+ parser.add_argument('-V', '--verbose-build', action='store_true',
default=False, help='Run make with V=1, logging all output')
- parser.add_option('-w', '--work-in-output', action='store_true',
+ parser.add_argument('-w', '--work-in-output', action='store_true',
default=False, help='Use the output directory as the work directory')
- parser.add_option('-W', '--ignore-warnings', action='store_true',
+ parser.add_argument('-W', '--ignore-warnings', action='store_true',
default=False, help='Return success even if there are warnings')
- parser.add_option('-x', '--exclude', dest='exclude',
- type='string', action='append',
+ parser.add_argument('-x', '--exclude', dest='exclude',
+ type=str, action='append',
help='Specify a list of boards to exclude, separated by comma')
- parser.add_option('-y', '--filter-dtb-warnings', action='store_true',
+ parser.add_argument('-y', '--filter-dtb-warnings', action='store_true',
default=False,
help='Filter out device-tree-compiler warnings from output')
- parser.add_option('-Y', '--filter-migration-warnings', action='store_true',
+ parser.add_argument('-Y', '--filter-migration-warnings', action='store_true',
default=False,
help='Filter out migration warnings from output')
- parser.usage += """ [list of target/arch/cpu/board/vendor/soc to build]
+
+def parse_args():
+ """Parse command line arguments from sys.argv[]
+
+ Returns:
+ argparse.Namespace: Parsed arguments, with the positional
+ board/arch/SoC selectors available as the 'terms' attribute
+ """
+ epilog = """ [list of target/arch/cpu/board/vendor/soc to build]
Build U-Boot for all commits in a branch. Use -n to do a dry run"""
+ parser = argparse.ArgumentParser(epilog=epilog)
+ add_upto_m(parser)
+ add_after_m(parser)
+ parser.add_argument('terms', type=str, nargs='*',
+ help='Board / SoC names to build')
+
return parser.parse_args()
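With the move from optparse to argparse, callers receive a single Namespace rather than an (options, args) pair; the old positional arguments now arrive as the 'terms' attribute. A minimal usage sketch, assuming buildman is importable as a package:

    from buildman import cmdline

    args = cmdline.parse_args()
    print(args.branch, args.jobs, args.output_dir)   # option values become attributes
    print(args.terms)                                # board/arch/SoC selectors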
diff --git a/tools/buildman/control.py b/tools/buildman/control.py
index 09a11f2..8a29534 100644
--- a/tools/buildman/control.py
+++ b/tools/buildman/control.py
@@ -2,15 +2,14 @@
# Copyright (c) 2013 The Chromium OS Authors.
#
+"""Control module for buildman
+
+This holds the main control logic for buildman, when not running tests.
+"""
+
import multiprocessing
-try:
- import importlib.resources
-except ImportError:
- # for Python 3.6
- import importlib_resources
import os
import shutil
-import subprocess
import sys
from buildman import boards
@@ -22,34 +21,58 @@ from patman import gitutil
from patman import patchstream
from u_boot_pylib import command
from u_boot_pylib import terminal
-from u_boot_pylib import tools
from u_boot_pylib.terminal import tprint
-def GetPlural(count):
+TEST_BUILDER = None
+
+def get_plural(count):
"""Returns a plural 's' if count is not 1"""
return 's' if count != 1 else ''
-def GetActionSummary(is_summary, commits, selected, options):
- """Return a string summarising the intended action.
+
+def count_build_commits(commits, step):
+ """Calculate the number of commits to be built
+
+ Args:
+ commits (list of Commit): Commits to build or None
+ step (int): Step value for commits, typically 1
Returns:
- Summary string.
+ Number of commits that will be built
"""
if commits:
count = len(commits)
- count = (count + options.step - 1) // options.step
- commit_str = '%d commit%s' % (count, GetPlural(count))
+ return (count + step - 1) // step
+ return 0
+
+
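A quick worked example of the ceiling division above, assuming buildman.control is importable; only len() is called on the commits argument, so any sequence will do:

    from buildman.control import count_build_commits

    # 7 commits stepped by 3 -> commits 0, 3 and 6 are built: (7 + 3 - 1) // 3 == 3
    assert count_build_commits(['commit'] * 7, 3) == 3
    # No commits (current-source build) -> nothing counted
    assert count_build_commits(None, 1) == 0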
+def get_action_summary(is_summary, commit_count, selected, threads, jobs):
+ """Return a string summarising the intended action.
+
+ Args:
+ is_summary (bool): True if this is a summary (otherwise it is building)
+ commit_count (int): Number of commits being built
+ selected (list of Board): List of Board objects that are marked
+ selected
+ threads (int): Number of processor threads being used
+ jobs (int): Number of jobs to build at once
+
+ Returns:
+ Summary string.
+ """
+ if commit_count:
+ commit_str = f'{commit_count} commit{get_plural(commit_count)}'
else:
commit_str = 'current source'
- str = '%s %s for %d boards' % (
- 'Summary of' if is_summary else 'Building', commit_str,
- len(selected))
- str += ' (%d thread%s, %d job%s per thread)' % (options.threads,
- GetPlural(options.threads), options.jobs, GetPlural(options.jobs))
- return str
-
-def ShowActions(series, why_selected, boards_selected, builder, options,
- board_warnings):
+ msg = (f"{'Summary of' if is_summary else 'Building'} "
+ f'{commit_str} for {len(selected)} boards')
+ msg += (f' ({threads} thread{get_plural(threads)}, '
+ f'{jobs} job{get_plural(jobs)} per thread)')
+ return msg
+
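For instance, the f-strings above produce output along these lines (a sketch assuming buildman.control is importable; the board list only needs a length):

    from buildman.control import get_action_summary

    print(get_action_summary(False, 3, ['brd'] * 10, 4, 8))
    # -> Building 3 commits for 10 boards (4 threads, 8 jobs per thread)
    print(get_action_summary(True, 0, ['brd'] * 10, 4, 8))
    # -> Summary of current source for 10 boards (4 threads, 8 jobs per thread)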
+# pylint: disable=R0913
+def show_actions(series, why_selected, boards_selected, output_dir,
+ board_warnings, step, threads, jobs, verbose):
"""Display a list of actions that we would take, if not a dry run.
Args:
@@ -61,9 +84,12 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
the value would be a list of board names.
boards_selected: Dict of selected boards, key is target name,
value is Board object
- builder: The builder that will be used to build the commits
- options: Command line options object
+ output_dir (str): Output directory for builder
board_warnings: List of warnings obtained from board selected
+ step (int): Step increment through commits
+ threads (int): Number of processor threads being used
+ jobs (int): Number of jobs to build at once
+ verbose (bool): True to indicate why each board was selected
"""
col = terminal.Color()
print('Dry run, so not doing much. But I would do this:')
@@ -72,27 +98,27 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
commits = series.commits
else:
commits = None
- print(GetActionSummary(False, commits, boards_selected,
- options))
- print('Build directory: %s' % builder.base_dir)
+ print(get_action_summary(False, count_build_commits(commits, step),
+ boards_selected, threads, jobs))
+ print(f'Build directory: {output_dir}')
if commits:
- for upto in range(0, len(series.commits), options.step):
+ for upto in range(0, len(series.commits), step):
commit = series.commits[upto]
print(' ', col.build(col.YELLOW, commit.hash[:8], bright=False), end=' ')
print(commit.subject)
print()
for arg in why_selected:
if arg != 'all':
- print(arg, ': %d boards' % len(why_selected[arg]))
- if options.verbose:
- print(' %s' % ' '.join(why_selected[arg]))
- print(('Total boards to build for each commit: %d\n' %
- len(why_selected['all'])))
+ print(arg, f': {len(why_selected[arg])} boards')
+ if verbose:
+ print(f" {' '.join(why_selected[arg])}")
+ print('Total boards to build for each '
+ f"commit: {len(why_selected['all'])}\n")
if board_warnings:
for warning in board_warnings:
print(col.build(col.YELLOW, warning))
-def ShowToolchainPrefix(brds, toolchains):
+def show_toolchain_prefix(brds, toolchains):
"""Show information about a the tool chain used by one or more boards
The function checks that all boards use the same toolchain, then prints
@@ -110,15 +136,48 @@ def ShowToolchainPrefix(brds, toolchains):
for brd in board_selected.values():
tc_set.add(toolchains.Select(brd.arch))
if len(tc_set) != 1:
- return 'Supplied boards must share one toolchain'
- return False
- tc = tc_set.pop()
- print(tc.GetEnvArgs(toolchain.VAR_CROSS_COMPILE))
- return None
+ sys.exit('Supplied boards must share one toolchain')
+ tchain = tc_set.pop()
+ print(tchain.GetEnvArgs(toolchain.VAR_CROSS_COMPILE))
+
+def show_arch(brds):
+ """Show information about the architecture used by one or more boards
+
+ The function checks that all boards use the same architecture, then prints
+ the correct value for ARCH.
+
+ Args:
+ brds (Boards): Boards object containing selected boards
+
+ Return:
+ None; exits with an error message if the boards do not share one arch
+ """
+ board_selected = brds.get_selected_dict()
+ arch_set = set()
+ for brd in board_selected.values():
+ arch_set.add(brd.arch)
+ if len(arch_set) != 1:
+ sys.exit('Supplied boards must share one arch')
+ print(arch_set.pop())
def get_allow_missing(opt_allow, opt_no_allow, num_selected, has_branch):
+ """Figure out whether to allow external blobs
+
+ Uses the allow-missing setting and the provided arguments to decide whether
+ missing external blobs should be allowed
+
+ Args:
+ opt_allow (bool): True if --allow-missing flag is set
+ opt_no_allow (bool): True if --no-allow-missing flag is set
+ num_selected (int): Number of selected boards
+ has_branch (bool): True if a git branch (to build) has been provided
+
+ Returns:
+ bool: True to allow missing external blobs, False to produce an error if
+ external blobs are used
+ """
allow_missing = False
- am_setting = bsettings.GetGlobalItemValue('allow-missing')
+ am_setting = bsettings.get_global_item_value('allow-missing')
if am_setting:
if am_setting == 'always':
allow_missing = True
@@ -133,142 +192,82 @@ def get_allow_missing(opt_allow, opt_no_allow, num_selected, has_branch):
allow_missing = False
return allow_missing
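The call site in do_buildman() at the end of this file's diff passes the command-line flags straight through; the settings-file cases other than 'always' are elided in this hunk, so only the visible behaviour is relied on in this sketch:

    from buildman.control import get_allow_missing

    def decide_allow_missing(args, selected):
        """Mirror of the call made from do_buildman() later in this diff."""
        return get_allow_missing(args.allow_missing, args.no_allow_missing,
                                 len(selected), args.branch)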
-def DoBuildman(options, args, toolchains=None, make_func=None, brds=None,
- clean_dir=False, test_thread_exceptions=False):
- """The main control code for buildman
- Args:
- options: Command line options object
- args: Command line arguments (list of strings)
- toolchains: Toolchains to use - this should be a Toolchains()
- object. If None, then it will be created and scanned
- make_func: Make function to use for the builder. This is called
- to execute 'make'. If this is None, the normal function
- will be used, which calls the 'make' tool with suitable
- arguments. This setting is useful for tests.
- brds: Boards() object to use, containing a list of available
- boards. If this is None it will be created and scanned.
- clean_dir: Used for tests only, indicates that the existing output_dir
- should be removed before starting the build
- test_thread_exceptions: Uses for tests only, True to make the threads
- raise an exception instead of reporting their result. This simulates
- a failure in the code somewhere
- """
- global builder
+def count_commits(branch, count, col, git_dir):
+ """Count the number of commits in the branch/range being built
- if options.full_help:
- with importlib.resources.path('buildman', 'README.rst') as readme:
- tools.print_full_help(str(readme))
- return 0
-
- gitutil.setup()
- col = terminal.Color()
-
- options.git_dir = os.path.join(options.git, '.git')
-
- no_toolchains = toolchains is None
- if no_toolchains:
- toolchains = toolchain.Toolchains(options.override_toolchain)
-
- if options.fetch_arch:
- if options.fetch_arch == 'list':
- sorted_list = toolchains.ListArchs()
- print(col.build(col.BLUE, 'Available architectures: %s\n' %
- ' '.join(sorted_list)))
- return 0
- else:
- fetch_arch = options.fetch_arch
- if fetch_arch == 'all':
- fetch_arch = ','.join(toolchains.ListArchs())
- print(col.build(col.CYAN, '\nDownloading toolchains: %s' %
- fetch_arch))
- for arch in fetch_arch.split(','):
- print()
- ret = toolchains.FetchAndInstall(arch)
- if ret:
- return ret
- return 0
-
- if no_toolchains:
- toolchains.GetSettings()
- toolchains.Scan(options.list_tool_chains and options.verbose)
- if options.list_tool_chains:
- toolchains.List()
- print()
- return 0
-
- if not options.output_dir:
- if options.work_in_output:
- sys.exit(col.build(col.RED, '-w requires that you specify -o'))
- options.output_dir = '..'
-
- # Work out what subset of the boards we are building
- if not brds:
- if not os.path.exists(options.output_dir):
- os.makedirs(options.output_dir)
- board_file = os.path.join(options.output_dir, 'boards.cfg')
-
- brds = boards.Boards()
- ok = brds.ensure_board_list(board_file,
- options.threads or multiprocessing.cpu_count(),
- force=options.regen_board_list,
- quiet=not options.verbose)
- if options.regen_board_list:
- return 0 if ok else 2
- brds.read_boards(board_file)
-
- exclude = []
- if options.exclude:
- for arg in options.exclude:
- exclude += arg.split(',')
-
- if options.boards:
- requested_boards = []
- for b in options.boards:
- requested_boards += b.split(',')
- else:
- requested_boards = None
- why_selected, board_warnings = brds.select_boards(args, exclude,
- requested_boards)
- selected = brds.get_selected()
- if not len(selected):
- sys.exit(col.build(col.RED, 'No matching boards found'))
-
- if options.print_prefix:
- err = ShowToolchainPrefix(brds, toolchains)
- if err:
- sys.exit(col.build(col.RED, err))
- return 0
+ Args:
+ branch (str): Name of branch to build, or None if none
+ count (int): Number of commits to build, or -1 for all
+ col (Terminal.Color): Color object to use
+ git_dir (str): Git directory to use, e.g. './.git'
- # Work out how many commits to build. We want to build everything on the
- # branch. We also build the upstream commit as a control so we can see
- # problems introduced by the first commit on the branch.
- count = options.count
- has_range = options.branch and '..' in options.branch
+ Returns:
+ tuple:
+ Number of commits being built
+ True if the 'branch' string contains a range rather than a simple
+ name
+ """
+ has_range = branch and '..' in branch
if count == -1:
- if not options.branch:
+ if not branch:
count = 1
else:
if has_range:
- count, msg = gitutil.count_commits_in_range(options.git_dir,
- options.branch)
+ count, msg = gitutil.count_commits_in_range(git_dir, branch)
else:
- count, msg = gitutil.count_commits_in_branch(options.git_dir,
- options.branch)
+ count, msg = gitutil.count_commits_in_branch(git_dir, branch)
if count is None:
sys.exit(col.build(col.RED, msg))
elif count == 0:
- sys.exit(col.build(col.RED, "Range '%s' has no commits" %
- options.branch))
+ sys.exit(col.build(col.RED,
+ f"Range '{branch}' has no commits"))
if msg:
print(col.build(col.YELLOW, msg))
count += 1 # Build upstream commit also
if not count:
- msg = ("No commits found to process in branch '%s': "
- "set branch's upstream or use -c flag" % options.branch)
+ msg = (f"No commits found to process in branch '{branch}': "
+ "set branch's upstream or use -c flag")
sys.exit(col.build(col.RED, msg))
- if options.work_in_output:
+ return count, has_range
+
+
+def determine_series(selected, col, git_dir, count, branch, work_in_output):
+ """Determine the series which is to be built, if any
+
+ If there is a series, the commits in that series are numbered by setting
+ their sequence value (starting from 0). This is used by tests.
+
+ Args:
+ selected (list of Board): List of Board objects that are marked
+ selected
+ col (Terminal.Color): Color object to use
+ git_dir (str): Git directory to use, e.g. './.git'
+ count (int): Number of commits in branch
+ branch (str): Name of branch to build, or None if none
+ work_in_output (bool): True to work in the output directory
+
+ Returns:
+ Series: Series to build, or None for none
+
+ Read the metadata from the commits. First look at the upstream commit,
+ then the ones in the branch. We would like to do something like
+ upstream/master~..branch but that isn't possible if upstream/master is
+ a merge commit (it will list all the commits that form part of the
+ merge)
+
+ Conflicting tags are not a problem for buildman, since it does not use
+ them. For example, Series-version is not useful for buildman. On the
+ other hand conflicting tags will cause an error. So allow later tags
+ to overwrite earlier ones by setting allow_overwrite=True
+ """
+
+ # Work out how many commits to build. We want to build everything on the
+ # branch. We also build the upstream commit as a control so we can see
+ # problems introduced by the first commit on the branch.
+ count, has_range = count_commits(branch, count, col, git_dir)
+ if work_in_output:
if len(selected) != 1:
sys.exit(col.build(col.RED,
'-w can only be used with a single board'))
@@ -276,141 +275,390 @@ def DoBuildman(options, args, toolchains=None, make_func=None, brds=None,
sys.exit(col.build(col.RED,
'-w can only be used with a single commit'))
- # Read the metadata from the commits. First look at the upstream commit,
- # then the ones in the branch. We would like to do something like
- # upstream/master~..branch but that isn't possible if upstream/master is
- # a merge commit (it will list all the commits that form part of the
- # merge)
- # Conflicting tags are not a problem for buildman, since it does not use
- # them. For example, Series-version is not useful for buildman. On the
- # other hand conflicting tags will cause an error. So allow later tags
- # to overwrite earlier ones by setting allow_overwrite=True
- if options.branch:
+ if branch:
if count == -1:
if has_range:
- range_expr = options.branch
+ range_expr = branch
else:
- range_expr = gitutil.get_range_in_branch(options.git_dir,
- options.branch)
- upstream_commit = gitutil.get_upstream(options.git_dir,
- options.branch)
+ range_expr = gitutil.get_range_in_branch(git_dir, branch)
+ upstream_commit = gitutil.get_upstream(git_dir, branch)
series = patchstream.get_metadata_for_list(upstream_commit,
- options.git_dir, 1, series=None, allow_overwrite=True)
+ git_dir, 1, series=None, allow_overwrite=True)
series = patchstream.get_metadata_for_list(range_expr,
- options.git_dir, None, series, allow_overwrite=True)
+ git_dir, None, series, allow_overwrite=True)
else:
# Honour the count
- series = patchstream.get_metadata_for_list(options.branch,
- options.git_dir, count, series=None, allow_overwrite=True)
+ series = patchstream.get_metadata_for_list(branch,
+ git_dir, count, series=None, allow_overwrite=True)
+
+ # Number the commits for test purposes
+ for i, commit in enumerate(series.commits):
+ commit.sequence = i
else:
series = None
- if not options.dry_run:
- options.verbose = True
- if not options.summary:
- options.show_errors = True
+ return series
+
+
+def do_fetch_arch(toolchains, col, fetch_arch):
+ """Handle the --fetch-arch option
+
+ Args:
+ toolchains (Toolchains): Tool chains to use
+ col (terminal.Color): Color object to use for output
+ fetch_arch (str): Argument passed to the --fetch-arch option
+
+ Returns:
+ int: Return code for buildman
+ """
+ if fetch_arch == 'list':
+ sorted_list = toolchains.ListArchs()
+ print(col.build(
+ col.BLUE,
+ f"Available architectures: {' '.join(sorted_list)}\n"))
+ return 0
+
+ if fetch_arch == 'all':
+ fetch_arch = ','.join(toolchains.ListArchs())
+ print(col.build(col.CYAN,
+ f'\nDownloading toolchains: {fetch_arch}'))
+ for arch in fetch_arch.split(','):
+ print()
+ ret = toolchains.FetchAndInstall(arch)
+ if ret:
+ return ret
+ return 0
+
+
+def get_toolchains(toolchains, col, override_toolchain, fetch_arch,
+ list_tool_chains, verbose):
+ """Get toolchains object to use
+
+ Args:
+ toolchains (Toolchains or None): Toolchains to use. If None, then a
+ Toolchains object will be created and scanned
+ col (Terminal.Color): Color object
+ override_toolchain (str or None): Override value for toolchain, or None
+ fetch_arch (str or None): Architectures to fetch ('list'/'all' allowed), or None
+ list_tool_chains (bool): True to list all tool chains
+ verbose (bool): True for verbose output when listing toolchains
+
+ Returns:
+ Either:
+ int: Operation completed and buildman should exit with exit code
+ Toolchains: Toolchains object to use
+ """
+ no_toolchains = toolchains is None
+ if no_toolchains:
+ toolchains = toolchain.Toolchains(override_toolchain)
+
+ if fetch_arch:
+ return do_fetch_arch(toolchains, col, fetch_arch)
+
+ if no_toolchains:
+ toolchains.GetSettings()
+ toolchains.Scan(list_tool_chains and verbose)
+ if list_tool_chains:
+ toolchains.List()
+ print()
+ return 0
+ return toolchains
+
+
+def get_boards_obj(output_dir, regen_board_list, threads, verbose):
+ """Obtain the Boards object to use
+
+ Creates the output directory and ensures there is a boards.cfg file, then
+ read it in.
+
+ Args:
+ output_dir (str): Output directory to use
+ regen_board_list (str): Filename for the regenerated board list, '-'
+ for the default boards.cfg, or None not to force regeneration
+ threads (int or None): Number of threads to use to create boards file
+ verbose (bool): False to suppress output from boards-file generation
+
+ Returns:
+ Either:
+ int: Operation completed and buildman should exit with exit code
+ Boards: Boards object to use
+ """
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ board_file = os.path.join(output_dir, 'boards.cfg')
+ if regen_board_list and regen_board_list != '-':
+ board_file = regen_board_list
+
+ brds = boards.Boards()
+ okay = brds.ensure_board_list(
+ board_file,
+ threads or multiprocessing.cpu_count(),
+ force=regen_board_list,
+ quiet=not verbose)
+ if regen_board_list:
+ return 0 if okay else 2
+ brds.read_boards(board_file)
+ return brds
+
+
+def determine_boards(brds, args, col, opt_boards, exclude):
+ """Determine which boards to build
+
+ Each element of args and exclude can refer to a board name, arch or SoC
+
+ Args:
+ brds (Boards): Boards object
+ args (list of str): Arguments describing boards to build
+ col (Terminal.Color): Color object
+ opt_boards (list of str): Specific boards to build, or None for all
+ exclude (list of str): Arguments describing boards to exclude
+
+ Returns:
+ tuple:
+ list of Board: List of Board objects that are marked selected
+ why_selected: Dictionary where each key is a buildman argument
+ provided by the user, and the value is the list of boards
+ brought in by that argument. For example, 'arm' might bring
+ in 400 boards, so in this case the key would be 'arm' and
+ the value would be a list of board names.
+ board_warnings: List of warnings obtained from board selected
+ """
+ exclude_list = []
+ if exclude:
+ for arg in exclude:
+ exclude_list += arg.split(',')
+
+ if opt_boards:
+ requested_boards = []
+ for brd in opt_boards:
+ requested_boards += brd.split(',')
+ else:
+ requested_boards = None
+ why_selected, board_warnings = brds.select_boards(args, exclude_list,
+ requested_boards)
+ selected = brds.get_selected()
+ if not selected:
+ sys.exit(col.build(col.RED, 'No matching boards found'))
+ return selected, why_selected, board_warnings
+
+
+def adjust_args(args, series, selected):
+ """Adjust arguments according to various constraints
+
+ Updates verbose, show_errors, threads, jobs and step
+
+ Args:
+ args (Namespace): Namespace object to adjust
+ series (Series): Series being built / summarised
+ selected (list of Board): List of Board objects that are marked
+ """
+ if not series and not args.dry_run:
+ args.verbose = True
+ if not args.summary:
+ args.show_errors = True
# By default we have one thread per CPU. But if there are not enough jobs
# we can have fewer threads and use a high '-j' value for make.
- if options.threads is None:
- options.threads = min(multiprocessing.cpu_count(), len(selected))
- if not options.jobs:
- options.jobs = max(1, (multiprocessing.cpu_count() +
+ if args.threads is None:
+ args.threads = min(multiprocessing.cpu_count(), len(selected))
+ if not args.jobs:
+ args.jobs = max(1, (multiprocessing.cpu_count() +
len(selected) - 1) // len(selected))
- if not options.step:
- options.step = len(series.commits) - 1
+ if not args.step:
+ args.step = len(series.commits) - 1
+
+ # We can't show function sizes without board details at present
+ if args.show_bloat:
+ args.show_detail = True
- gnu_make = command.output(os.path.join(options.git,
- 'scripts/show-gnu-make'), raise_on_error=False).rstrip()
- if not gnu_make:
- sys.exit('GNU Make not found')
- allow_missing = get_allow_missing(options.allow_missing,
- options.no_allow_missing, len(selected),
- options.branch)
+def setup_output_dir(output_dir, work_in_output, branch, no_subdirs, col,
+ clean_dir):
+ """Set up the output directory
+
+ Args:
+ output_dir (str): Output directory provided by the user, or None if none
+ work_in_output (bool): True to work in the output directory
+ branch (str): Name of branch to build, or None if none
+ no_subdirs (bool): True to put the output in the top-level output dir
+ clean_dir: Used for tests only, indicates that the existing output_dir
+ should be removed before starting the build
- # Create a new builder with the selected options.
- output_dir = options.output_dir
- if options.branch:
- dirname = options.branch.replace('/', '_')
+ Returns:
+ str: Updated output directory pathname
+ """
+ if not output_dir:
+ if work_in_output:
+ sys.exit(col.build(col.RED, '-w requires that you specify -o'))
+ output_dir = '..'
+ if branch and not no_subdirs:
# As a special case allow the board directory to be placed in the
# output directory itself rather than any subdirectory.
- if not options.no_subdirs:
- output_dir = os.path.join(options.output_dir, dirname)
+ dirname = branch.replace('/', '_')
+ output_dir = os.path.join(output_dir, dirname)
if clean_dir and os.path.exists(output_dir):
shutil.rmtree(output_dir)
- adjust_cfg = cfgutil.convert_list_to_dict(options.adjust_cfg)
+ return output_dir
+
+
+def run_builder(builder, commits, board_selected, args):
+ """Run the builder or show the summary
+
+ Args:
+ builder (Builder): Builder to use for building
+ commits (list of Commit): List of commits being built, None if no branch
+ board_selected (dict): Dict of selected boards:
+ key: target name
+ value: Board object
+ args (Namespace): Namespace to use
+
+ Returns:
+ int: Return code for buildman
+ """
+ gnu_make = command.output(os.path.join(args.git,
+ 'scripts/show-gnu-make'), raise_on_error=False).rstrip()
+ if not gnu_make:
+ sys.exit('GNU Make not found')
+ builder.gnu_make = gnu_make
+
+ if not args.ide:
+ commit_count = count_build_commits(commits, args.step)
+ tprint(get_action_summary(args.summary, commit_count, board_selected,
+ args.threads, args.jobs))
+
+ builder.set_display_options(
+ args.show_errors, args.show_sizes, args.show_detail, args.show_bloat,
+ args.list_error_boards, args.show_config, args.show_environment,
+ args.filter_dtb_warnings, args.filter_migration_warnings, args.ide)
+ if args.summary:
+ builder.show_summary(commits, board_selected)
+ else:
+ fail, warned, excs = builder.build_boards(
+ commits, board_selected, args.keep_outputs, args.verbose)
+ if excs:
+ return 102
+ if fail:
+ return 100
+ if warned and not args.ignore_warnings:
+ return 101
+ return 0
+
+
+def calc_adjust_cfg(adjust_cfg, reproducible_builds):
+ """Calculate the value to use for adjust_cfg
+
+ Args:
+ adjust_cfg (list of str): List of configuration changes. See cfgutil for
+ details
+ reproducible_builds (bool): True to adjust the configuration to get
+ reproducible builds
+
+ Returns:
+ dict of str: Adjusted configuration changes, keyed by option name
+ """
+ adjust_cfg = cfgutil.convert_list_to_dict(adjust_cfg)
# Drop LOCALVERSION_AUTO since it changes the version string on every commit
- if options.reproducible_builds:
+ if reproducible_builds:
# If these are mentioned, leave the local version alone
if 'LOCALVERSION' in adjust_cfg or 'LOCALVERSION_AUTO' in adjust_cfg:
print('Not dropping LOCALVERSION_AUTO for reproducible build')
else:
adjust_cfg['LOCALVERSION_AUTO'] = '~'
+ return adjust_cfg
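A standalone sketch of just the reproducible-builds tweak above, with the cfgutil conversion left out (its dict format is not shown in this diff); the '~' value marks an option for disabling, as described in run_commit()'s docstring:

    def drop_localversion_auto(adjust_cfg, reproducible_builds):
        """Apply the LOCALVERSION_AUTO rule to an already-converted dict."""
        if reproducible_builds:
            if 'LOCALVERSION' in adjust_cfg or 'LOCALVERSION_AUTO' in adjust_cfg:
                print('Not dropping LOCALVERSION_AUTO for reproducible build')
            else:
                adjust_cfg['LOCALVERSION_AUTO'] = '~'   # '~' means 'disable this option'
        return adjust_cfg

    print(drop_localversion_auto({}, True))                          # {'LOCALVERSION_AUTO': '~'}
    print(drop_localversion_auto({'LOCALVERSION': '"-rc1"'}, True))  # left untouched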
- builder = Builder(toolchains, output_dir, options.git_dir,
- options.threads, options.jobs, gnu_make=gnu_make, checkout=True,
- show_unknown=options.show_unknown, step=options.step,
- no_subdirs=options.no_subdirs, full_path=options.full_path,
- verbose_build=options.verbose_build,
- mrproper=options.mrproper,
- per_board_out_dir=options.per_board_out_dir,
- config_only=options.config_only,
- squash_config_y=not options.preserve_config_y,
- warnings_as_errors=options.warnings_as_errors,
- work_in_output=options.work_in_output,
- test_thread_exceptions=test_thread_exceptions,
- adjust_cfg=adjust_cfg,
- allow_missing=allow_missing, no_lto=options.no_lto,
- reproducible_builds=options.reproducible_builds)
- builder.force_config_on_failure = not options.quick
- if make_func:
- builder.do_make = make_func
+
+def do_buildman(args, toolchains=None, make_func=None, brds=None,
+ clean_dir=False, test_thread_exceptions=False):
+ """The main control code for buildman
+
+ Args:
+ args (Namespace): Parsed command-line arguments
+ toolchains: Toolchains to use - this should be a Toolchains()
+ object. If None, then it will be created and scanned
+ make_func: Make function to use for the builder. This is called
+ to execute 'make'. If this is None, the normal function
+ will be used, which calls the 'make' tool with suitable
+ arguments. This setting is useful for tests.
+ brds: Boards() object to use, containing a list of available
+ boards. If this is None it will be created and scanned.
+ clean_dir: Used for tests only, indicates that the existing output_dir
+ should be removed before starting the build
+ test_thread_exceptions: Used for tests only, True to make the threads
+ raise an exception instead of reporting their result. This simulates
+ a failure in the code somewhere
+ """
+ # Used so testing can obtain the builder: pylint: disable=W0603
+ global TEST_BUILDER
+
+ gitutil.setup()
+ col = terminal.Color()
+
+ git_dir = os.path.join(args.git, '.git')
+
+ toolchains = get_toolchains(toolchains, col, args.override_toolchain,
+ args.fetch_arch, args.list_tool_chains,
+ args.verbose)
+ output_dir = setup_output_dir(
+ args.output_dir, args.work_in_output, args.branch,
+ args.no_subdirs, col, clean_dir)
+
+ # Work out what subset of the boards we are building
+ if not brds:
+ brds = get_boards_obj(output_dir, args.regen_board_list,
+ args.threads, args.verbose)
+ if isinstance(brds, int):
+ return brds
+
+ selected, why_selected, board_warnings = determine_boards(
+ brds, args.terms, col, args.boards, args.exclude)
+
+ if args.print_prefix:
+ show_toolchain_prefix(brds, toolchains)
+ return 0
+
+ if args.print_arch:
+ show_arch(brds)
+ return 0
+
+ series = determine_series(selected, col, git_dir, args.count,
+ args.branch, args.work_in_output)
+
+ adjust_args(args, series, selected)
# For a dry run, just show our actions as a sanity check
- if options.dry_run:
- ShowActions(series, why_selected, selected, builder, options,
- board_warnings)
- else:
- builder.force_build = options.force_build
- builder.force_build_failures = options.force_build_failures
- builder.force_reconfig = options.force_reconfig
- builder.in_tree = options.in_tree
-
- # Work out which boards to build
- board_selected = brds.get_selected_dict()
-
- if series:
- commits = series.commits
- # Number the commits for test purposes
- for commit in range(len(commits)):
- commits[commit].sequence = commit
- else:
- commits = None
-
- if not options.ide:
- tprint(GetActionSummary(options.summary, commits, board_selected,
- options))
-
- # We can't show function sizes without board details at present
- if options.show_bloat:
- options.show_detail = True
- builder.SetDisplayOptions(
- options.show_errors, options.show_sizes, options.show_detail,
- options.show_bloat, options.list_error_boards, options.show_config,
- options.show_environment, options.filter_dtb_warnings,
- options.filter_migration_warnings, options.ide)
- if options.summary:
- builder.ShowSummary(commits, board_selected)
- else:
- fail, warned, excs = builder.BuildBoards(
- commits, board_selected, options.keep_outputs, options.verbose)
- if excs:
- return 102
- elif fail:
- return 100
- elif warned and not options.ignore_warnings:
- return 101
- return 0
+ if args.dry_run:
+ show_actions(series, why_selected, selected, output_dir, board_warnings,
+ args.step, args.threads, args.jobs,
+ args.verbose)
+ return 0
+
+ # Create a new builder with the selected args
+ builder = Builder(toolchains, output_dir, git_dir,
+ args.threads, args.jobs, checkout=True,
+ show_unknown=args.show_unknown, step=args.step,
+ no_subdirs=args.no_subdirs, full_path=args.full_path,
+ verbose_build=args.verbose_build,
+ mrproper=args.mrproper,
+ per_board_out_dir=args.per_board_out_dir,
+ config_only=args.config_only,
+ squash_config_y=not args.preserve_config_y,
+ warnings_as_errors=args.warnings_as_errors,
+ work_in_output=args.work_in_output,
+ test_thread_exceptions=test_thread_exceptions,
+ adjust_cfg=calc_adjust_cfg(args.adjust_cfg,
+ args.reproducible_builds),
+ allow_missing=get_allow_missing(args.allow_missing,
+ args.no_allow_missing,
+ len(selected), args.branch),
+ no_lto=args.no_lto,
+ reproducible_builds=args.reproducible_builds,
+                      force_build=args.force_build,
+                      force_build_failures=args.force_build_failures,
+                      force_reconfig=args.force_reconfig, in_tree=args.in_tree,
+ force_config_on_failure=not args.quick, make_func=make_func)
+
+ TEST_BUILDER = builder
+
+ return run_builder(builder, series.commits if series else None,
+ brds.get_selected_dict(), args)
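
For orientation, the refactored control module is now driven entirely by the argparse namespace: main.py parses the arguments, loads the settings file and hands the namespace to do_buildman(). Below is a minimal sketch of that call sequence, not part of this patch; it assumes the U-Boot tools/ directory is importable, that 'sandbox' is a valid board name and that '-n' selects a dry run (the checkout path is purely illustrative).

    import os
    import sys

    # Make the buildman package importable; adjust for your checkout
    # (this path is illustrative only).
    sys.path.insert(1, os.path.join('/path/to/u-boot', 'tools'))

    from buildman import bsettings, cmdline, control

    # cmdline.parse_args() reads sys.argv, so set it up the way the
    # functional tests do; '-n' is assumed to request a dry run.
    sys.argv = ['buildman', '--boards', 'sandbox', '-n']
    args = cmdline.parse_args()

    bsettings.setup(args.config_file)
    ret_code = control.do_buildman(args)
    print('buildman exit code:', ret_code)
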
diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py
index ebd78f2..d89bde1 100644
--- a/tools/buildman/func_test.py
+++ b/tools/buildman/func_test.py
@@ -184,8 +184,8 @@ class TestFunctional(unittest.TestCase):
self._buildman_pathname = sys.argv[0]
self._buildman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
command.test_result = self._HandleCommand
- bsettings.Setup(None)
- bsettings.AddFile(settings_data)
+ bsettings.setup(None)
+ bsettings.add_file(settings_data)
self.setupToolchains()
self._toolchains.Add('arm-gcc', test=False)
self._toolchains.Add('powerpc-gcc', test=False)
@@ -225,29 +225,34 @@ class TestFunctional(unittest.TestCase):
return command.run_pipe([[self._buildman_pathname] + list(args)],
capture=True, capture_stderr=True)
- def _RunControl(self, *args, brds=None, clean_dir=False,
- test_thread_exceptions=False):
+ def _RunControl(self, *args, brds=False, clean_dir=False,
+ test_thread_exceptions=False, get_builder=True):
"""Run buildman
Args:
args: List of arguments to pass
- brds: Boards object
+            brds: Boards object to use; False (the default) selects
+                self._boards, while None is passed through so that buildman
+                creates its own
clean_dir: Used for tests only, indicates that the existing output_dir
should be removed before starting the build
            test_thread_exceptions: Used for tests only, True to make the threads
raise an exception instead of reporting their result. This simulates
a failure in the code somewhere
+ get_builder (bool): Set self._builder to the resulting builder
Returns:
result code from buildman
"""
sys.argv = [sys.argv[0]] + list(args)
- options, args = cmdline.ParseArgs()
- result = control.DoBuildman(options, args, toolchains=self._toolchains,
- make_func=self._HandleMake, brds=brds or self._boards,
- clean_dir=clean_dir,
- test_thread_exceptions=test_thread_exceptions)
- self._builder = control.builder
+ args = cmdline.parse_args()
+        if brds is False:
+ brds = self._boards
+ result = control.do_buildman(
+ args, toolchains=self._toolchains, make_func=self._HandleMake,
+ brds=brds, clean_dir=clean_dir,
+ test_thread_exceptions=test_thread_exceptions)
+ if get_builder:
+ self._builder = control.TEST_BUILDER
return result
def testFullHelp(self):
@@ -496,10 +501,12 @@ Some images are invalid'''
for commit in range(self._commits):
for brd in self._boards.get_list():
if brd.arch != 'sandbox':
- errfile = self._builder.GetErrFile(commit, brd.target)
+ errfile = self._builder.get_err_file(commit, brd.target)
fd = open(errfile)
- self.assertEqual(fd.readlines(),
- ['No tool chain for %s\n' % brd.arch])
+ self.assertEqual(
+ fd.readlines(),
+ [f'Tool chain error for {brd.arch}: '
+ f"No tool chain found for arch '{brd.arch}'"])
fd.close()
def testBranch(self):
@@ -686,7 +693,7 @@ Some images are invalid'''
def testBlobSettingsAlways(self):
"""Test the 'always' policy"""
- bsettings.SetItem('global', 'allow-missing', 'always')
+ bsettings.set_item('global', 'allow-missing', 'always')
self.assertEqual(True,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(False,
@@ -694,7 +701,7 @@ Some images are invalid'''
def testBlobSettingsBranch(self):
"""Test the 'branch' policy"""
- bsettings.SetItem('global', 'allow-missing', 'branch')
+ bsettings.set_item('global', 'allow-missing', 'branch')
self.assertEqual(False,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(True,
@@ -704,7 +711,7 @@ Some images are invalid'''
def testBlobSettingsMultiple(self):
"""Test the 'multiple' policy"""
- bsettings.SetItem('global', 'allow-missing', 'multiple')
+ bsettings.set_item('global', 'allow-missing', 'multiple')
self.assertEqual(False,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(True,
@@ -714,7 +721,7 @@ Some images are invalid'''
def testBlobSettingsBranchMultiple(self):
"""Test the 'branch multiple' policy"""
- bsettings.SetItem('global', 'allow-missing', 'branch multiple')
+ bsettings.set_item('global', 'allow-missing', 'branch multiple')
self.assertEqual(False,
control.get_allow_missing(False, False, 1, False))
self.assertEqual(True,
@@ -779,3 +786,36 @@ Some images are invalid'''
CONFIG_LOCALVERSION=y
''', cfg_data)
self.assertIn('Not dropping LOCALVERSION_AUTO', stdout.getvalue())
+
+ def test_print_prefix(self):
+ """Test that we can print the toolchain prefix"""
+ with test_util.capture_sys_output() as (stdout, stderr):
+ result = self._RunControl('-A', 'board0')
+ self.assertEqual('arm-\n', stdout.getvalue())
+ self.assertEqual('', stderr.getvalue())
+
+ def test_regen_boards(self):
+ """Test that we can regenerate the boards.cfg file"""
+ outfile = os.path.join(self._output_dir, 'test-boards.cfg')
+ if os.path.exists(outfile):
+ os.remove(outfile)
+ result = self._RunControl('-R', outfile, brds=None, get_builder=False)
+ self.assertTrue(os.path.exists(outfile))
+
+ def test_single_boards(self):
+ """Test building single boards"""
+ self._RunControl('--boards', 'board1')
+ self.assertEqual(1, self._builder.count)
+
+ self._RunControl('--boards', 'board1', '--boards', 'board2')
+ self.assertEqual(2, self._builder.count)
+
+ self._RunControl('--boards', 'board1,board2', '--boards', 'board4')
+ self.assertEqual(3, self._builder.count)
+
+ def test_print_arch(self):
+ """Test that we can print the board architecture"""
+ with test_util.capture_sys_output() as (stdout, stderr):
+ result = self._RunControl('--print-arch', 'board0')
+ self.assertEqual('arm\n', stdout.getvalue())
+ self.assertEqual('', stderr.getvalue())
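
The new tests above combine test_util.capture_sys_output() with a _RunControl()-style driver. A reduced sketch of that pattern outside the unittest fixture is shown below; it is not part of the patch, and it assumes settings are loaded the way main.py does (the 'board0' example only exists in the test fixture).

    import sys

    from buildman import bsettings, cmdline, control
    from u_boot_pylib import test_util

    def run_and_capture(*argv):
        """Run buildman and capture what it prints, as the tests above do."""
        # parse_args() reads sys.argv, so splice the arguments in first
        sys.argv = [sys.argv[0]] + list(argv)
        args = cmdline.parse_args()
        # main.py loads the settings before handing over to the control module
        bsettings.setup(args.config_file)
        with test_util.capture_sys_output() as (stdout, stderr):
            ret = control.do_buildman(args)
        # do_buildman() publishes the builder it used for test inspection
        return ret, control.TEST_BUILDER, stdout.getvalue(), stderr.getvalue()

    # e.g. test_print_prefix() above expects ('-A', 'board0') to print 'arm-\n'
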
diff --git a/tools/buildman/main.py b/tools/buildman/main.py
index 5e1f68d..cb3d0b4 100755
--- a/tools/buildman/main.py
+++ b/tools/buildman/main.py
@@ -6,60 +6,88 @@
"""See README for more information"""
-import doctest
-import multiprocessing
+try:
+ from importlib.resources import files
+except ImportError:
+ # for Python 3.6
+ import importlib_resources
import os
-import re
import sys
# Bring in the patman libraries
+# pylint: disable=C0413
our_path = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, os.path.join(our_path, '..'))
# Our modules
-from buildman import board
from buildman import bsettings
-from buildman import builder
from buildman import cmdline
from buildman import control
-from buildman import toolchain
-from patman import patchstream
-from patman import gitutil
-from u_boot_pylib import terminal
from u_boot_pylib import test_util
+from u_boot_pylib import tools
-def RunTests(skip_net_tests, verboose, args):
+def run_tests(skip_net_tests, verbose, args):
+ """Run the buildman tests
+
+ Args:
+ skip_net_tests (bool): True to skip tests which need the network
+        verbose (int): Verbosity level to use (0-4)
+        args (Namespace): Parsed arguments; args.terms holds the list of
+            tests to run (empty to run all)
+ """
+ # These imports are here since tests are not available when buildman is
+ # installed as a Python module
+ # pylint: disable=C0415
from buildman import func_test
from buildman import test
- import doctest
- test_name = args and args[0] or None
+ test_name = args.terms and args.terms[0] or None
if skip_net_tests:
test.use_network = False
    # Run the entry tests first, since these need to be the first to import the
# 'entry' module.
result = test_util.run_test_suites(
- 'buildman', False, verboose, False, None, test_name, [],
+ 'buildman', False, verbose, False, args.threads, test_name, [],
[test.TestBuild, func_test.TestFunctional,
'buildman.toolchain', 'patman.gitutil'])
return (0 if result.wasSuccessful() else 1)
+def run_test_coverage():
+ """Run the tests and check that we get 100% coverage"""
+ test_util.run_test_coverage(
+ 'tools/buildman/buildman', None,
+ ['tools/patman/*.py', 'tools/u_boot_pylib/*', '*test_fdt.py',
+ 'tools/buildman/kconfiglib.py', 'tools/buildman/*test*.py',
+ 'tools/buildman/main.py'],
+ '/tmp/b', single_thread='-T1')
+
+
def run_buildman():
- options, args = cmdline.ParseArgs()
+    """Run buildman
- if not options.debug:
+ This is the main program. It collects arguments and runs either the tests or
+ the control module.
+ """
+ args = cmdline.parse_args()
+
+ if not args.debug:
sys.tracebacklimit = 0
# Run our meagre tests
- if cmdline.HAS_TESTS and options.test:
- RunTests(options.skip_net_tests, options.verbose, args)
+ if cmdline.HAS_TESTS and args.test:
+ run_tests(args.skip_net_tests, args.verbose, args)
+
+ elif cmdline.HAS_TESTS and args.coverage:
+ run_test_coverage()
+
+ elif args.full_help:
+ tools.print_full_help(str(files('buildman').joinpath('README.rst')))
# Build selected commits for selected boards
else:
- bsettings.Setup(options.config_file)
- ret_code = control.DoBuildman(options, args)
+ bsettings.setup(args.config_file)
+ ret_code = control.do_buildman(args)
sys.exit(ret_code)
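
The try/except import at the top of main.py is the usual compatibility shim for interpreters that predate importlib.resources.files(). A self-contained sketch of how that shim is typically completed follows; it is not from this patch and assumes the importlib_resources backport is installed on older Pythons.

    # Prefer the standard library; older interpreters (e.g. Python 3.6) need
    # the importlib_resources backport, which provides the same files() API.
    try:
        from importlib.resources import files
    except ImportError:
        from importlib_resources import files

    def read_buildman_readme():
        """Return the text of buildman's packaged README.rst."""
        return files('buildman').joinpath('README.rst').read_text()
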
diff --git a/tools/buildman/test.py b/tools/buildman/test.py
index 9fa6445..bdd3d84 100644
--- a/tools/buildman/test.py
+++ b/tools/buildman/test.py
@@ -138,8 +138,8 @@ class TestBuild(unittest.TestCase):
self.brds.select_boards([])
# Add some test settings
- bsettings.Setup(None)
- bsettings.AddFile(settings_data)
+ bsettings.setup(None)
+ bsettings.add_file(settings_data)
# Set up the toolchains
self.toolchains = toolchain.Toolchains()
@@ -208,8 +208,8 @@ class TestBuild(unittest.TestCase):
# Build the boards for the pre-defined commits and warnings/errors
# associated with each. This calls our Make() to inject the fake output.
- build.BuildBoards(self.commits, board_selected, keep_outputs=False,
- verbose=False)
+ build.build_boards(self.commits, board_selected, keep_outputs=False,
+ verbose=False)
lines = terminal.get_print_test_lines()
count = 0
for line in lines:
@@ -219,8 +219,8 @@ class TestBuild(unittest.TestCase):
# We should get two starting messages, an update for every commit built
# and a summary message
self.assertEqual(count, len(commits) * len(BOARDS) + 3)
- build.SetDisplayOptions(**kwdisplay_args);
- build.ShowSummary(self.commits, board_selected)
+        build.set_display_options(**kwdisplay_args)
+ build.show_summary(self.commits, board_selected)
if echo_lines:
terminal.echo_print_test_lines()
return iter(terminal.get_print_test_lines())
@@ -465,7 +465,7 @@ class TestBuild(unittest.TestCase):
options.show_errors = False
options.keep_outputs = False
args = ['tegra20']
- control.DoBuildman(options, args)
+ control.do_buildman(options, args)
def testBoardSingle(self):
"""Test single board selection"""
@@ -528,17 +528,17 @@ class TestBuild(unittest.TestCase):
'sandbox']),
({'all': ['board4'], 'sandbox': ['board4']}, []))
def CheckDirs(self, build, dirname):
- self.assertEqual('base%s' % dirname, build._GetOutputDir(1))
+ self.assertEqual('base%s' % dirname, build.get_output_dir(1))
self.assertEqual('base%s/fred' % dirname,
- build.GetBuildDir(1, 'fred'))
+ build.get_build_dir(1, 'fred'))
self.assertEqual('base%s/fred/done' % dirname,
- build.GetDoneFile(1, 'fred'))
+ build.get_done_file(1, 'fred'))
self.assertEqual('base%s/fred/u-boot.sizes' % dirname,
- build.GetFuncSizesFile(1, 'fred', 'u-boot'))
+ build.get_func_sizes_file(1, 'fred', 'u-boot'))
self.assertEqual('base%s/fred/u-boot.objdump' % dirname,
- build.GetObjdumpFile(1, 'fred', 'u-boot'))
+ build.get_objdump_file(1, 'fred', 'u-boot'))
self.assertEqual('base%s/fred/err' % dirname,
- build.GetErrFile(1, 'fred'))
+ build.get_err_file(1, 'fred'))
def testOutputDir(self):
build = builder.Builder(self.toolchains, BASE_DIR, None, 1, 2,
@@ -622,7 +622,7 @@ class TestBuild(unittest.TestCase):
build = builder.Builder(self.toolchains, base_dir, None, 1, 2)
build.commits = self.commits
build.commit_count = len(commits)
- result = set(build._GetOutputSpaceRemovals())
+ result = set(build._get_output_space_removals())
expected = set([os.path.join(base_dir, f) for f in to_remove])
self.assertEqual(expected, result)
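
For reference, the renamed Builder path helpers exercised in CheckDirs() all resolve to files under the per-commit output directory. The relative layout asserted above (board 'fred', image 'u-boot') can be summarised as a small lookup, given here purely for documentation; it is not part of the patch.

    # Relative paths implied by the assertions in CheckDirs() above, keyed by
    # the new snake_case Builder method names (board 'fred', image 'u-boot').
    BUILDER_OUTPUT_PATHS = {
        'get_build_dir': 'fred',
        'get_done_file': 'fred/done',
        'get_func_sizes_file': 'fred/u-boot.sizes',
        'get_objdump_file': 'fred/u-boot.objdump',
        'get_err_file': 'fred/err',
    }
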
diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py
index 0ecd845..57bf614 100644
--- a/tools/buildman/toolchain.py
+++ b/tools/buildman/toolchain.py
@@ -139,7 +139,7 @@ class Toolchain:
"""Get toolchain wrapper from the setting file.
"""
value = ''
- for name, value in bsettings.GetItems('toolchain-wrapper'):
+ for name, value in bsettings.get_items('toolchain-wrapper'):
if not value:
print("Warning: Wrapper not found")
if value:
@@ -249,7 +249,7 @@ class Toolchains:
self.prefixes = {}
self.paths = []
self.override_toolchain = override_toolchain
- self._make_flags = dict(bsettings.GetItems('make-flags'))
+ self._make_flags = dict(bsettings.get_items('make-flags'))
def GetPathList(self, show_warning=True):
"""Get a list of available toolchain paths
@@ -261,7 +261,7 @@ class Toolchains:
List of strings, each a path to a toolchain mentioned in the
[toolchain] section of the settings file.
"""
- toolchains = bsettings.GetItems('toolchain')
+ toolchains = bsettings.get_items('toolchain')
if show_warning and not toolchains:
print(("Warning: No tool chains. Please run 'buildman "
"--fetch-arch all' to download all available toolchains, or "
@@ -283,7 +283,7 @@ class Toolchains:
Args:
show_warning: True to show a warning if there are no tool chains.
"""
- self.prefixes = bsettings.GetItems('toolchain-prefix')
+ self.prefixes = bsettings.get_items('toolchain-prefix')
self.paths += self.GetPathList(show_warning)
def Add(self, fname, test=True, verbose=False, priority=PRIORITY_CALC,
@@ -399,7 +399,7 @@ class Toolchains:
returns:
toolchain object, or None if none found
"""
- for tag, value in bsettings.GetItems('toolchain-alias'):
+ for tag, value in bsettings.get_items('toolchain-alias'):
if arch == tag:
for alias in value.split():
if alias in self.toolchains:
@@ -421,7 +421,7 @@ class Toolchains:
Returns:
Resolved string
- >>> bsettings.Setup(None)
+ >>> bsettings.setup(None)
>>> tcs = Toolchains()
>>> tcs.Add('fred', False)
>>> var_dict = {'oblique' : 'OBLIQUE', 'first' : 'fi${second}rst', \
@@ -598,5 +598,5 @@ class Toolchains:
if not self.TestSettingsHasPath(dirpath):
print(("Adding 'download' to config file '%s'" %
bsettings.config_fname))
- bsettings.SetItem('toolchain', 'download', '%s/*/*' % dest)
+ bsettings.set_item('toolchain', 'download', '%s/*/*' % dest)
return 0
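
All of the toolchain.py call sites above follow the same pattern: read one section of the buildman settings file as (name, value) pairs via bsettings.get_items(). A minimal sketch is given below; it is not part of the patch and assumes a default ~/.buildman settings file containing the sections named in the code above.

    from buildman import bsettings

    # An empty filename falls back to the default settings file, as
    # moveconfig.py relies on below; get_items() yields (name, value) pairs
    # for one section of that file.
    bsettings.setup('')

    toolchain_paths = dict(bsettings.get_items('toolchain'))
    aliases = dict(bsettings.get_items('toolchain-alias'))
    make_flags = dict(bsettings.get_items('make-flags'))

    for arch, alias_list in aliases.items():
        print(f'{arch}: {alias_list.split()}')
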
diff --git a/tools/moveconfig.py b/tools/moveconfig.py
index c4d72ed..6cbecc3 100755
--- a/tools/moveconfig.py
+++ b/tools/moveconfig.py
@@ -2037,7 +2037,7 @@ doc/develop/moveconfig.rst for documentation.'''
if not args.cleanup_headers_only:
check_clean_directory()
- bsettings.Setup('')
+ bsettings.setup('')
toolchains = toolchain.Toolchains()
toolchains.GetSettings()
toolchains.Scan(verbose=False)
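
The bootstrap sequence shown here (settings, then a toolchain scan) is essentially the same one buildman performs internally. A minimal standalone sketch, not part of the patch, and assuming a settings file with a [toolchain] section (otherwise the scan finds nothing):

    from buildman import bsettings, toolchain

    bsettings.setup('')               # load the default settings file
    tcs = toolchain.Toolchains()
    tcs.GetSettings()                 # read [toolchain] paths and prefixes
    tcs.Scan(verbose=False)           # probe those paths for cross-compilers
    print(sorted(tcs.toolchains))     # toolchains found, keyed by arch
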
diff --git a/tools/u_boot_pylib/pyproject.toml b/tools/u_boot_pylib/pyproject.toml
index 3f33caf..037c5d6 100644
--- a/tools/u_boot_pylib/pyproject.toml
+++ b/tools/u_boot_pylib/pyproject.toml
@@ -9,7 +9,7 @@ authors = [
{ name="Simon Glass", email="sjg@chromium.org" },
]
description = "U-Boot python library"
-readme = "README.md"
+readme = "README.rst"
requires-python = ">=3.7"
classifiers = [
"Programming Language :: Python :: 3",
@@ -20,3 +20,7 @@ classifiers = [
[project.urls]
"Homepage" = "https://u-boot.readthedocs.io"
"Bug Tracker" = "https://source.denx.de/groups/u-boot/-/issues"
+
+[tool.setuptools.packages.find]
+where = [".."]
+include = ["u_boot_pylib*"]
diff --git a/tools/u_boot_pylib/test_util.py b/tools/u_boot_pylib/test_util.py
index e7564e1..f18d385 100644
--- a/tools/u_boot_pylib/test_util.py
+++ b/tools/u_boot_pylib/test_util.py
@@ -24,7 +24,7 @@ except:
def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None,
- extra_args=None):
+ extra_args=None, single_thread='-P1'):
"""Run tests and check that we get 100% coverage
Args:
@@ -39,6 +39,9 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None
required: List of modules which must be in the coverage report
extra_args (str): Extra arguments to pass to the tool before the -t/test
arg
+ single_thread (str): Argument string to make the tests run
+ single-threaded. This is necessary to get proper coverage results.
+        The default is '-P1'
Raises:
ValueError if the code coverage is not 100%
@@ -58,8 +61,9 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None
if build_dir:
prefix = 'PYTHONPATH=$PYTHONPATH:%s/sandbox_spl/tools ' % build_dir
cmd = ('%spython3-coverage run '
- '--omit "%s" %s %s %s -P1' % (prefix, ','.join(glob_list),
- prog, extra_args or '', test_cmd))
+ '--omit "%s" %s %s %s %s' % (prefix, ','.join(glob_list),
+ prog, extra_args or '', test_cmd,
+ single_thread or '-P1'))
os.system(cmd)
stdout = command.output('python3-coverage', 'report')
lines = stdout.splitlines()
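
The new single_thread parameter lets each caller pass whichever flag its own test runner uses for single-threaded operation, since coverage data is only reliable without parallelism. Buildman passes '-T1', as run_test_coverage() in main.py above shows; a minimal sketch of such a call follows, with the omit globs shortened for illustration.

    from u_boot_pylib import test_util

    # Measure coverage of buildman's tests, forcing single-threaded execution
    # with buildman's '-T1' flag instead of the default '-P1'.
    test_util.run_test_coverage(
        'tools/buildman/buildman', None,
        ['tools/patman/*.py', 'tools/buildman/*test*.py'],
        '/tmp/b', single_thread='-T1')
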