author     Tom Rini <trini@konsulko.com>   2021-04-29 21:03:38 -0400
committer  Tom Rini <trini@konsulko.com>   2021-04-29 21:03:38 -0400
commit     8ddaf943589756442bba21e5be645cd47526d82b (patch)
tree       5790a6435ce416342bc9747a55d2f23a8f141c2e
parent     f3a0d2c1af630cc09a34c2159aa2dfa12b831762 (diff)
parent     5b700cdcff61426843405ca1df4b549237e8bbc2 (diff)
Merge tag 'dm-pull-29apr21' of https://source.denx.de/u-boot/custodians/u-boot-dm

buildman environment fix
binman FMAP improvements
minor test improvements and fixes
minor dm improvements
-rw-r--r--  .azure-pipelines.yml                         |   2
-rw-r--r--  .gitlab-ci.yml                               |   2
-rw-r--r--  drivers/core/ofnode.c                        |  11
-rw-r--r--  drivers/core/root.c                          |   2
-rw-r--r--  drivers/firmware/scmi/smt.c                  |  12
-rw-r--r--  drivers/net/mscc_eswitch/jr2_switch.c        |   4
-rw-r--r--  drivers/net/mscc_eswitch/ocelot_switch.c     |   4
-rw-r--r--  drivers/net/mscc_eswitch/serval_switch.c     |   4
-rw-r--r--  drivers/net/mscc_eswitch/servalt_switch.c    |   4
-rw-r--r--  include/dm/ofnode.h                          |  10
-rw-r--r--  include/fdtdec.h                             |   5
-rw-r--r--  include/tpm-v2.h                             |  24
-rw-r--r--  lib/fdtdec.c                                 |   6
-rwxr-xr-x  scripts/checkpatch.pl                        |  18
-rw-r--r--  test/dm/ofnode.c                             |  31
-rw-r--r--  test/py/requirements.txt                     |   1
-rw-r--r--  test/test-main.c                             |  23
-rw-r--r--  tools/binman/entries.rst                     |  13
-rw-r--r--  tools/binman/etype/fmap.py                   |  19
-rw-r--r--  tools/binman/ftest.py                        |  86
-rw-r--r--  tools/binman/test/095_fmap_x86_section.dts   |   2
-rw-r--r--  tools/buildman/builder.py                    |  22
-rw-r--r--  tools/buildman/builderthread.py              |  50
-rw-r--r--  tools/buildman/control.py                    |  18
-rw-r--r--  tools/buildman/func_test.py                  |  40
-rw-r--r--  tools/buildman/toolchain.py                  |  24
-rw-r--r--  tools/dtoc/dtb_platdata.py                   |  24
-rwxr-xr-x  tools/dtoc/test_dtoc.py                      |  51
-rw-r--r--  tools/patman/checkpatch.py                   | 218
-rw-r--r--  tools/patman/func_test.py                    |  19
30 files changed, 472 insertions, 277 deletions
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
index d176e04..59e99b8 100644
--- a/.azure-pipelines.yml
+++ b/.azure-pipelines.yml
@@ -144,7 +144,7 @@ jobs:
export USER=azure
virtualenv -p /usr/bin/python3 /tmp/venv
. /tmp/venv/bin/activate
- pip install pyelftools pytest pygit2
+ pip install -r test/py/requirements.txt
export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 51bd643..bff4874 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -151,7 +151,7 @@ Run binman, buildman, dtoc, Kconfig and patman testsuites:
export USER=gitlab;
virtualenv -p /usr/bin/python3 /tmp/venv;
. /tmp/venv/bin/activate;
- pip install pyelftools pytest pygit2;
+ pip install -r test/py/requirements.txt;
export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl;
export PYTHONPATH="${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt";
export PATH="${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}";
diff --git a/drivers/core/ofnode.c b/drivers/core/ofnode.c
index fa0bd2a..d505333 100644
--- a/drivers/core/ofnode.c
+++ b/drivers/core/ofnode.c
@@ -303,6 +303,8 @@ fdt_addr_t ofnode_get_addr_size_index(ofnode node, int index, fdt_size_t *size)
{
int na, ns;
+ *size = FDT_SIZE_T_NONE;
+
if (ofnode_is_np(node)) {
const __be32 *prop_val;
u64 size64;
@@ -347,6 +349,15 @@ fdt_addr_t ofnode_get_addr(ofnode node)
return ofnode_get_addr_index(node, 0);
}
+fdt_size_t ofnode_get_size(ofnode node)
+{
+ fdt_size_t size;
+
+ ofnode_get_addr_size_index(node, 0, &size);
+
+ return size;
+}
+
int ofnode_stringlist_search(ofnode node, const char *property,
const char *string)
{
diff --git a/drivers/core/root.c b/drivers/core/root.c
index d9a19c5..f852d86 100644
--- a/drivers/core/root.c
+++ b/drivers/core/root.c
@@ -265,7 +265,7 @@ int dm_scan_plat(bool pre_reloc_only)
static int dm_scan_fdt_node(struct udevice *parent, ofnode parent_node,
bool pre_reloc_only)
{
- int ret = 0, err;
+ int ret = 0, err = 0;
ofnode node;
if (!ofnode_valid(parent_node))
diff --git a/drivers/firmware/scmi/smt.c b/drivers/firmware/scmi/smt.c
index f1915c0..e60c2ae 100644
--- a/drivers/firmware/scmi/smt.c
+++ b/drivers/firmware/scmi/smt.c
@@ -30,8 +30,6 @@ int scmi_dt_get_smt_buffer(struct udevice *dev, struct scmi_smt *smt)
int ret;
struct ofnode_phandle_args args;
struct resource resource;
- fdt32_t faddr;
- phys_addr_t paddr;
ret = dev_read_phandle_with_args(dev, "shmem", NULL, 0, 0, &args);
if (ret)
@@ -41,21 +39,13 @@ int scmi_dt_get_smt_buffer(struct udevice *dev, struct scmi_smt *smt)
if (ret)
return ret;
- /* TEMP workaround for ofnode_read_resource translation issue */
- if (of_live_active()) {
- paddr = resource.start;
- } else {
- faddr = cpu_to_fdt32(resource.start);
- paddr = ofnode_translate_address(args.node, &faddr);
- }
-
smt->size = resource_size(&resource);
if (smt->size < sizeof(struct scmi_smt_header)) {
dev_err(dev, "Shared memory buffer too small\n");
return -EINVAL;
}
- smt->buf = devm_ioremap(dev, paddr, smt->size);
+ smt->buf = devm_ioremap(dev, resource.start, smt->size);
if (!smt->buf)
return -ENOMEM;
diff --git a/drivers/net/mscc_eswitch/jr2_switch.c b/drivers/net/mscc_eswitch/jr2_switch.c
index 570d5a5..d1e5b61 100644
--- a/drivers/net/mscc_eswitch/jr2_switch.c
+++ b/drivers/net/mscc_eswitch/jr2_switch.c
@@ -863,7 +863,6 @@ static int jr2_probe(struct udevice *dev)
int i;
int ret;
struct resource res;
- fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@@ -926,9 +925,8 @@ static int jr2_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
- faddr = cpu_to_fdt32(res.start);
- addr_base = ofnode_translate_address(mdio_node, &faddr);
+ addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */
diff --git a/drivers/net/mscc_eswitch/ocelot_switch.c b/drivers/net/mscc_eswitch/ocelot_switch.c
index 19e725c..d1d0a48 100644
--- a/drivers/net/mscc_eswitch/ocelot_switch.c
+++ b/drivers/net/mscc_eswitch/ocelot_switch.c
@@ -530,7 +530,6 @@ static int ocelot_probe(struct udevice *dev)
struct ocelot_private *priv = dev_get_priv(dev);
int i, ret;
struct resource res;
- fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@@ -578,9 +577,8 @@ static int ocelot_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
- faddr = cpu_to_fdt32(res.start);
- addr_base = ofnode_translate_address(mdio_node, &faddr);
+ addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */
diff --git a/drivers/net/mscc_eswitch/serval_switch.c b/drivers/net/mscc_eswitch/serval_switch.c
index 09ce334..c4b81f7 100644
--- a/drivers/net/mscc_eswitch/serval_switch.c
+++ b/drivers/net/mscc_eswitch/serval_switch.c
@@ -482,7 +482,6 @@ static int serval_probe(struct udevice *dev)
struct serval_private *priv = dev_get_priv(dev);
int i, ret;
struct resource res;
- fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@@ -533,9 +532,8 @@ static int serval_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
- faddr = cpu_to_fdt32(res.start);
- addr_base = ofnode_translate_address(mdio_node, &faddr);
+ addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */
diff --git a/drivers/net/mscc_eswitch/servalt_switch.c b/drivers/net/mscc_eswitch/servalt_switch.c
index 4a4e9e4..f114086 100644
--- a/drivers/net/mscc_eswitch/servalt_switch.c
+++ b/drivers/net/mscc_eswitch/servalt_switch.c
@@ -412,7 +412,6 @@ static int servalt_probe(struct udevice *dev)
struct servalt_private *priv = dev_get_priv(dev);
int i;
struct resource res;
- fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@@ -461,9 +460,8 @@ static int servalt_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
- faddr = cpu_to_fdt32(res.start);
- addr_base = ofnode_translate_address(mdio_node, &faddr);
+ addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */
diff --git a/include/dm/ofnode.h b/include/dm/ofnode.h
index 2c0597c..8a69fd8 100644
--- a/include/dm/ofnode.h
+++ b/include/dm/ofnode.h
@@ -511,6 +511,16 @@ phys_addr_t ofnode_get_addr_index(ofnode node, int index);
phys_addr_t ofnode_get_addr(ofnode node);
/**
+ * ofnode_get_size() - get size from a node
+ *
+ * This reads the register size from a node
+ *
+ * @node: node to read from
+ * @return size of the node's first reg entry, or FDT_SIZE_T_NONE if not present or invalid
+ */
+fdt_size_t ofnode_get_size(ofnode node);
+
+/**
* ofnode_stringlist_search() - find a string in a string list and return index
*
* Note that it is possible for this function to succeed on property values
diff --git a/include/fdtdec.h b/include/fdtdec.h
index 62d1660..e0a49b1 100644
--- a/include/fdtdec.h
+++ b/include/fdtdec.h
@@ -24,15 +24,16 @@
typedef phys_addr_t fdt_addr_t;
typedef phys_size_t fdt_size_t;
-#ifdef CONFIG_PHYS_64BIT
#define FDT_ADDR_T_NONE (-1U)
+#define FDT_SIZE_T_NONE (-1U)
+
+#ifdef CONFIG_PHYS_64BIT
#define fdt_addr_to_cpu(reg) be64_to_cpu(reg)
#define fdt_size_to_cpu(reg) be64_to_cpu(reg)
#define cpu_to_fdt_addr(reg) cpu_to_be64(reg)
#define cpu_to_fdt_size(reg) cpu_to_be64(reg)
typedef fdt64_t fdt_val_t;
#else
-#define FDT_ADDR_T_NONE (-1U)
#define fdt_addr_to_cpu(reg) be32_to_cpu(reg)
#define fdt_size_to_cpu(reg) be32_to_cpu(reg)
#define cpu_to_fdt_addr(reg) cpu_to_be32(reg)
diff --git a/include/tpm-v2.h b/include/tpm-v2.h
index df67a19..7de7d6a 100644
--- a/include/tpm-v2.h
+++ b/include/tpm-v2.h
@@ -53,14 +53,22 @@ struct udevice;
#define TPM2_PT_MAX_COMMAND_SIZE (u32)(TPM2_PT_FIXED + 30)
#define TPM2_PT_MAX_RESPONSE_SIZE (u32)(TPM2_PT_FIXED + 31)
-/* event types */
-#define EV_POST_CODE ((u32)0x00000001)
-#define EV_NO_ACTION ((u32)0x00000003)
-#define EV_SEPARATOR ((u32)0x00000004)
-#define EV_S_CRTM_CONTENTS ((u32)0x00000007)
-#define EV_S_CRTM_VERSION ((u32)0x00000008)
-#define EV_CPU_MICROCODE ((u32)0x00000009)
-#define EV_TABLE_OF_DEVICES ((u32)0x0000000B)
+/*
+ * event types, cf.
+ * "TCG Server Management Domain Firmware Profile Specification",
+ * rev 1.00, 2020-05-01
+ */
+#define EV_POST_CODE ((u32)0x00000001)
+#define EV_NO_ACTION ((u32)0x00000003)
+#define EV_SEPARATOR ((u32)0x00000004)
+#define EV_ACTION ((u32)0x00000005)
+#define EV_TAG ((u32)0x00000006)
+#define EV_S_CRTM_CONTENTS ((u32)0x00000007)
+#define EV_S_CRTM_VERSION ((u32)0x00000008)
+#define EV_CPU_MICROCODE ((u32)0x00000009)
+#define EV_PLATFORM_CONFIG_FLAGS ((u32)0x0000000A)
+#define EV_TABLE_OF_DEVICES ((u32)0x0000000B)
+#define EV_COMPACT_HASH ((u32)0x0000000C)
/* TPMS_TAGGED_PROPERTY Structure */
struct tpms_tagged_property {
diff --git a/lib/fdtdec.c b/lib/fdtdec.c
index 8645891..4b097fb 100644
--- a/lib/fdtdec.c
+++ b/lib/fdtdec.c
@@ -942,7 +942,11 @@ int fdt_get_resource(const void *fdt, int node, const char *property,
while (ptr + na + ns <= end) {
if (i == index) {
- res->start = fdtdec_get_number(ptr, na);
+ if (CONFIG_IS_ENABLED(OF_TRANSLATE))
+ res->start = fdt_translate_address(fdt, node, ptr);
+ else
+ res->start = fdtdec_get_number(ptr, na);
+
res->end = res->start;
res->end += fdtdec_get_number(&ptr[na], ns) - 1;
return 0;
diff --git a/scripts/checkpatch.pl b/scripts/checkpatch.pl
index 4e04758..59a714a 100755
--- a/scripts/checkpatch.pl
+++ b/scripts/checkpatch.pl
@@ -2326,13 +2326,15 @@ sub get_raw_comment {
# suffix: Suffix to expect on member, e.g. "_priv"
# warning: Warning name, e.g. "PRIV_AUTO"
sub u_boot_struct_name {
- my ($line, $auto, $suffix, $warning) = @_;
+ my ($line, $auto, $suffix, $warning, $herecurr) = @_;
# Use _priv as a suffix for the device-private data struct
if ($line =~ /^\+\s*\.${auto}\s*=\s*sizeof\(struct\((\w+)\).*/) {
my $struct_name = $1;
if ($struct_name !~ /^\w+${suffix}/) {
- WARN($warning, "struct \'$struct_name\' should have a ${suffix} suffix");
+ WARN($warning,
+ "struct \'$struct_name\' should have a ${suffix} suffix\n"
+ . $herecurr);
}
}
}
@@ -2410,17 +2412,17 @@ sub u_boot_line {
}
# Check struct names for the 'auto' members of struct driver
- u_boot_struct_name($line, "priv_auto", "_priv", "PRIV_AUTO");
- u_boot_struct_name($line, "plat_auto", "_plat", "PLAT_AUTO");
- u_boot_struct_name($line, "per_child_auto", "_priv", "CHILD_PRIV_AUTO");
+ u_boot_struct_name($line, "priv_auto", "_priv", "PRIV_AUTO", $herecurr);
+ u_boot_struct_name($line, "plat_auto", "_plat", "PLAT_AUTO", $herecurr);
+ u_boot_struct_name($line, "per_child_auto", "_priv", "CHILD_PRIV_AUTO", $herecurr);
u_boot_struct_name($line, "per_child_plat_auto", "_plat",
- "CHILD_PLAT_AUTO");
+ "CHILD_PLAT_AUTO", $herecurr);
# Now the ones for struct uclass, skipping those in common with above
u_boot_struct_name($line, "per_device_auto", "_priv",
- "DEVICE_PRIV_AUTO");
+ "DEVICE_PRIV_AUTO", $herecurr);
u_boot_struct_name($line, "per_device_plat_auto", "_plat",
- "DEVICE_PLAT_AUTO");
+ "DEVICE_PLAT_AUTO", $herecurr);
}
sub process {
diff --git a/test/dm/ofnode.c b/test/dm/ofnode.c
index c539134..e0b525e 100644
--- a/test/dm/ofnode.c
+++ b/test/dm/ofnode.c
@@ -261,3 +261,34 @@ static int dm_test_ofnode_is_enabled(struct unit_test_state *uts)
return 0;
}
DM_TEST(dm_test_ofnode_is_enabled, UT_TESTF_SCAN_PDATA | UT_TESTF_SCAN_FDT);
+
+static int dm_test_ofnode_get_reg(struct unit_test_state *uts)
+{
+ ofnode node;
+ fdt_addr_t addr;
+ fdt_size_t size;
+
+ node = ofnode_path("/translation-test@8000");
+ ut_assert(ofnode_valid(node));
+ addr = ofnode_get_addr(node);
+ size = ofnode_get_size(node);
+ ut_asserteq(0x8000, addr);
+ ut_asserteq(0x4000, size);
+
+ node = ofnode_path("/translation-test@8000/dev@1,100");
+ ut_assert(ofnode_valid(node));
+ addr = ofnode_get_addr(node);
+ size = ofnode_get_size(node);
+ ut_asserteq(0x9000, addr);
+ ut_asserteq(0x1000, size);
+
+ node = ofnode_path("/emul-mux-controller");
+ ut_assert(ofnode_valid(node));
+ addr = ofnode_get_addr(node);
+ size = ofnode_get_size(node);
+ ut_asserteq(FDT_ADDR_T_NONE, addr);
+ ut_asserteq(FDT_SIZE_T_NONE, size);
+
+ return 0;
+}
+DM_TEST(dm_test_ofnode_get_reg, UT_TESTF_SCAN_PDATA | UT_TESTF_SCAN_FDT);
diff --git a/test/py/requirements.txt b/test/py/requirements.txt
index 5b48292..33c5c0b 100644
--- a/test/py/requirements.txt
+++ b/test/py/requirements.txt
@@ -17,6 +17,7 @@ pyparsing==2.4.2
pytest==5.2.1
python-mimeparse==1.6.0
python-subunit==1.3.0
+requests==2.25.1
six==1.12.0
testtools==2.3.0
traceback2==1.4.0
diff --git a/test/test-main.c b/test/test-main.c
index 8c852d7..7afe874 100644
--- a/test/test-main.c
+++ b/test/test-main.c
@@ -135,25 +135,32 @@ static bool ut_test_run_on_flattree(struct unit_test *test)
static bool test_matches(const char *prefix, const char *test_name,
const char *select_name)
{
+ size_t len;
+
if (!select_name)
return true;
- if (!strcmp(test_name, select_name))
+ /* Allow glob expansion in the test name */
+ len = select_name[strlen(select_name) - 1] == '*' ? strlen(select_name) : 0;
+ if (len-- == 1)
return true;
- if (!prefix) {
- const char *p = strstr(test_name, "_test_");
+ if (!strncmp(test_name, select_name, len))
+ return true;
- /* convert xxx_test_yyy to yyy, i.e. remove the suite name */
- if (p)
- test_name = p + 6;
- } else {
+ if (prefix) {
/* All tests have this prefix */
if (!strncmp(test_name, prefix, strlen(prefix)))
test_name += strlen(prefix);
+ } else {
+ const char *p = strstr(test_name, "_test_");
+
+ /* convert xxx_test_yyy to yyy, i.e. remove the suite name */
+ if (p)
+ test_name = p + strlen("_test_");
}
- if (!strcmp(test_name, select_name))
+ if (!strncmp(test_name, select_name, len))
return true;
return false;
diff --git a/tools/binman/entries.rst b/tools/binman/entries.rst
index a91211e..f1c3b7d 100644
--- a/tools/binman/entries.rst
+++ b/tools/binman/entries.rst
@@ -461,8 +461,12 @@ see www.flashrom.org/Flashrom for more information.
When used, this entry will be populated with an FMAP which reflects the
entries in the current image. Note that any hierarchy is squashed, since
-FMAP does not support this. Also, CBFS entries appear as a single entry -
-the sub-entries are ignored.
+FMAP does not support this. Sections are represented as an area appearing
+before its contents, so that it is possible to reconstruct the hierarchy
+from the FMAP by using the offset information. This convention does not
+seem to be documented, but is used in Chromium OS.
+
+CBFS entries appear as a single entry, i.e. the sub-entries are ignored.
@@ -804,6 +808,11 @@ Properties:
missing their contents. The second will produce an image but of
course it will not work.
+Properties:
+ _allow_missing: True if this section permits external blobs to be
+ missing their contents. The second will produce an image but of
+ course it will not work.
+
Since a section is also an entry, it inherits all the properties of entries
too.
diff --git a/tools/binman/etype/fmap.py b/tools/binman/etype/fmap.py
index fe81c6f..cac99b6 100644
--- a/tools/binman/etype/fmap.py
+++ b/tools/binman/etype/fmap.py
@@ -28,8 +28,12 @@ class Entry_fmap(Entry):
When used, this entry will be populated with an FMAP which reflects the
entries in the current image. Note that any hierarchy is squashed, since
- FMAP does not support this. Also, CBFS entries appear as a single entry -
- the sub-entries are ignored.
+ FMAP does not support this. Sections are represented as an area appearing
+ before its contents, so that it is possible to reconstruct the hierarchy
+ from the FMAP by using the offset information. This convention does not
+ seem to be documented, but is used in Chromium OS.
+
+ CBFS entries appear as a single entry, i.e. the sub-entries are ignored.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
@@ -45,6 +49,17 @@ class Entry_fmap(Entry):
tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
(entry.GetPath(), entry.etype, ToHexSize(entries)))
if entries and entry.etype != 'cbfs':
+ # Create an area for the section, which encompasses all entries
+ # within it
+ if entry.image_pos is None:
+ pos = 0
+ else:
+ pos = entry.image_pos - entry.GetRootSkipAtStart()
+
+ # Drop @ symbols in name
+ name = entry.name.replace('@', '')
+ areas.append(
+ fmap_util.FmapArea(pos, entry.size or 0, name, 0))
for subentry in entries.values():
_AddEntries(areas, subentry)
else:
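
The ordering convention described in the docstring above (a section's area precedes the areas of its contents) is enough to rebuild the hierarchy from a flat FMAP. A standalone Python sketch of that reconstruction, reusing the SECTION0/RO_U_BOOT layout from the updated testFmap expectations below (illustration only, not binman code):

    # Flat FMAP areas as (name, offset, size), in the order binman emits them:
    # each section area appears immediately before the entries it contains.
    areas = [
        ('SECTION0', 0, 16),
        ('RO_U_BOOT', 0, 4),
        ('SECTION1', 16, 16),
        ('RW_U_BOOT', 16, 4),
    ]

    def nest(areas):
        """Rebuild a parent/child tree from flat areas using offset/size."""
        root = {'name': '<root>', 'offset': 0, 'size': 1 << 32, 'children': []}
        stack = [root]
        for name, offset, size in areas:
            node = {'name': name, 'offset': offset, 'size': size, 'children': []}
            # Pop back to the closest area that fully contains this one
            while not (stack[-1]['offset'] <= offset and
                       offset + size <= stack[-1]['offset'] + stack[-1]['size']):
                stack.pop()
            stack[-1]['children'].append(node)
            stack.append(node)
        return root

    for area in nest(areas)['children']:
        print(area['name'], [child['name'] for child in area['children']])
    # SECTION0 ['RO_U_BOOT']
    # SECTION1 ['RW_U_BOOT']
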
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index 89fe661..f36823f 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -1341,6 +1341,7 @@ class TestFunctional(unittest.TestCase):
def testSplNoDtb(self):
"""Test that an image with spl/u-boot-spl-nodtb.bin can be created"""
+ self._SetupSplElf()
data = self._DoReadFile('052_u_boot_spl_nodtb.dts')
self.assertEqual(U_BOOT_SPL_NODTB_DATA, data[:len(U_BOOT_SPL_NODTB_DATA)])
@@ -1594,26 +1595,41 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(1, fhdr.ver_major)
self.assertEqual(0, fhdr.ver_minor)
self.assertEqual(0, fhdr.base)
- self.assertEqual(16 + 16 +
- fmap_util.FMAP_HEADER_LEN +
- fmap_util.FMAP_AREA_LEN * 3, fhdr.image_size)
+ expect_size = fmap_util.FMAP_HEADER_LEN + fmap_util.FMAP_AREA_LEN * 5
+ self.assertEqual(16 + 16 + expect_size, fhdr.image_size)
self.assertEqual(b'FMAP', fhdr.name)
- self.assertEqual(3, fhdr.nareas)
- for fentry in fentries:
- self.assertEqual(0, fentry.flags)
-
- self.assertEqual(0, fentries[0].offset)
- self.assertEqual(4, fentries[0].size)
- self.assertEqual(b'RO_U_BOOT', fentries[0].name)
-
- self.assertEqual(16, fentries[1].offset)
- self.assertEqual(4, fentries[1].size)
- self.assertEqual(b'RW_U_BOOT', fentries[1].name)
-
- self.assertEqual(32, fentries[2].offset)
- self.assertEqual(fmap_util.FMAP_HEADER_LEN +
- fmap_util.FMAP_AREA_LEN * 3, fentries[2].size)
- self.assertEqual(b'FMAP', fentries[2].name)
+ self.assertEqual(5, fhdr.nareas)
+ fiter = iter(fentries)
+
+ fentry = next(fiter)
+ self.assertEqual(b'SECTION0', fentry.name)
+ self.assertEqual(0, fentry.offset)
+ self.assertEqual(16, fentry.size)
+ self.assertEqual(0, fentry.flags)
+
+ fentry = next(fiter)
+ self.assertEqual(b'RO_U_BOOT', fentry.name)
+ self.assertEqual(0, fentry.offset)
+ self.assertEqual(4, fentry.size)
+ self.assertEqual(0, fentry.flags)
+
+ fentry = next(fiter)
+ self.assertEqual(b'SECTION1', fentry.name)
+ self.assertEqual(16, fentry.offset)
+ self.assertEqual(16, fentry.size)
+ self.assertEqual(0, fentry.flags)
+
+ fentry = next(fiter)
+ self.assertEqual(b'RW_U_BOOT', fentry.name)
+ self.assertEqual(16, fentry.offset)
+ self.assertEqual(4, fentry.size)
+ self.assertEqual(0, fentry.flags)
+
+ fentry = next(fiter)
+ self.assertEqual(b'FMAP', fentry.name)
+ self.assertEqual(32, fentry.offset)
+ self.assertEqual(expect_size, fentry.size)
+ self.assertEqual(0, fentry.flags)
def testBlobNamedByArg(self):
"""Test we can add a blob with the filename coming from an entry arg"""
@@ -2063,20 +2079,29 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(expected, data[:32])
fhdr, fentries = fmap_util.DecodeFmap(data[36:])
- self.assertEqual(0x100, fhdr.image_size)
+ self.assertEqual(0x180, fhdr.image_size)
+ expect_size = fmap_util.FMAP_HEADER_LEN + fmap_util.FMAP_AREA_LEN * 4
+ fiter = iter(fentries)
- self.assertEqual(0, fentries[0].offset)
- self.assertEqual(4, fentries[0].size)
- self.assertEqual(b'U_BOOT', fentries[0].name)
+ fentry = next(fiter)
+ self.assertEqual(b'U_BOOT', fentry.name)
+ self.assertEqual(0, fentry.offset)
+ self.assertEqual(4, fentry.size)
- self.assertEqual(4, fentries[1].offset)
- self.assertEqual(3, fentries[1].size)
- self.assertEqual(b'INTEL_MRC', fentries[1].name)
+ fentry = next(fiter)
+ self.assertEqual(b'SECTION', fentry.name)
+ self.assertEqual(4, fentry.offset)
+ self.assertEqual(0x20 + expect_size, fentry.size)
- self.assertEqual(36, fentries[2].offset)
- self.assertEqual(fmap_util.FMAP_HEADER_LEN +
- fmap_util.FMAP_AREA_LEN * 3, fentries[2].size)
- self.assertEqual(b'FMAP', fentries[2].name)
+ fentry = next(fiter)
+ self.assertEqual(b'INTEL_MRC', fentry.name)
+ self.assertEqual(4, fentry.offset)
+ self.assertEqual(3, fentry.size)
+
+ fentry = next(fiter)
+ self.assertEqual(b'FMAP', fentry.name)
+ self.assertEqual(36, fentry.offset)
+ self.assertEqual(expect_size, fentry.size)
def testElf(self):
"""Basic test of ELF entries"""
@@ -4272,6 +4297,7 @@ class TestFunctional(unittest.TestCase):
def testTplNoDtb(self):
"""Test that an image with tpl/u-boot-tpl-nodtb.bin can be created"""
+ self._SetupTplElf()
data = self._DoReadFile('192_u_boot_tpl_nodtb.dts')
self.assertEqual(U_BOOT_TPL_NODTB_DATA,
data[:len(U_BOOT_TPL_NODTB_DATA)])
diff --git a/tools/binman/test/095_fmap_x86_section.dts b/tools/binman/test/095_fmap_x86_section.dts
index 4cfce45..fd5f018 100644
--- a/tools/binman/test/095_fmap_x86_section.dts
+++ b/tools/binman/test/095_fmap_x86_section.dts
@@ -7,7 +7,7 @@
binman {
end-at-4gb;
- size = <0x100>;
+ size = <0x180>;
u-boot {
};
section {
diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py
index be8a8fa..ce852eb 100644
--- a/tools/buildman/builder.py
+++ b/tools/buildman/builder.py
@@ -182,6 +182,7 @@ class Builder:
only useful for testing in-tree builds.
work_in_output: Use the output directory as the work directory and
don't write to a separate output directory.
+ thread_exceptions: List of exceptions raised by thread jobs
Private members:
_base_board_dict: Last-summarised Dict of boards
@@ -235,7 +236,8 @@ class Builder:
no_subdirs=False, full_path=False, verbose_build=False,
mrproper=False, per_board_out_dir=False,
config_only=False, squash_config_y=False,
- warnings_as_errors=False, work_in_output=False):
+ warnings_as_errors=False, work_in_output=False,
+ test_thread_exceptions=False):
"""Create a new Builder object
Args:
@@ -262,6 +264,9 @@ class Builder:
warnings_as_errors: Treat all compiler warnings as errors
work_in_output: Use the output directory as the work directory and
don't write to a separate output directory.
+ test_thread_exceptions: Used for tests only, True to make the
+ threads raise an exception instead of reporting their result.
+ This simulates a failure in the code somewhere
"""
self.toolchains = toolchains
self.base_dir = base_dir
@@ -311,13 +316,16 @@ class Builder:
self._re_migration_warning = re.compile(r'^={21} WARNING ={22}\n.*\n=+\n',
re.MULTILINE | re.DOTALL)
+ self.thread_exceptions = []
+ self.test_thread_exceptions = test_thread_exceptions
if self.num_threads:
self._single_builder = None
self.queue = queue.Queue()
self.out_queue = queue.Queue()
for i in range(self.num_threads):
- t = builderthread.BuilderThread(self, i, mrproper,
- per_board_out_dir)
+ t = builderthread.BuilderThread(
+ self, i, mrproper, per_board_out_dir,
+ test_exception=test_thread_exceptions)
t.setDaemon(True)
t.start()
self.threads.append(t)
@@ -1676,6 +1684,7 @@ class Builder:
Tuple containing:
- number of boards that failed to build
- number of boards that issued warnings
+ - list of thread exceptions raised
"""
self.commit_count = len(commits) if commits else 1
self.commits = commits
@@ -1689,7 +1698,7 @@ class Builder:
Print('\rStarting build...', newline=False)
self.SetupBuild(board_selected, commits)
self.ProcessResult(None)
-
+ self.thread_exceptions = []
# Create jobs to build all commits for each board
for brd in board_selected.values():
job = builderthread.BuilderJob()
@@ -1728,5 +1737,8 @@ class Builder:
rate = float(self.count) / duration.total_seconds()
msg += ', duration %s, rate %1.2f' % (duration, rate)
Print(msg)
+ if self.thread_exceptions:
+ Print('Failed: %d thread exceptions' % len(self.thread_exceptions),
+ colour=self.col.RED)
- return (self.fail, self.warned)
+ return (self.fail, self.warned, self.thread_exceptions)
diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py
index 06ed272..48128cf 100644
--- a/tools/buildman/builderthread.py
+++ b/tools/buildman/builderthread.py
@@ -89,16 +89,23 @@ class BuilderThread(threading.Thread):
Members:
builder: The builder which contains information we might need
thread_num: Our thread number (0-n-1), used to decide on a
- temporary directory. If this is -1 then there are no threads
- and we are the (only) main process
+ temporary directory. If this is -1 then there are no threads
+ and we are the (only) main process
+ mrproper: Use 'make mrproper' before each reconfigure
+ per_board_out_dir: True to build in a separate persistent directory per
+ board rather than a thread-specific directory
+ test_exception: Used for testing; True to raise an exception instead of
+ reporting the build result
"""
- def __init__(self, builder, thread_num, mrproper, per_board_out_dir):
+ def __init__(self, builder, thread_num, mrproper, per_board_out_dir,
+ test_exception=False):
"""Set up a new builder thread"""
threading.Thread.__init__(self)
self.builder = builder
self.thread_num = thread_num
self.mrproper = mrproper
self.per_board_out_dir = per_board_out_dir
+ self.test_exception = test_exception
def Make(self, commit, brd, stage, cwd, *args, **kwargs):
"""Run 'make' on a particular commit and board.
@@ -344,10 +351,9 @@ class BuilderThread(threading.Thread):
# Write out the image and function size information and an objdump
env = result.toolchain.MakeEnvironment(self.builder.full_path)
- with open(os.path.join(build_dir, 'out-env'), 'w',
- encoding='utf-8') as fd:
+ with open(os.path.join(build_dir, 'out-env'), 'wb') as fd:
for var in sorted(env.keys()):
- print('%s="%s"' % (var, env[var]), file=fd)
+ fd.write(b'%s="%s"' % (var, env[var]))
lines = []
for fname in BASE_ELF_FILENAMES:
cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
@@ -440,6 +446,22 @@ class BuilderThread(threading.Thread):
target = '%s-%s%s' % (base, dirname, ext)
shutil.copy(fname, os.path.join(build_dir, target))
+ def _SendResult(self, result):
+ """Send a result to the builder for processing
+
+ Args:
+ result: CommandResult object containing the results of the build
+
+ Raises:
+ ValueError if self.test_exception is true (for testing)
+ """
+ if self.test_exception:
+ raise ValueError('test exception')
+ if self.thread_num != -1:
+ self.builder.out_queue.put(result)
+ else:
+ self.builder.ProcessResult(result)
+
def RunJob(self, job):
"""Run a single job
@@ -513,10 +535,7 @@ class BuilderThread(threading.Thread):
# We have the build results, so output the result
self._WriteResult(result, job.keep_outputs, job.work_in_output)
- if self.thread_num != -1:
- self.builder.out_queue.put(result)
- else:
- self.builder.ProcessResult(result)
+ self._SendResult(result)
else:
# Just build the currently checked-out build
result, request_config = self.RunCommit(None, brd, work_dir, True,
@@ -525,10 +544,7 @@ class BuilderThread(threading.Thread):
work_in_output=job.work_in_output)
result.commit_upto = 0
self._WriteResult(result, job.keep_outputs, job.work_in_output)
- if self.thread_num != -1:
- self.builder.out_queue.put(result)
- else:
- self.builder.ProcessResult(result)
+ self._SendResult(result)
def run(self):
"""Our thread's run function
@@ -538,5 +554,9 @@ class BuilderThread(threading.Thread):
"""
while True:
job = self.builder.queue.get()
- self.RunJob(job)
+ try:
+ self.RunJob(job)
+ except Exception as e:
+ print('Thread exception:', e)
+ self.builder.thread_exceptions.append(e)
self.builder.queue.task_done()
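
A minimal standalone sketch of the pattern introduced above (not buildman code): worker threads catch their own exceptions and record them in a shared list, which the main thread inspects once the queue has drained.

    import queue
    import threading

    jobs = queue.Queue()
    thread_exceptions = []          # filled by workers, read by the main thread

    def worker():
        while True:
            job = jobs.get()
            try:
                if job == 'bad':
                    raise ValueError('test exception')
                print('built', job)
            except Exception as exc:
                print('Thread exception:', exc)
                thread_exceptions.append(exc)
            jobs.task_done()

    threading.Thread(target=worker, daemon=True).start()
    for job in ['board0', 'bad', 'board1']:
        jobs.put(job)
    jobs.join()
    print('failures:', len(thread_exceptions))
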
diff --git a/tools/buildman/control.py b/tools/buildman/control.py
index a767570..a98d1b4 100644
--- a/tools/buildman/control.py
+++ b/tools/buildman/control.py
@@ -110,7 +110,7 @@ def ShowToolchainPrefix(boards, toolchains):
return None
def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
- clean_dir=False):
+ clean_dir=False, test_thread_exceptions=False):
"""The main control code for buildman
Args:
@@ -124,6 +124,11 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
arguments. This setting is useful for tests.
board: Boards() object to use, containing a list of available
boards. If this is None it will be created and scanned.
+ clean_dir: Used for tests only, indicates that the existing output_dir
+ should be removed before starting the build
+ test_thread_exceptions: Used for tests only, True to make the threads
+ raise an exception instead of reporting their result. This simulates
+ a failure in the code somewhere
"""
global builder
@@ -328,7 +333,8 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
config_only=options.config_only,
squash_config_y=not options.preserve_config_y,
warnings_as_errors=options.warnings_as_errors,
- work_in_output=options.work_in_output)
+ work_in_output=options.work_in_output,
+ test_thread_exceptions=test_thread_exceptions)
builder.force_config_on_failure = not options.quick
if make_func:
builder.do_make = make_func
@@ -368,9 +374,11 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if options.summary:
builder.ShowSummary(commits, board_selected)
else:
- fail, warned = builder.BuildBoards(commits, board_selected,
- options.keep_outputs, options.verbose)
- if fail:
+ fail, warned, excs = builder.BuildBoards(
+ commits, board_selected, options.keep_outputs, options.verbose)
+ if excs:
+ return 102
+ elif fail:
return 100
elif warned and not options.ignore_warnings:
return 101
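
With the extra element in the return tuple, the three failure modes map to distinct exit codes. A hypothetical caller (not part of buildman) could tell them apart like this; 0 for success is assumed:

    def describe_buildman_result(ret):
        """Map the exit codes used above to a short description."""
        return {
            0: 'all boards built cleanly',
            100: 'some boards failed to build',
            101: 'builds succeeded but produced warnings',
            102: 'a builder thread raised an exception',
        }.get(ret, 'unexpected exit code %d' % ret)

    print(describe_buildman_result(102))
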
diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py
index 3dd2e6e..7edbee0 100644
--- a/tools/buildman/func_test.py
+++ b/tools/buildman/func_test.py
@@ -16,6 +16,7 @@ from buildman import toolchain
from patman import command
from patman import gitutil
from patman import terminal
+from patman import test_util
from patman import tools
settings_data = '''
@@ -219,12 +220,28 @@ class TestFunctional(unittest.TestCase):
return command.RunPipe([[self._buildman_pathname] + list(args)],
capture=True, capture_stderr=True)
- def _RunControl(self, *args, clean_dir=False, boards=None):
+ def _RunControl(self, *args, boards=None, clean_dir=False,
+ test_thread_exceptions=False):
+ """Run buildman
+
+ Args:
+ args: List of arguments to pass
+ boards: Boards object to use, or None to use self._boards
+ clean_dir: Used for tests only, indicates that the existing output_dir
+ should be removed before starting the build
+ test_thread_exceptions: Used for tests only, True to make the threads
+ raise an exception instead of reporting their result. This simulates
+ a failure in the code somewhere
+
+ Returns:
+ result code from buildman
+ """
sys.argv = [sys.argv[0]] + list(args)
options, args = cmdline.ParseArgs()
result = control.DoBuildman(options, args, toolchains=self._toolchains,
make_func=self._HandleMake, boards=boards or self._boards,
- clean_dir=clean_dir)
+ clean_dir=clean_dir,
+ test_thread_exceptions=test_thread_exceptions)
self._builder = control.builder
return result
@@ -555,6 +572,18 @@ class TestFunctional(unittest.TestCase):
self.assertTrue(os.path.exists(os.path.join(board0_dir, 'done')))
self.assertTrue(os.path.exists(os.path.join(board0_dir, 'out-env')))
+ def testEnvironmentUnicode(self):
+ """Test there are no unicode errors when the env has non-ASCII chars"""
+ try:
+ varname = b'buildman_test_var'
+ os.environb[varname] = b'strange\x80chars'
+ self.assertEqual(0, self._RunControl('-o', self._output_dir))
+ board0_dir = os.path.join(self._output_dir, 'current', 'board0')
+ self.assertTrue(os.path.exists(os.path.join(board0_dir, 'done')))
+ self.assertTrue(os.path.exists(os.path.join(board0_dir, 'out-env')))
+ finally:
+ del os.environb[varname]
+
def testWorkInOutput(self):
"""Test the -w option which should write directly to the output dir"""
board_list = board.Boards()
@@ -588,3 +617,10 @@ class TestFunctional(unittest.TestCase):
with self.assertRaises(SystemExit) as e:
self._RunControl('-w', clean_dir=False)
self.assertIn("specify -o", str(e.exception))
+
+ def testThreadExceptions(self):
+ """Test that exceptions in threads are reported"""
+ with test_util.capture_sys_output() as (stdout, stderr):
+ self.assertEqual(102, self._RunControl('-o', self._output_dir,
+ test_thread_exceptions=True))
+ self.assertIn('Thread exception: test exception', stdout.getvalue())
diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py
index acb5a29..fd137f7 100644
--- a/tools/buildman/toolchain.py
+++ b/tools/buildman/toolchain.py
@@ -179,27 +179,35 @@ class Toolchain:
output and possibly unicode encoded output of all build tools by
adding LC_ALL=C.
+ Note that os.environb is used to obtain the environment, since in some
+ cases the environment may contain non-ASCII characters and we see
+ errors like:
+
+ UnicodeEncodeError: 'utf-8' codec can't encode characters in position
+ 569-570: surrogates not allowed
+
Args:
full_path: Return the full path in CROSS_COMPILE and don't set
PATH
Returns:
- Dict containing the environemnt to use. This is based on the current
- environment, with changes as needed to CROSS_COMPILE, PATH and
- LC_ALL.
+ Dict containing the (bytes) environment to use. This is based on the
+ current environment, with changes as needed to CROSS_COMPILE, PATH
+ and LC_ALL.
"""
- env = dict(os.environ)
+ env = dict(os.environb)
wrapper = self.GetWrapper()
if self.override_toolchain:
# We'll use MakeArgs() to provide this
pass
elif full_path:
- env['CROSS_COMPILE'] = wrapper + os.path.join(self.path, self.cross)
+ env[b'CROSS_COMPILE'] = tools.ToBytes(
+ wrapper + os.path.join(self.path, self.cross))
else:
- env['CROSS_COMPILE'] = wrapper + self.cross
- env['PATH'] = self.path + ':' + env['PATH']
+ env[b'CROSS_COMPILE'] = tools.ToBytes(wrapper + self.cross)
+ env[b'PATH'] = tools.ToBytes(self.path) + b':' + env[b'PATH']
- env['LC_ALL'] = 'C'
+ env[b'LC_ALL'] = b'C'
return env
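
The failure being avoided is easy to reproduce in isolation (POSIX only, since os.environb does not exist on Windows); the variable name and value below are purely illustrative and echo the new testEnvironmentUnicode test:

    import os

    # A value containing a stray 0x80 byte round-trips cleanly through
    # os.environb, but its os.environ (str) view holds a surrogate escape
    # that cannot be encoded back to UTF-8.
    os.environb[b'BUILDMAN_DEMO'] = b'strange\x80chars'

    print(os.environb[b'BUILDMAN_DEMO'])        # b'strange\x80chars'

    text = os.environ['BUILDMAN_DEMO']
    try:
        text.encode('utf-8')
    except UnicodeEncodeError as exc:
        print('str form cannot be encoded:', exc)

    del os.environb[b'BUILDMAN_DEMO']
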
diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py
index 1374f01..2d42480 100644
--- a/tools/dtoc/dtb_platdata.py
+++ b/tools/dtoc/dtb_platdata.py
@@ -824,8 +824,6 @@ class DtbPlatdata():
self.buf('\t},\n')
def generate_uclasses(self):
- if not self.check_instantiate(True):
- return
self.out('\n')
self.out('#include <common.h>\n')
self.out('#include <dm.h>\n')
@@ -1038,22 +1036,6 @@ class DtbPlatdata():
self.out(''.join(self.get_buf()))
- def check_instantiate(self, require):
- """Check if self._instantiate is set to the required value
-
- If not, this outputs a message into the current file
-
- Args:
- require: True to require --instantiate, False to require that it not
- be enabled
- """
- if require != self._instantiate:
- self.out(
- '/* This file is not used: --instantiate was %senabled */\n' %
- ('not ' if require else ''))
- return False
- return True
-
def generate_plat(self):
"""Generate device defintions for the platform data
@@ -1064,8 +1046,6 @@ class DtbPlatdata():
See the documentation in doc/driver-model/of-plat.rst for more
information.
"""
- if not self.check_instantiate(False):
- return
self.out('/* Allow use of U_BOOT_DRVINFO() in this file */\n')
self.out('#define DT_PLAT_C\n')
self.out('\n')
@@ -1102,8 +1082,6 @@ class DtbPlatdata():
See the documentation in doc/driver-model/of-plat.rst for more
information.
"""
- if not self.check_instantiate(True):
- return
self.out('#include <common.h>\n')
self.out('#include <dm.h>\n')
self.out('#include <dt-structs.h>\n')
@@ -1216,7 +1194,7 @@ def run_steps(args, dtb_file, include_disabled, output, output_dirs, phase,
plat.assign_seqs()
# Figure out what output files we plan to generate
- output_files = OUTPUT_FILES_COMMON
+ output_files = dict(OUTPUT_FILES_COMMON)
if instantiate:
output_files.update(OUTPUT_FILES_INST)
else:
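
The dict() copy above presumably matters because OUTPUT_FILES_COMMON is module-level state: calling update() on it directly would leak the instantiate-specific entries into every later run in the same process. A tiny sketch of the pitfall, using stand-in names rather than the real dtoc tables:

    COMMON = {'decl': 'dt-decl.h'}
    INST = {'device': 'dt-device.c'}

    def outputs_buggy(instantiate):
        files = COMMON              # same object as the module-level dict
        if instantiate:
            files.update(INST)      # silently modifies COMMON as well
        return files

    def outputs_fixed(instantiate):
        files = dict(COMMON)        # work on a copy
        if instantiate:
            files.update(INST)
        return files

    outputs_buggy(True)
    print(sorted(COMMON))           # ['decl', 'device']  <- polluted
    COMMON = {'decl': 'dt-decl.h'}  # reset, then use the fixed version
    outputs_fixed(True)
    print(sorted(COMMON))           # ['decl']
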
diff --git a/tools/dtoc/test_dtoc.py b/tools/dtoc/test_dtoc.py
index a05e3d9..0b2805f 100755
--- a/tools/dtoc/test_dtoc.py
+++ b/tools/dtoc/test_dtoc.py
@@ -74,10 +74,6 @@ UCLASS_HEADER_COMMON = '''/*
*/
'''
-UCLASS_HEADER = UCLASS_HEADER_COMMON + '''
-/* This file is not used: --instantiate was not enabled */
-'''
-
# Scanner saved from a previous run of the tests (to speed things up)
saved_scan = None
@@ -412,7 +408,6 @@ U_BOOT_DRVINFO(spl_test3) = {
};
'''
- uclass_text = UCLASS_HEADER
uclass_text_inst = '''
#include <common.h>
@@ -512,15 +507,6 @@ DM_UCLASS_INST(testfdt) = {
};
'''
- device_text = '''/*
- * DO NOT MODIFY
- *
- * Declares the DM_DEVICE_INST() records.
- * This was generated by dtoc from a .dtb (device tree binary) file.
- */
-
-/* This file is not used: --instantiate was not enabled */
-'''
device_text_inst = '''/*
* DO NOT MODIFY
*
@@ -833,8 +819,7 @@ DM_DEVICE_INST(test0) = {
self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False)
self._check_strings(
- self.decl_text + self.device_text + self.platdata_text +
- self.struct_text + self.uclass_text, data)
+ self.decl_text + self.platdata_text + self.struct_text, data)
def test_driver_alias(self):
"""Test output from a device tree file with a driver alias"""
@@ -1537,8 +1522,7 @@ U_BOOT_DRVINFO(spl_test2) = {
self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False)
self._check_strings(
- self.decl_text + self.device_text + self.platdata_text +
- self.struct_text + self.uclass_text, data)
+ self.decl_text + self.platdata_text + self.struct_text, data)
def test_no_command(self):
"""Test running dtoc without a command"""
@@ -1566,8 +1550,7 @@ U_BOOT_DRVINFO(spl_test2) = {
self.assertIn("Must specify either output or output_dirs, not both",
str(exc.exception))
- def test_output_dirs(self):
- """Test outputting files to a directory"""
+ def check_output_dirs(self, instantiate):
# Remove the directory so that files from other tests are not there
tools._RemoveOutputDir()
tools.PrepareOutputDir(None)
@@ -1579,14 +1562,30 @@ U_BOOT_DRVINFO(spl_test2) = {
self.assertEqual(2, len(fnames))
dtb_platdata.run_steps(
- ['all'], dtb_file, False, None, [outdir], None, False,
+ ['all'], dtb_file, False, None, [outdir], None, instantiate,
warning_disabled=True, scan=copy_scan())
fnames = glob.glob(outdir + '/*')
- self.assertEqual(7, len(fnames))
+ return fnames
+
+ def test_output_dirs(self):
+ """Test outputting files to a directory"""
+ fnames = self.check_output_dirs(False)
+ self.assertEqual(5, len(fnames))
leafs = set(os.path.basename(fname) for fname in fnames)
self.assertEqual(
{'dt-structs-gen.h', 'source.dts', 'dt-plat.c', 'source.dtb',
+ 'dt-decl.h'},
+ leafs)
+
+ def test_output_dirs_inst(self):
+ """Test outputting files to a directory with instantiation"""
+ fnames = self.check_output_dirs(True)
+ self.assertEqual(6, len(fnames))
+
+ leafs = set(os.path.basename(fname) for fname in fnames)
+ self.assertEqual(
+ {'dt-structs-gen.h', 'source.dts', 'source.dtb',
'dt-uclass.c', 'dt-decl.h', 'dt-device.c'},
leafs)
@@ -1785,14 +1784,6 @@ U_BOOT_DRVINFO(spl_test2) = {
self._check_strings(self.decl_text_inst, data)
- self.run_test(['platdata'], dtb_file, output, True)
- with open(output) as infile:
- data = infile.read()
-
- self._check_strings(C_HEADER_PRE + '''
-/* This file is not used: --instantiate was enabled */
-''', data)
-
self.run_test(['uclass'], dtb_file, output, True)
with open(output) as infile:
data = infile.read()
diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py
index 63a8e37..8978df2 100644
--- a/tools/patman/checkpatch.py
+++ b/tools/patman/checkpatch.py
@@ -10,7 +10,15 @@ import sys
from patman import command
from patman import gitutil
from patman import terminal
-from patman import tools
+
+EMACS_PREFIX = r'(?:[0-9]{4}.*\.patch:[0-9]+: )?'
+TYPE_NAME = r'([A-Z_]+:)?'
+RE_ERROR = re.compile(r'ERROR:%s (.*)' % TYPE_NAME)
+RE_WARNING = re.compile(EMACS_PREFIX + r'WARNING:%s (.*)' % TYPE_NAME)
+RE_CHECK = re.compile(r'CHECK:%s (.*)' % TYPE_NAME)
+RE_FILE = re.compile(r'#(\d+): (FILE: ([^:]*):(\d+):)?')
+RE_NOTE = re.compile(r'NOTE: (.*)')
+
def FindCheckPatch():
top_level = gitutil.GetTopLevel()
@@ -38,14 +46,81 @@ def FindCheckPatch():
sys.exit('Cannot find checkpatch.pl - please put it in your ' +
'~/bin directory or use --no-check')
-def CheckPatch(fname, verbose=False, show_types=False):
- """Run checkpatch.pl on a file.
+
+def CheckPatchParseOneMessage(message):
+ """Parse one checkpatch message
Args:
- fname: Filename to check
+ message: string to parse
+
+ Returns:
+ dict:
+ 'type': error or warning
+ 'msg': text message
+ 'file' : filename
+ 'line': line number
+ """
+
+ if RE_NOTE.match(message):
+ return {}
+
+ item = {}
+
+ err_match = RE_ERROR.match(message)
+ warn_match = RE_WARNING.match(message)
+ check_match = RE_CHECK.match(message)
+ if err_match:
+ item['cptype'] = err_match.group(1)
+ item['msg'] = err_match.group(2)
+ item['type'] = 'error'
+ elif warn_match:
+ item['cptype'] = warn_match.group(1)
+ item['msg'] = warn_match.group(2)
+ item['type'] = 'warning'
+ elif check_match:
+ item['cptype'] = check_match.group(1)
+ item['msg'] = check_match.group(2)
+ item['type'] = 'check'
+ else:
+ message_indent = ' '
+ print('patman: failed to parse checkpatch message:\n%s' %
+ (message_indent + message.replace('\n', '\n' + message_indent)),
+ file=sys.stderr)
+ return {}
+
+ file_match = RE_FILE.search(message)
+ # some messages have no file, catch those here
+ no_file_match = any(s in message for s in [
+ '\nSubject:', 'Missing Signed-off-by: line(s)',
+ 'does MAINTAINERS need updating'
+ ])
+
+ if file_match:
+ err_fname = file_match.group(3)
+ if err_fname:
+ item['file'] = err_fname
+ item['line'] = int(file_match.group(4))
+ else:
+ item['file'] = '<patch>'
+ item['line'] = int(file_match.group(1))
+ elif no_file_match:
+ item['file'] = '<patch>'
+ else:
+ message_indent = ' '
+ print('patman: failed to find file / line information:\n%s' %
+ (message_indent + message.replace('\n', '\n' + message_indent)),
+ file=sys.stderr)
+
+ return item
+
+
+def CheckPatchParse(checkpatch_output, verbose=False):
+ """Parse checkpatch.pl output
+
+ Args:
+ checkpatch_output: string to parse
verbose: True to print out every line of the checkpatch output as it is
parsed
- show_types: Tell checkpatch to show the type (number) of each message
Returns:
namedtuple containing:
@@ -59,67 +134,38 @@ def CheckPatch(fname, verbose=False, show_types=False):
warnings: Number of warnings
checks: Number of checks
lines: Number of lines
- stdout: Full output of checkpatch
+ stdout: checkpatch_output
"""
fields = ['ok', 'problems', 'errors', 'warnings', 'checks', 'lines',
'stdout']
result = collections.namedtuple('CheckPatchResult', fields)
+ result.stdout = checkpatch_output
result.ok = False
result.errors, result.warnings, result.checks = 0, 0, 0
result.lines = 0
result.problems = []
- chk = FindCheckPatch()
- item = {}
- args = [chk, '--no-tree']
- if show_types:
- args.append('--show-types')
- result.stdout = command.Output(*args, fname, raise_on_error=False)
- #pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- #stdout, stderr = pipe.communicate()
# total: 0 errors, 0 warnings, 159 lines checked
# or:
# total: 0 errors, 2 warnings, 7 checks, 473 lines checked
- emacs_prefix = '(?:[0-9]{4}.*\.patch:[0-9]+: )?'
- emacs_stats = '(?:[0-9]{4}.*\.patch )?'
+ emacs_stats = r'(?:[0-9]{4}.*\.patch )?'
re_stats = re.compile(emacs_stats +
- 'total: (\\d+) errors, (\d+) warnings, (\d+)')
+ r'total: (\d+) errors, (\d+) warnings, (\d+)')
re_stats_full = re.compile(emacs_stats +
- 'total: (\\d+) errors, (\d+) warnings, (\d+)'
- ' checks, (\d+)')
- re_ok = re.compile('.*has no obvious style problems')
- re_bad = re.compile('.*has style problems, please review')
- type_name = '([A-Z_]+:)?'
- re_error = re.compile('ERROR:%s (.*)' % type_name)
- re_warning = re.compile(emacs_prefix + 'WARNING:%s (.*)' % type_name)
- re_check = re.compile('CHECK:%s (.*)' % type_name)
- re_file = re.compile('#(\d+): (FILE: ([^:]*):(\d+):)?')
- re_note = re.compile('NOTE: (.*)')
- re_new_file = re.compile('new file mode .*')
- indent = ' ' * 6
- for line in result.stdout.splitlines():
+ r'total: (\d+) errors, (\d+) warnings, (\d+)'
+ r' checks, (\d+)')
+ re_ok = re.compile(r'.*has no obvious style problems')
+ re_bad = re.compile(r'.*has style problems, please review')
+
+ # A blank line indicates the end of a message
+ for message in result.stdout.split('\n\n'):
if verbose:
- print(line)
-
- # A blank line indicates the end of a message
- if not line:
- if item:
- result.problems.append(item)
- item = {}
- continue
- if re_note.match(line):
- continue
- # Skip lines which quote code
- if line.startswith(indent):
- continue
- # Skip code quotes
- if line.startswith('+'):
- continue
- if re_new_file.match(line):
- continue
- match = re_stats_full.match(line)
+ print(message)
+
+ # either find stats, the verdict, or delegate
+ match = re_stats_full.match(message)
if not match:
- match = re_stats.match(line)
+ match = re_stats.match(message)
if match:
result.errors = int(match.group(1))
result.warnings = int(match.group(2))
@@ -128,46 +174,50 @@ def CheckPatch(fname, verbose=False, show_types=False):
result.lines = int(match.group(4))
else:
result.lines = int(match.group(3))
- continue
- elif re_ok.match(line):
+ elif re_ok.match(message):
result.ok = True
- continue
- elif re_bad.match(line):
+ elif re_bad.match(message):
result.ok = False
- continue
- err_match = re_error.match(line)
- warn_match = re_warning.match(line)
- file_match = re_file.match(line)
- check_match = re_check.match(line)
- subject_match = line.startswith('Subject:')
- if err_match:
- item['cptype'] = err_match.group(1)
- item['msg'] = err_match.group(2)
- item['type'] = 'error'
- elif warn_match:
- item['cptype'] = warn_match.group(1)
- item['msg'] = warn_match.group(2)
- item['type'] = 'warning'
- elif check_match:
- item['cptype'] = check_match.group(1)
- item['msg'] = check_match.group(2)
- item['type'] = 'check'
- elif file_match:
- err_fname = file_match.group(3)
- if err_fname:
- item['file'] = err_fname
- item['line'] = int(file_match.group(4))
- else:
- item['file'] = '<patch>'
- item['line'] = int(file_match.group(1))
- elif subject_match:
- item['file'] = '<patch subject>'
- item['line'] = None
else:
- print('bad line "%s", %d' % (line, len(line)))
+ problem = CheckPatchParseOneMessage(message)
+ if problem:
+ result.problems.append(problem)
return result
+
+def CheckPatch(fname, verbose=False, show_types=False):
+ """Run checkpatch.pl on a file and parse the results.
+
+ Args:
+ fname: Filename to check
+ verbose: True to print out every line of the checkpatch output as it is
+ parsed
+ show_types: Tell checkpatch to show the type (number) of each message
+
+ Returns:
+ namedtuple containing:
+ ok: False=failure, True=ok
+ problems: List of problems, each a dict:
+ 'type': error or warning
+ 'msg': text message
+ 'file' : filename
+ 'line': line number
+ errors: Number of errors
+ warnings: Number of warnings
+ checks: Number of checks
+ lines: Number of lines
+ stdout: Full output of checkpatch
+ """
+ chk = FindCheckPatch()
+ args = [chk, '--no-tree']
+ if show_types:
+ args.append('--show-types')
+ output = command.Output(*args, fname, raise_on_error=False)
+
+ return CheckPatchParse(output, verbose)
+
+
def GetWarningMsg(col, msg_type, fname, line, msg):
'''Create a message for a given file/line
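
Splitting the parsing out of CheckPatch() means it can be exercised on canned text without running checkpatch.pl at all. A hedged usage sketch (the sample output is invented, and it assumes the patman package is importable, e.g. when run from the tools/ directory):

    from patman import checkpatch

    SAMPLE = '\n'.join([
        'WARNING: line over 80 characters',
        '#12: FILE: common/board_f.c:42:',
        '+ some very long line ...',
        '',
        'total: 0 errors, 1 warnings, 100 lines checked',
        '',
        'your patch has style problems, please review.',
    ])

    result = checkpatch.CheckPatchParse(SAMPLE)
    print(result.ok)            # False
    print(result.warnings)      # 1
    for problem in result.problems:
        print(problem['type'], problem.get('file'), problem.get('line'),
              problem['msg'])
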
diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py
index 450fe66..1ce6448 100644
--- a/tools/patman/func_test.py
+++ b/tools/patman/func_test.py
@@ -25,13 +25,8 @@ from patman import terminal
from patman import tools
from patman.test_util import capture_sys_output
-try:
- import pygit2
- HAVE_PYGIT2 = True
- from patman import status
-except ModuleNotFoundError:
- HAVE_PYGIT2 = False
-
+import pygit2
+from patman import status
class TestFunctional(unittest.TestCase):
"""Functional tests for checking that patman behaves correctly"""
@@ -458,7 +453,6 @@ complicated as possible''')
repo.branches.local.create('base', base_target)
return repo
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testBranch(self):
"""Test creating patches from a branch"""
repo = self.make_git_tree()
@@ -604,7 +598,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
["Found possible blank line(s) at end of file 'lib/fdtdec.c'"],
pstrm.commit.warn)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testNoUpstream(self):
"""Test CountCommitsToBranch when there is no upstream"""
repo = self.make_git_tree()
@@ -642,7 +635,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
{'id': '1', 'name': 'Some patch'}]}
raise ValueError('Fake Patchwork does not understand: %s' % subpath)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testStatusMismatch(self):
"""Test Patchwork patches not matching the series"""
series = Series()
@@ -652,7 +644,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertIn('Warning: Patchwork reports 1 patches, series has 0',
err.getvalue())
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testStatusReadPatch(self):
"""Test handling a single patch in Patchwork"""
series = Series()
@@ -665,7 +656,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual('1', patch.id)
self.assertEqual('Some patch', patch.raw_subject)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testParseSubject(self):
"""Test parsing of the patch subject"""
patch = status.Patch('1')
@@ -728,7 +718,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual('RESEND', patch.prefix)
self.assertEqual(None, patch.version)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testCompareSeries(self):
"""Test operation of compare_with_series()"""
commit1 = Commit('abcd')
@@ -831,7 +820,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
return patch.comments
raise ValueError('Fake Patchwork does not understand: %s' % subpath)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testFindNewResponses(self):
"""Test operation of find_new_responses()"""
commit1 = Commit('abcd')
@@ -970,7 +958,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
return patch.comments
raise ValueError('Fake Patchwork does not understand: %s' % subpath)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testCreateBranch(self):
"""Test operation of create_branch()"""
repo = self.make_git_tree()
@@ -1058,7 +1045,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual('Reviewed-by: %s' % self.mary, next(lines))
self.assertEqual('Tested-by: %s' % self.leb, next(lines))
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testParseSnippets(self):
"""Test parsing of review snippets"""
text = '''Hi Fred,
@@ -1142,7 +1128,6 @@ line8
'line2', 'line3', 'line4', 'line5', 'line6', 'line7', 'line8']],
pstrm.snippets)
- @unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testReviewSnippets(self):
"""Test showing of review snippets"""
def _to_submitter(who):