aboutsummaryrefslogtreecommitdiff
path: root/gdb/utils.c
diff options
context:
space:
mode:
authorJoel Brobecker <brobecker@adacore.com>2018-11-14 18:18:49 -0500
committerJoel Brobecker <brobecker@adacore.com>2018-11-14 18:18:49 -0500
commita99bc3d23c133fe80f7ccac44246a471fb075bc3 (patch)
tree8aabba85c80b30af36df1e73eeaeca83cbeb13fb /gdb/utils.c
parent8b2d40cbba8fbf98d6e031c7d8c7e2ac1baae2d9 (diff)
downloadgdb-a99bc3d23c133fe80f7ccac44246a471fb075bc3.zip
gdb-a99bc3d23c133fe80f7ccac44246a471fb075bc3.tar.gz
gdb-a99bc3d23c133fe80f7ccac44246a471fb075bc3.tar.bz2
delete ada-lang.c::move_bits, sharing and re-using copy_bitwise instead
This patch deletes ada-lang.c's move_bits function entirely, and replaces all calls to it by calls to copy_bitwise instead. Because the latter function was declared locally inside dwarf2loc.c, this patch also moves the function to a common area and makes it non-static. gdb/ChangeLog: * ada-lang.c (move_bits): Delete. Update all callers to use copy_bitwise instead. * dwarf2loc.c (copy_bitwise, bits_to_str::bits_to_str) (selftests::check_copy_bitwise, selftests::copy_bitwise_tests): Move from here to utils.c. (_initialize_dwarf2loc): Remove call to register copy_bitwise selftests. * utils.h (copy_bitwise): Add declaration. * utils.c (copy_bitwise, bits_to_str::bits_to_str) (selftests::check_copy_bitwise, selftests::copy_bitwise_tests): Moved here from dwarf2loc.c. (_initialize_utils): Register copy_bitwise selftests. Tested on x86_64-linux, no regression. Also tested using AdaCore's testsuite on a collection of small endian and big endian platforms.
Diffstat (limited to 'gdb/utils.c')
-rw-r--r--gdb/utils.c229
1 files changed, 229 insertions, 0 deletions
diff --git a/gdb/utils.c b/gdb/utils.c
index 8d4a744..c088d8b 100644
--- a/gdb/utils.c
+++ b/gdb/utils.c
@@ -3219,6 +3219,234 @@ strip_leading_path_elements (const char *path, int n)
return p;
}
+/* See utils.h. */
+
+void
+copy_bitwise (gdb_byte *dest, ULONGEST dest_offset,
+ const gdb_byte *source, ULONGEST source_offset,
+ ULONGEST nbits, int bits_big_endian)
+{
+ unsigned int buf, avail;
+
+ if (nbits == 0)
+ return;
+
+ if (bits_big_endian)
+ {
+ /* Start from the end, then work backwards. */
+ dest_offset += nbits - 1;
+ dest += dest_offset / 8;
+ dest_offset = 7 - dest_offset % 8;
+ source_offset += nbits - 1;
+ source += source_offset / 8;
+ source_offset = 7 - source_offset % 8;
+ }
+ else
+ {
+ dest += dest_offset / 8;
+ dest_offset %= 8;
+ source += source_offset / 8;
+ source_offset %= 8;
+ }
+
+ /* Fill BUF with DEST_OFFSET bits from the destination and 8 -
+ SOURCE_OFFSET bits from the source. */
+ buf = *(bits_big_endian ? source-- : source++) >> source_offset;
+ buf <<= dest_offset;
+ buf |= *dest & ((1 << dest_offset) - 1);
+
+ /* NBITS: bits yet to be written; AVAIL: BUF's fill level. */
+ nbits += dest_offset;
+ avail = dest_offset + 8 - source_offset;
+
+ /* Flush 8 bits from BUF, if appropriate. */
+ if (nbits >= 8 && avail >= 8)
+ {
+ *(bits_big_endian ? dest-- : dest++) = buf;
+ buf >>= 8;
+ avail -= 8;
+ nbits -= 8;
+ }
+
+ /* Copy the middle part. */
+ if (nbits >= 8)
+ {
+ size_t len = nbits / 8;
+
+ /* Use a faster method for byte-aligned copies. */
+ if (avail == 0)
+ {
+ if (bits_big_endian)
+ {
+ dest -= len;
+ source -= len;
+ memcpy (dest + 1, source + 1, len);
+ }
+ else
+ {
+ memcpy (dest, source, len);
+ dest += len;
+ source += len;
+ }
+ }
+ else
+ {
+ while (len--)
+ {
+ buf |= *(bits_big_endian ? source-- : source++) << avail;
+ *(bits_big_endian ? dest-- : dest++) = buf;
+ buf >>= 8;
+ }
+ }
+ nbits %= 8;
+ }
+
+ /* Write the last byte. */
+ if (nbits)
+ {
+ if (avail < nbits)
+ buf |= *source << avail;
+
+ buf &= (1 << nbits) - 1;
+ *dest = (*dest & (~0 << nbits)) | buf;
+ }
+}
+
+#if GDB_SELF_TEST
+
+namespace selftests {
+
+/* Helper function for the unit test of copy_bitwise. Convert NBITS bits
+ out of BITS, starting at OFFS, to the respective '0'/'1'-string. MSB0
+ specifies whether to assume big endian bit numbering. Store the
+ resulting (not null-terminated) string at STR. */
+
+static void
+bits_to_str (char *str, const gdb_byte *bits, ULONGEST offs,
+ ULONGEST nbits, int msb0)
+{
+ unsigned int j;
+ size_t i;
+
+ for (i = offs / 8, j = offs % 8; nbits; i++, j = 0)
+ {
+ unsigned int ch = bits[i];
+ for (; j < 8 && nbits; j++, nbits--)
+ *str++ = (ch & (msb0 ? (1 << (7 - j)) : (1 << j))) ? '1' : '0';
+ }
+}
+
+/* Check one invocation of copy_bitwise with the given parameters. */
+
+static void
+check_copy_bitwise (const gdb_byte *dest, unsigned int dest_offset,
+ const gdb_byte *source, unsigned int source_offset,
+ unsigned int nbits, int msb0)
+{
+ size_t len = align_up (dest_offset + nbits, 8);
+ char *expected = (char *) alloca (len + 1);
+ char *actual = (char *) alloca (len + 1);
+ gdb_byte *buf = (gdb_byte *) alloca (len / 8);
+
+ /* Compose a '0'/'1'-string that represents the expected result of
+ copy_bitwise below:
+ Bits from [0, DEST_OFFSET) are filled from DEST.
+ Bits from [DEST_OFFSET, DEST_OFFSET + NBITS) are filled from SOURCE.
+ Bits from [DEST_OFFSET + NBITS, LEN) are filled from DEST.
+
+ E.g., with:
+ dest_offset: 4
+ nbits: 2
+ len: 8
+ dest: 00000000
+ source: 11111111
+
+ We should end up with:
+ buf: 00001100
+ DDDDSSDD (D=dest, S=source)
+ */
+ bits_to_str (expected, dest, 0, len, msb0);
+ bits_to_str (expected + dest_offset, source, source_offset, nbits, msb0);
+
+ /* Fill BUF with data from DEST, apply copy_bitwise, and convert the
+ result to a '0'/'1'-string. */
+ memcpy (buf, dest, len / 8);
+ copy_bitwise (buf, dest_offset, source, source_offset, nbits, msb0);
+ bits_to_str (actual, buf, 0, len, msb0);
+
+ /* Compare the resulting strings. */
+ expected[len] = actual[len] = '\0';
+ if (strcmp (expected, actual) != 0)
+ error (_("copy_bitwise %s != %s (%u+%u -> %u)"),
+ expected, actual, source_offset, nbits, dest_offset);
+}
+
+/* Unit test for copy_bitwise. */
+
+static void
+copy_bitwise_tests (void)
+{
+ /* Data to be used as both source and destination buffers. The two
+ arrays below represent the lsb0- and msb0- encoded versions of the
+ following bit string, respectively:
+ 00000000 00011111 11111111 01001000 10100101 11110010
+ This pattern is chosen such that it contains:
+ - constant 0- and 1- chunks of more than a full byte;
+ - 0/1- and 1/0 transitions on all bit positions within a byte;
+ - several sufficiently asymmetric bytes.
+ */
+ static const gdb_byte data_lsb0[] = {
+ 0x00, 0xf8, 0xff, 0x12, 0xa5, 0x4f
+ };
+ static const gdb_byte data_msb0[] = {
+ 0x00, 0x1f, 0xff, 0x48, 0xa5, 0xf2
+ };
+
+ constexpr size_t data_nbits = 8 * sizeof (data_lsb0);
+ constexpr unsigned max_nbits = 24;
+
+ /* Try all combinations of:
+ lsb0/msb0 bit order (using the respective data array)
+ X [0, MAX_NBITS] copy bit width
+ X feasible source offsets for the given copy bit width
+ X feasible destination offsets
+ */
+ for (int msb0 = 0; msb0 < 2; msb0++)
+ {
+ const gdb_byte *data = msb0 ? data_msb0 : data_lsb0;
+
+ for (unsigned int nbits = 1; nbits <= max_nbits; nbits++)
+ {
+ const unsigned int max_offset = data_nbits - nbits;
+
+ for (unsigned source_offset = 0;
+ source_offset <= max_offset;
+ source_offset++)
+ {
+ for (unsigned dest_offset = 0;
+ dest_offset <= max_offset;
+ dest_offset++)
+ {
+ check_copy_bitwise (data + dest_offset / 8,
+ dest_offset % 8,
+ data + source_offset / 8,
+ source_offset % 8,
+ nbits, msb0);
+ }
+ }
+ }
+
+ /* Special cases: copy all, copy nothing. */
+ check_copy_bitwise (data_lsb0, 0, data_msb0, 0, data_nbits, msb0);
+ check_copy_bitwise (data_msb0, 0, data_lsb0, 0, data_nbits, msb0);
+ check_copy_bitwise (data, data_nbits - 7, data, 9, 0, msb0);
+ }
+}
+
+} /* namespace selftests */
+
+#endif /* GDB_SELF_TEST */
+
void
_initialize_utils (void)
{
@@ -3228,5 +3456,6 @@ _initialize_utils (void)
#if GDB_SELF_TEST
selftests::register_test ("gdb_realpath", gdb_realpath_tests);
+ selftests::register_test ("copy_bitwise", selftests::copy_bitwise_tests);
#endif
}