author     Simon Marchi <simon.marchi@ericsson.com>  2017-09-04 10:46:36 +0200
committer  Simon Marchi <simon.marchi@ericsson.com>  2017-09-04 10:46:36 +0200
commit     0860c437cbe4efd8752531d0464007f448fc8274 (patch)
tree       2aed7314eee9241b3b9557baa29d738a45c91973 /gdb/btrace.c
parent     a826403eb6f0ad2fb8c88b45ea3cf5ba39cdaa75 (diff)
btrace: Store btrace_insn in an std::vector
Because it contains a non-POD type field (flags), the type btrace_insn
should be new'ed/delete'd.  Replace the VEC (btrace_insn_s) in
btrace_function with an std::vector.

gdb/ChangeLog:

	* btrace.h (btrace_insn_s, DEF_VEC_O (btrace_insn_s)): Remove.
	(btrace_function) <insn>: Change type to use std::vector.
	* btrace.c (ftrace_debug, ftrace_call_num_insn, ftrace_find_call,
	ftrace_new_gap, ftrace_update_function, ftrace_update_insns,
	ftrace_compute_global_level_offset, btrace_stitch_bts,
	btrace_clear, btrace_insn_get, btrace_insn_end, btrace_insn_next,
	btrace_insn_prev): Adjust to change to std::vector.
	(ftrace_update_insns): Adjust to change to std::vector, change
	type of INSN parameter.
	(btrace_compute_ftrace_bts): Adjust call to ftrace_update_insns.
	* record-btrace.c (btrace_call_history_insn_range,
	btrace_compute_src_line_range,
	record_btrace_frame_prev_register): Adjust to change to
	std::vector.
	* python/py-record-btrace.c (recpy_bt_func_instructions): Adjust
	to change to std::vector.
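Going by the ChangeLog entry, the btrace.h side of this change (not shown in
this view, which is limited to gdb/btrace.c) amounts roughly to the member
declaration below.  This is an illustrative sketch, not the verbatim header
diff; the other members of btrace_function are elided.

    /* Illustrative sketch of the btrace.h change described in the
       ChangeLog above; not the verbatim header diff.  */
    struct btrace_function
    {
      /* ... other members elided ...  */

      /* Previously: VEC (btrace_insn_s) *insn; -- a GDB VEC, which grows
	 and copies its elements with memcpy and therefore cannot safely
	 hold a btrace_insn once its `flags' member is non-POD.  */

      /* Now a std::vector, which constructs, copies and destroys its
	 btrace_insn elements correctly.  */
      std::vector<btrace_insn> insn;
    };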
Diffstat (limited to 'gdb/btrace.c')
-rw-r--r--  gdb/btrace.c | 59
1 file changed, 26 insertions(+), 33 deletions(-)
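The hunks below apply a small set of mechanical translations from the old VEC
API to the corresponding std::vector member functions.  As a quick reference,
the pairings used on the `insn' member in this file look roughly like the
following; the helper function and its parameters are hypothetical, only the
pairings themselves are taken from the diff.

    /* Hypothetical helper collecting the VEC -> std::vector pairings used
       in the hunks below.  INSN, I and INDEX stand in for bfun->insn, a
       btrace_insn and an index into the vector; assumes INSN is non-empty
       and INDEX is in range.  */
    static void
    vec_to_std_vector_sketch (std::vector<btrace_insn> &insn,
			      const btrace_insn &i, unsigned int index)
    {
      unsigned int len = insn.size ();   /* was VEC_length (btrace_insn_s, insn)  */
      bool empty = insn.empty ();        /* was VEC_empty (btrace_insn_s, insn)  */
      btrace_insn &last = insn.back ();  /* was VEC_last (btrace_insn_s, insn),
					    now a reference, not a pointer  */
      insn.push_back (i);                /* was VEC_safe_push (btrace_insn_s, insn, &i)  */
      insn.pop_back ();                  /* was VEC_pop (btrace_insn_s, insn)  */
      btrace_insn *elt = &insn[index];   /* was VEC_index (btrace_insn_s, insn, index)  */
    }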
diff --git a/gdb/btrace.c b/gdb/btrace.c
index 2a83e1b..6d2a9c4 100644
--- a/gdb/btrace.c
+++ b/gdb/btrace.c
@@ -131,7 +131,7 @@ ftrace_debug (const struct btrace_function *bfun, const char *prefix)
level = bfun->level;
ibegin = bfun->insn_offset;
- iend = ibegin + VEC_length (btrace_insn_s, bfun->insn);
+ iend = ibegin + bfun->insn.size ();
DEBUG_FTRACE ("%s: fun = %s, file = %s, level = %d, insn = [%u; %u)",
prefix, fun, file, level, ibegin, iend);
@@ -149,7 +149,7 @@ ftrace_call_num_insn (const struct btrace_function* bfun)
if (bfun->errcode != 0)
return 1;
- return VEC_length (btrace_insn_s, bfun->insn);
+ return bfun->insn.size ();
}
/* Return the function segment with the given NUMBER or NULL if no such segment
@@ -390,15 +390,13 @@ ftrace_find_call (struct btrace_thread_info *btinfo,
{
for (; bfun != NULL; bfun = ftrace_find_call_by_number (btinfo, bfun->up))
{
- struct btrace_insn *last;
-
/* Skip gaps. */
if (bfun->errcode != 0)
continue;
- last = VEC_last (btrace_insn_s, bfun->insn);
+ btrace_insn &last = bfun->insn.back ();
- if (last->iclass == BTRACE_INSN_CALL)
+ if (last.iclass == BTRACE_INSN_CALL)
break;
}
@@ -528,7 +526,7 @@ ftrace_new_gap (struct btrace_thread_info *btinfo, int errcode,
{
/* We hijack the previous function segment if it was empty. */
bfun = &btinfo->functions.back ();
- if (bfun->errcode != 0 || !VEC_empty (btrace_insn_s, bfun->insn))
+ if (bfun->errcode != 0 || !bfun->insn.empty ())
bfun = ftrace_new_function (btinfo, NULL, NULL);
}
@@ -550,7 +548,6 @@ ftrace_update_function (struct btrace_thread_info *btinfo, CORE_ADDR pc)
struct bound_minimal_symbol bmfun;
struct minimal_symbol *mfun;
struct symbol *fun;
- struct btrace_insn *last;
struct btrace_function *bfun;
/* Try to determine the function we're in. We use both types of symbols
@@ -575,9 +572,9 @@ ftrace_update_function (struct btrace_thread_info *btinfo, CORE_ADDR pc)
/* Check the last instruction, if we have one.
We do this check first, since it allows us to fill in the call stack
links in addition to the normal flow links. */
- last = NULL;
- if (!VEC_empty (btrace_insn_s, bfun->insn))
- last = VEC_last (btrace_insn_s, bfun->insn);
+ btrace_insn *last = NULL;
+ if (!bfun->insn.empty ())
+ last = &bfun->insn.back ();
if (last != NULL)
{
@@ -648,10 +645,9 @@ ftrace_update_function (struct btrace_thread_info *btinfo, CORE_ADDR pc)
/* Add the instruction at PC to BFUN's instructions. */
static void
-ftrace_update_insns (struct btrace_function *bfun,
- const struct btrace_insn *insn)
+ftrace_update_insns (struct btrace_function *bfun, const btrace_insn &insn)
{
- VEC_safe_push (btrace_insn_s, bfun->insn, insn);
+ bfun->insn.push_back (insn);
if (record_debug > 1)
ftrace_debug (bfun, "update insn");
@@ -747,7 +743,7 @@ ftrace_compute_global_level_offset (struct btrace_thread_info *btinfo)
really part of the trace. If it contains just this one instruction, we
ignore the segment. */
struct btrace_function *last = &btinfo->functions.back();
- if (VEC_length (btrace_insn_s, last->insn) != 1)
+ if (last->insn.size () != 1)
level = std::min (level, last->level);
DEBUG_FTRACE ("setting global level offset: %d", -level);
@@ -1104,7 +1100,7 @@ btrace_compute_ftrace_bts (struct thread_info *tp,
insn.iclass = ftrace_classify_insn (gdbarch, pc);
insn.flags = 0;
- ftrace_update_insns (bfun, &insn);
+ ftrace_update_insns (bfun, insn);
/* We're done once we pushed the instruction at the end. */
if (block->end == pc)
@@ -1682,7 +1678,6 @@ btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
{
struct btrace_thread_info *btinfo;
struct btrace_function *last_bfun;
- struct btrace_insn *last_insn;
btrace_block_s *first_new_block;
btinfo = &tp->btrace;
@@ -1694,7 +1689,7 @@ btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
/* If the existing trace ends with a gap, we just glue the traces
together. We need to drop the last (i.e. chronologically first) block
of the new trace, though, since we can't fill in the start address.*/
- if (VEC_empty (btrace_insn_s, last_bfun->insn))
+ if (last_bfun->insn.empty ())
{
VEC_pop (btrace_block_s, btrace->blocks);
return 0;
@@ -1704,7 +1699,7 @@ btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
chronologically first block in the new trace is the last block in
the new trace's block vector. */
first_new_block = VEC_last (btrace_block_s, btrace->blocks);
- last_insn = VEC_last (btrace_insn_s, last_bfun->insn);
+ const btrace_insn &last_insn = last_bfun->insn.back ();
/* If the current PC at the end of the block is the same as in our current
trace, there are two explanations:
@@ -1714,19 +1709,19 @@ btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
entries.
In the second case, the delta trace vector should contain exactly one
entry for the partial block containing the current PC. Remove it. */
- if (first_new_block->end == last_insn->pc
+ if (first_new_block->end == last_insn.pc
&& VEC_length (btrace_block_s, btrace->blocks) == 1)
{
VEC_pop (btrace_block_s, btrace->blocks);
return 0;
}
- DEBUG ("stitching %s to %s", ftrace_print_insn_addr (last_insn),
+ DEBUG ("stitching %s to %s", ftrace_print_insn_addr (&last_insn),
core_addr_to_string_nz (first_new_block->end));
/* Do a simple sanity check to make sure we don't accidentally end up
with a bad block. This should not occur in practice. */
- if (first_new_block->end < last_insn->pc)
+ if (first_new_block->end < last_insn.pc)
{
warning (_("Error while trying to read delta trace. Falling back to "
"a full read."));
@@ -1735,16 +1730,16 @@ btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
/* We adjust the last block to start at the end of our current trace. */
gdb_assert (first_new_block->begin == 0);
- first_new_block->begin = last_insn->pc;
+ first_new_block->begin = last_insn.pc;
/* We simply pop the last insn so we can insert it again as part of
the normal branch trace computation.
Since instruction iterators are based on indices in the instructions
vector, we don't leave any pointers dangling. */
DEBUG ("pruning insn at %s for stitching",
- ftrace_print_insn_addr (last_insn));
+ ftrace_print_insn_addr (&last_insn));
- VEC_pop (btrace_insn_s, last_bfun->insn);
+ last_bfun->insn.pop_back ();
/* The instructions vector may become empty temporarily if this has
been the only instruction in this function segment.
@@ -1755,7 +1750,7 @@ btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
of just that one instruction. If we remove it, we might turn the now
empty btrace function segment into a gap. But we don't want gaps at
the beginning. To avoid this, we remove the entire old trace. */
- if (last_bfun->number == 1 && VEC_empty (btrace_insn_s, last_bfun->insn))
+ if (last_bfun->number == 1 && last_bfun->insn.empty ())
btrace_clear (tp);
return 0;
@@ -1980,8 +1975,6 @@ btrace_clear (struct thread_info *tp)
reinit_frame_cache ();
btinfo = &tp->btrace;
- for (auto &bfun : btinfo->functions)
- VEC_free (btrace_insn_s, bfun.insn);
btinfo->functions.clear ();
btinfo->ngaps = 0;
@@ -2337,11 +2330,11 @@ btrace_insn_get (const struct btrace_insn_iterator *it)
return NULL;
/* The index is within the bounds of this function's instruction vector. */
- end = VEC_length (btrace_insn_s, bfun->insn);
+ end = bfun->insn.size ();
gdb_assert (0 < end);
gdb_assert (index < end);
- return VEC_index (btrace_insn_s, bfun->insn, index);
+ return &bfun->insn[index];
}
/* See btrace.h. */
@@ -2387,7 +2380,7 @@ btrace_insn_end (struct btrace_insn_iterator *it,
error (_("No trace."));
bfun = &btinfo->functions.back ();
- length = VEC_length (btrace_insn_s, bfun->insn);
+ length = bfun->insn.size ();
/* The last function may either be a gap or it contains the current
instruction, which is one past the end of the execution trace; ignore
@@ -2416,7 +2409,7 @@ btrace_insn_next (struct btrace_insn_iterator *it, unsigned int stride)
{
unsigned int end, space, adv;
- end = VEC_length (btrace_insn_s, bfun->insn);
+ end = bfun->insn.size ();
/* An empty function segment represents a gap in the trace. We count
it as one instruction. */
@@ -2509,7 +2502,7 @@ btrace_insn_prev (struct btrace_insn_iterator *it, unsigned int stride)
/* We point to one after the last instruction in the new function. */
bfun = prev;
- index = VEC_length (btrace_insn_s, bfun->insn);
+ index = bfun->insn.size ();
/* An empty function segment represents a gap in the trace. We count
it as one instruction. */