aboutsummaryrefslogtreecommitdiff
path: root/gcc/analyzer/svalue.cc
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/analyzer/svalue.cc')
-rw-r--r--gcc/analyzer/svalue.cc635
1 files changed, 622 insertions, 13 deletions
diff --git a/gcc/analyzer/svalue.cc b/gcc/analyzer/svalue.cc
index d6305a3..5f2fe4c 100644
--- a/gcc/analyzer/svalue.cc
+++ b/gcc/analyzer/svalue.cc
@@ -105,12 +105,25 @@ svalue::to_json () const
tree
svalue::maybe_get_constant () const
{
- if (const constant_svalue *cst_sval = dyn_cast_constant_svalue ())
+ const svalue *sval = unwrap_any_unmergeable ();
+ if (const constant_svalue *cst_sval = sval->dyn_cast_constant_svalue ())
return cst_sval->get_constant ();
else
return NULL_TREE;
}
+/* If this svalue is a region_svalue, return the region it points to.
+ Otherwise return NULL. */
+
+const region *
+svalue::maybe_get_region () const
+{
+ if (const region_svalue *region_sval = dyn_cast_region_svalue ())
+ return region_sval->get_pointee ();
+ else
+ return NULL;
+}
+
/* If this svalue is a cast (i.e. a unaryop NOP_EXPR or VIEW_CONVERT_EXPR),
return the underlying svalue.
Otherwise return NULL. */
@@ -158,6 +171,13 @@ svalue::can_merge_p (const svalue *other,
|| (other->get_kind () == SK_UNMERGEABLE))
return NULL;
+ /* Reject attempts to merge poisoned svalues with other svalues
+ (either non-poisoned, or other kinds of poison), so that e.g.
+ we identify paths in which a variable is conditionally uninitialized. */
+ if (get_kind () == SK_POISONED
+ || other->get_kind () == SK_POISONED)
+ return NULL;
+
/* Reject attempts to merge NULL pointers with not-NULL-pointers. */
if (POINTER_TYPE_P (get_type ()))
{
@@ -415,6 +435,27 @@ svalue::cmp_ptr (const svalue *sval1, const svalue *sval2)
sub_sval2->get_subregion ());
}
break;
+ case SK_REPEATED:
+ {
+ const repeated_svalue *repeated_sval1 = (const repeated_svalue *)sval1;
+ const repeated_svalue *repeated_sval2 = (const repeated_svalue *)sval2;
+ return svalue::cmp_ptr (repeated_sval1->get_inner_svalue (),
+ repeated_sval2->get_inner_svalue ());
+ }
+ break;
+ case SK_BITS_WITHIN:
+ {
+ const bits_within_svalue *bits_within_sval1
+ = (const bits_within_svalue *)sval1;
+ const bits_within_svalue *bits_within_sval2
+ = (const bits_within_svalue *)sval2;
+ if (int cmp = bit_range::cmp (bits_within_sval1->get_bits (),
+ bits_within_sval2->get_bits ()))
+ return cmp;
+ return svalue::cmp_ptr (bits_within_sval1->get_inner_svalue (),
+ bits_within_sval2->get_inner_svalue ());
+ }
+ break;
case SK_UNMERGEABLE:
{
const unmergeable_svalue *unmergeable_sval1
@@ -468,6 +509,29 @@ svalue::cmp_ptr (const svalue *sval1, const svalue *sval2)
conjured_sval2->get_id_region ());
}
break;
+ case SK_ASM_OUTPUT:
+ {
+ const asm_output_svalue *asm_output_sval1
+ = (const asm_output_svalue *)sval1;
+ const asm_output_svalue *asm_output_sval2
+ = (const asm_output_svalue *)sval2;
+ if (int asm_string_cmp = strcmp (asm_output_sval1->get_asm_string (),
+ asm_output_sval2->get_asm_string ()))
+ return asm_string_cmp;
+ if (int output_idx_cmp = ((int)asm_output_sval1->get_output_idx ()
+ - (int)asm_output_sval2->get_output_idx ()))
+ return output_idx_cmp;
+ if (int cmp = ((int)asm_output_sval1->get_num_inputs ()
+ - (int)asm_output_sval2->get_num_inputs ()))
+ return cmp;
+ for (unsigned i = 0; i < asm_output_sval1->get_num_inputs (); i++)
+ if (int input_cmp
+ = svalue::cmp_ptr (asm_output_sval1->get_input (i),
+ asm_output_sval2->get_input (i)))
+ return input_cmp;
+ return 0;
+ }
+ break;
}
}
@@ -481,6 +545,77 @@ svalue::cmp_ptr_ptr (const void *p1, const void *p2)
return cmp_ptr (sval1, sval2);
}
+/* Subclass of visitor for use in implementing svalue::involves_p. */
+
+class involvement_visitor : public visitor
+{
+public:
+ involvement_visitor (const svalue *needle)
+ : m_needle (needle), m_found (false) {}
+
+ void visit_initial_svalue (const initial_svalue *candidate)
+ {
+ if (candidate == m_needle)
+ m_found = true;
+ }
+
+ void visit_conjured_svalue (const conjured_svalue *candidate)
+ {
+ if (candidate == m_needle)
+ m_found = true;
+ }
+
+ bool found_p () const { return m_found; }
+
+private:
+ const svalue *m_needle;
+ bool m_found;
+};
+
+/* Return true iff this svalue is defined in terms of OTHER. */
+
+bool
+svalue::involves_p (const svalue *other) const
+{
+ /* Currently only implemented for these kinds. */
+ gcc_assert (other->get_kind () == SK_INITIAL
+ || other->get_kind () == SK_CONJURED);
+
+ involvement_visitor v (other);
+ accept (&v);
+ return v.found_p ();
+}
+
+/* Extract SUBRANGE from this value, of type TYPE. */
+
+const svalue *
+svalue::extract_bit_range (tree type,
+ const bit_range &subrange,
+ region_model_manager *mgr) const
+{
+ return mgr->get_or_create_bits_within (type, subrange, this);
+}
+
+/* Base implementation of svalue::maybe_fold_bits_within vfunc. */
+
+const svalue *
+svalue::maybe_fold_bits_within (tree,
+ const bit_range &,
+ region_model_manager *) const
+{
+ /* By default, don't fold. */
+ return NULL;
+}
+
+/* Base implementation of svalue::all_zeroes_p.
+ Return true if this value is known to be all zeroes. */
+
+bool
+svalue::all_zeroes_p () const
+{
+ return false;
+}
+
/* class region_svalue : public svalue. */
/* Implementation of svalue::dump_to_pp vfunc for region_svalue. */
@@ -646,6 +781,34 @@ constant_svalue::eval_condition (const constant_svalue *lhs,
return tristate::TS_UNKNOWN;
}
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for constant_svalue. */
+
+const svalue *
+constant_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &,
+ region_model_manager *mgr) const
+{
+ /* Bits within an all-zero value are also all zero. */
+ if (zerop (m_cst_expr))
+ {
+ if (type)
+ return mgr->get_or_create_cast (type, this);
+ else
+ return this;
+ }
+ /* Otherwise, don't fold. */
+ return NULL;
+}
+
+/* Implementation of svalue::all_zeroes_p for constant_svalue. */
+
+bool
+constant_svalue::all_zeroes_p () const
+{
+ return zerop (m_cst_expr);
+}
+
/* class unknown_svalue : public svalue. */
/* Implementation of svalue::dump_to_pp vfunc for unknown_svalue. */
@@ -677,6 +840,18 @@ unknown_svalue::accept (visitor *v) const
v->visit_unknown_svalue (this);
}
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for unknown_svalue. */
+
+const svalue *
+unknown_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &,
+ region_model_manager *mgr) const
+{
+ /* Bits within an unknown_svalue are themselves unknown. */
+ return mgr->get_or_create_unknown_svalue (type);
+}
+
/* Get a string for KIND for use in debug dumps. */
const char *
@@ -686,6 +861,8 @@ poison_kind_to_str (enum poison_kind kind)
{
default:
gcc_unreachable ();
+ case POISON_KIND_UNINIT:
+ return "uninit";
case POISON_KIND_FREED:
return "freed";
case POISON_KIND_POPPED_STACK:
@@ -701,9 +878,17 @@ void
poisoned_svalue::dump_to_pp (pretty_printer *pp, bool simple) const
{
if (simple)
- pp_printf (pp, "POISONED(%s)", poison_kind_to_str (m_kind));
+ {
+ pp_string (pp, "POISONED(");
+ print_quoted_type (pp, get_type ());
+ pp_printf (pp, ", %s)", poison_kind_to_str (m_kind));
+ }
else
- pp_printf (pp, "poisoned_svalue(%s)", poison_kind_to_str (m_kind));
+ {
+ pp_string (pp, "poisoned_svalue(");
+ print_quoted_type (pp, get_type ());
+ pp_printf (pp, ", %s)", poison_kind_to_str (m_kind));
+ }
}
/* Implementation of svalue::accept vfunc for poisoned_svalue. */
@@ -714,6 +899,18 @@ poisoned_svalue::accept (visitor *v) const
v->visit_poisoned_svalue (this);
}
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for poisoned_svalue. */
+
+const svalue *
+poisoned_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &,
+ region_model_manager *mgr) const
+{
+ /* Bits within a poisoned value are also poisoned. */
+ return mgr->get_or_create_poisoned_svalue (m_kind, type);
+}
+
/* class setjmp_svalue's implementation is in engine.cc, so that it can use
the declaration of exploded_node. */
@@ -763,7 +960,7 @@ initial_svalue::implicitly_live_p (const svalue_set *,
a popped stack frame. */
if (model->region_exists_p (m_reg))
{
- const svalue *reg_sval = model->get_store_value (m_reg);
+ const svalue *reg_sval = model->get_store_value (m_reg, NULL);
if (reg_sval == this)
return true;
}
@@ -850,8 +1047,51 @@ unaryop_svalue::implicitly_live_p (const svalue_set *live_svalues,
return get_arg ()->live_p (live_svalues, model);
}
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for unaryop_svalue. */
+
+const svalue *
+unaryop_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &,
+ region_model_manager *mgr) const
+{
+ switch (m_op)
+ {
+ default:
+ break;
+ case NOP_EXPR:
+ /* A cast of zero is zero. */
+ if (tree cst = m_arg->maybe_get_constant ())
+ if (zerop (cst))
+ {
+ if (type)
+ return mgr->get_or_create_cast (type, this);
+ else
+ return this;
+ }
+ break;
+ }
+ /* Otherwise, don't fold. */
+ return NULL;
+}
+
/* class binop_svalue : public svalue. */
+/* Return whether OP should be printed as an infix operator. */
+
+static bool
+infix_p (enum tree_code op)
+{
+ switch (op)
+ {
+ default:
+ return true;
+ case MAX_EXPR:
+ case MIN_EXPR:
+ return false;
+ }
+}
+
/* Implementation of svalue::dump_to_pp vfunc for binop_svalue. */
void
@@ -859,11 +1099,25 @@ binop_svalue::dump_to_pp (pretty_printer *pp, bool simple) const
{
if (simple)
{
- pp_character (pp, '(');
- m_arg0->dump_to_pp (pp, simple);
- pp_string (pp, op_symbol_code (m_op));
- m_arg1->dump_to_pp (pp, simple);
- pp_character (pp, ')');
+ if (infix_p (m_op))
+ {
+ /* Print "(A OP B)". */
+ pp_character (pp, '(');
+ m_arg0->dump_to_pp (pp, simple);
+ pp_string (pp, op_symbol_code (m_op));
+ m_arg1->dump_to_pp (pp, simple);
+ pp_character (pp, ')');
+ }
+ else
+ {
+ /* Print "OP(A, B)". */
+ pp_string (pp, op_symbol_code (m_op));
+ pp_character (pp, '(');
+ m_arg0->dump_to_pp (pp, simple);
+ pp_string (pp, ", ");
+ m_arg1->dump_to_pp (pp, simple);
+ pp_character (pp, ')');
+ }
}
else
{
@@ -908,6 +1162,7 @@ sub_svalue::sub_svalue (tree type, const svalue *parent_svalue,
type),
m_parent_svalue (parent_svalue), m_subregion (subregion)
{
+ gcc_assert (parent_svalue->can_have_associated_state_p ());
}
/* Implementation of svalue::dump_to_pp vfunc for sub_svalue. */
@@ -953,6 +1208,216 @@ sub_svalue::implicitly_live_p (const svalue_set *live_svalues,
return get_parent ()->live_p (live_svalues, model);
}
+/* class repeated_svalue : public svalue. */
+
+/* repeated_svalue's ctor. */
+
+repeated_svalue::repeated_svalue (tree type,
+ const svalue *outer_size,
+ const svalue *inner_svalue)
+: svalue (complexity::from_pair (outer_size, inner_svalue), type),
+ m_outer_size (outer_size),
+ m_inner_svalue (inner_svalue)
+{
+ gcc_assert (outer_size->can_have_associated_state_p ());
+ gcc_assert (inner_svalue->can_have_associated_state_p ());
+}
+
+/* Implementation of svalue::dump_to_pp vfunc for repeated_svalue. */
+
+void
+repeated_svalue::dump_to_pp (pretty_printer *pp, bool simple) const
+{
+ if (simple)
+ {
+ pp_string (pp, "REPEATED(");
+ if (get_type ())
+ {
+ print_quoted_type (pp, get_type ());
+ pp_string (pp, ", ");
+ }
+ pp_string (pp, "outer_size: ");
+ m_outer_size->dump_to_pp (pp, simple);
+ pp_string (pp, ", inner_val: ");
+ m_inner_svalue->dump_to_pp (pp, simple);
+ pp_character (pp, ')');
+ }
+ else
+ {
+ pp_string (pp, "repeated_svalue (");
+ if (get_type ())
+ {
+ print_quoted_type (pp, get_type ());
+ pp_string (pp, ", ");
+ }
+ pp_string (pp, "outer_size: ");
+ m_outer_size->dump_to_pp (pp, simple);
+ pp_string (pp, ", inner_val: ");
+ m_inner_svalue->dump_to_pp (pp, simple);
+ pp_character (pp, ')');
+ }
+}
+
+/* Implementation of svalue::accept vfunc for repeated_svalue. */
+
+void
+repeated_svalue::accept (visitor *v) const
+{
+ v->visit_repeated_svalue (this);
+ m_inner_svalue->accept (v);
+}
+
+/* Implementation of svalue::all_zeroes_p for repeated_svalue. */
+
+bool
+repeated_svalue::all_zeroes_p () const
+{
+ return m_inner_svalue->all_zeroes_p ();
+}
+
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for repeated_svalue. */
+
+const svalue *
+repeated_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &bits,
+ region_model_manager *mgr) const
+{
+ const svalue *innermost_sval = m_inner_svalue;
+ /* Fold
+ BITS_WITHIN (range, REPEATED_SVALUE (ZERO))
+ to:
+ REPEATED_SVALUE (ZERO). */
+ if (all_zeroes_p ())
+ {
+ byte_range bytes (0,0);
+ if (bits.as_byte_range (&bytes))
+ {
+ const svalue *byte_size
+ = mgr->get_or_create_int_cst (size_type_node,
+ bytes.m_size_in_bytes.to_uhwi ());
+ return mgr->get_or_create_repeated_svalue (type, byte_size,
+ innermost_sval);
+ }
+ }
+
+ /* Fold:
+ BITS_WITHIN (range, REPEATED_SVALUE (INNERMOST_SVALUE))
+ to:
+ BITS_WITHIN (range - offset, INNERMOST_SVALUE)
+ if range is fully within one instance of INNERMOST_SVALUE. */
+ if (tree innermost_type = innermost_sval->get_type ())
+ {
+ bit_size_t element_bit_size;
+ if (int_size_in_bits (innermost_type, &element_bit_size)
+ && element_bit_size > 0)
+ {
+ HOST_WIDE_INT start_idx
+ = (bits.get_start_bit_offset ()
+ / element_bit_size).to_shwi ();
+ HOST_WIDE_INT last_idx
+ = (bits.get_last_bit_offset ()
+ / element_bit_size).to_shwi ();
+ if (start_idx == last_idx)
+ {
+ bit_offset_t start_of_element
+ = start_idx * element_bit_size;
+ bit_range range_within_element
+ (bits.m_start_bit_offset - start_of_element,
+ bits.m_size_in_bits);
+ return mgr->get_or_create_bits_within (type,
+ range_within_element,
+ innermost_sval);
+ }
+ }
+ }
+
+ return NULL;
+}
+
+/* class bits_within_svalue : public svalue. */
+
+/* bits_within_svalue's ctor. */
+
+bits_within_svalue::bits_within_svalue (tree type,
+ const bit_range &bits,
+ const svalue *inner_svalue)
+: svalue (complexity (inner_svalue), type),
+ m_bits (bits),
+ m_inner_svalue (inner_svalue)
+{
+ gcc_assert (inner_svalue->can_have_associated_state_p ());
+}
+
+/* Implementation of svalue::dump_to_pp vfunc for bits_within_svalue. */
+
+void
+bits_within_svalue::dump_to_pp (pretty_printer *pp, bool simple) const
+{
+ if (simple)
+ {
+ pp_string (pp, "BITS_WITHIN(");
+ if (get_type ())
+ {
+ print_quoted_type (pp, get_type ());
+ pp_string (pp, ", ");
+ }
+ m_bits.dump_to_pp (pp);
+ pp_string (pp, ", inner_val: ");
+ m_inner_svalue->dump_to_pp (pp, simple);
+ pp_character (pp, ')');
+ }
+ else
+ {
+ pp_string (pp, "bits_within_svalue (");
+ if (get_type ())
+ {
+ print_quoted_type (pp, get_type ());
+ pp_string (pp, ", ");
+ }
+ m_bits.dump_to_pp (pp);
+ pp_string (pp, ", inner_val: ");
+ m_inner_svalue->dump_to_pp (pp, simple);
+ pp_character (pp, ')');
+ }
+}
+
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for bits_within_svalue. */
+
+const svalue *
+bits_within_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &bits,
+ region_model_manager *mgr) const
+{
+ /* Fold:
+ BITS_WITHIN (range1, BITS_WITHIN (range2, VAL))
+ to:
+ BITS_WITHIN (range1 in range 2, VAL). */
+ bit_range offset_bits (m_bits.get_start_bit_offset ()
+ + bits.m_start_bit_offset,
+ bits.m_size_in_bits);
+ return mgr->get_or_create_bits_within (type, offset_bits, m_inner_svalue);
+}
+
+/* Implementation of svalue::accept vfunc for bits_within_svalue. */
+
+void
+bits_within_svalue::accept (visitor *v) const
+{
+ v->visit_bits_within_svalue (this);
+ m_inner_svalue->accept (v);
+}
+
+/* Implementation of svalue::implicitly_live_p vfunc for bits_within_svalue. */
+
+bool
+bits_within_svalue::implicitly_live_p (const svalue_set *live_svalues,
+ const region_model *model) const
+{
+ return m_inner_svalue->live_p (live_svalues, model);
+}
+
/* class widening_svalue : public svalue. */
/* Implementation of svalue::dump_to_pp vfunc for widening_svalue. */
@@ -1194,17 +1659,26 @@ compound_svalue::dump_to_pp (pretty_printer *pp, bool simple) const
if (simple)
{
pp_string (pp, "COMPOUND(");
+ if (get_type ())
+ {
+ print_quoted_type (pp, get_type ());
+ pp_string (pp, ", ");
+ }
+ pp_character (pp, '{');
m_map.dump_to_pp (pp, simple, false);
- pp_character (pp, ')');
+ pp_string (pp, "})");
}
else
{
pp_string (pp, "compound_svalue (");
- pp_string (pp, ", ");
+ if (get_type ())
+ {
+ print_quoted_type (pp, get_type ());
+ pp_string (pp, ", ");
+ }
pp_character (pp, '{');
m_map.dump_to_pp (pp, simple, false);
- pp_string (pp, "}, ");
- pp_character (pp, ')');
+ pp_string (pp, "})");
}
}
@@ -1240,6 +1714,75 @@ compound_svalue::calc_complexity (const binding_map &map)
return complexity (num_child_nodes + 1, max_child_depth + 1);
}
+/* Implementation of svalue::maybe_fold_bits_within vfunc
+ for compound_svalue. */
+
+const svalue *
+compound_svalue::maybe_fold_bits_within (tree type,
+ const bit_range &bits,
+ region_model_manager *mgr) const
+{
+ binding_map result_map;
+ for (auto iter : m_map)
+ {
+ const binding_key *key = iter.first;
+ if (const concrete_binding *conc_key
+ = key->dyn_cast_concrete_binding ())
+ {
+ /* Ignore concrete bindings outside BITS. */
+ if (!conc_key->get_bit_range ().intersects_p (bits))
+ continue;
+
+ const svalue *sval = iter.second;
+ /* Get the position of conc_key relative to BITS. */
+ bit_range result_location (conc_key->get_start_bit_offset ()
+ - bits.get_start_bit_offset (),
+ conc_key->get_size_in_bits ());
+ /* If conc_key starts after BITS, trim off leading bits
+ from the svalue and adjust binding location. */
+ if (result_location.m_start_bit_offset < 0)
+ {
+ bit_size_t leading_bits_to_drop
+ = -result_location.m_start_bit_offset;
+ result_location = bit_range
+ (0, result_location.m_size_in_bits - leading_bits_to_drop);
+ bit_range bits_within_sval (leading_bits_to_drop,
+ result_location.m_size_in_bits);
+ /* Trim off leading bits from sval. */
+ sval = mgr->get_or_create_bits_within (NULL_TREE,
+ bits_within_sval,
+ sval);
+ }
+ /* If conc_key finishes after BITS, trim off trailing bits
+ from the svalue and adjust binding location. */
+ if (conc_key->get_next_bit_offset ()
+ > bits.get_next_bit_offset ())
+ {
+ bit_size_t trailing_bits_to_drop
+ = (conc_key->get_next_bit_offset ()
+ - bits.get_next_bit_offset ());
+ result_location = bit_range
+ (result_location.m_start_bit_offset,
+ result_location.m_size_in_bits - trailing_bits_to_drop);
+ bit_range bits_within_sval (0,
+ result_location.m_size_in_bits);
+ /* Trim off trailing bits from sval. */
+ sval = mgr->get_or_create_bits_within (NULL_TREE,
+ bits_within_sval,
+ sval);
+ }
+ const concrete_binding *offset_conc_key
+ = mgr->get_store_manager ()->get_concrete_binding
+ (result_location);
+ result_map.put (offset_conc_key, sval);
+ }
+ else
+ /* If we have any symbolic keys we can't get it as bits. */
+ return NULL;
+ }
+ return mgr->get_or_create_compound_svalue (type, result_map);
+}
+
/* class conjured_svalue : public svalue. */
/* Implementation of svalue::dump_to_pp vfunc for conjured_svalue. */
@@ -1275,6 +1818,72 @@ conjured_svalue::accept (visitor *v) const
m_id_reg->accept (v);
}
+/* class asm_output_svalue : public svalue. */
+
+/* Implementation of svalue::dump_to_pp vfunc for asm_output_svalue. */
+
+void
+asm_output_svalue::dump_to_pp (pretty_printer *pp, bool simple) const
+{
+ if (simple)
+ {
+ pp_printf (pp, "ASM_OUTPUT(%qs, %%%i, {",
+ get_asm_string (),
+ get_output_idx ());
+ for (unsigned i = 0; i < m_num_inputs; i++)
+ {
+ if (i > 0)
+ pp_string (pp, ", ");
+ dump_input (pp, 0, m_input_arr[i], simple);
+ }
+ pp_string (pp, "})");
+ }
+ else
+ {
+ pp_printf (pp, "asm_output_svalue (%qs, %%%i, {",
+ get_asm_string (),
+ get_output_idx ());
+ for (unsigned i = 0; i < m_num_inputs; i++)
+ {
+ if (i > 0)
+ pp_string (pp, ", ");
+ dump_input (pp, 0, m_input_arr[i], simple);
+ }
+ pp_string (pp, "})");
+ }
+}
+
+/* Subroutine of asm_output_svalue::dump_to_pp. */
+
+void
+asm_output_svalue::dump_input (pretty_printer *pp,
+ unsigned input_idx,
+ const svalue *sval,
+ bool simple) const
+{
+ pp_printf (pp, "%%%i: ", input_idx_to_asm_idx (input_idx));
+ sval->dump_to_pp (pp, simple);
+}
+
+/* Convert INPUT_IDX from an index into the array of inputs
+ into the index of all operands for the asm stmt. */
+
+unsigned
+asm_output_svalue::input_idx_to_asm_idx (unsigned input_idx) const
+{
+ return input_idx + m_num_outputs;
+}
+
+/* Implementation of svalue::accept vfunc for asm_output_svalue. */
+
+void
+asm_output_svalue::accept (visitor *v) const
+{
+ v->visit_asm_output_svalue (this);
+ for (unsigned i = 0; i < m_num_inputs; i++)
+ m_input_arr[i]->accept (v);
+}
+
} // namespace ana
#endif /* #if ENABLE_ANALYZER */