author    | Patrick Palka <ppalka@redhat.com> | 2022-01-10 14:57:54 -0500
committer | Patrick Palka <ppalka@redhat.com> | 2022-01-10 14:57:54 -0500
commit    | ab36b554bd90e8db279d13b133369118814f13fb
tree      | 6c4abc9617dec954ecde670d2c7be09e5044953a /gcc/cp
parent    | 3e95a974c39e922d19bf7ac1246730c516ae01f2
c++: constexpr base-to-derived conversion with offset 0 [PR103879]
r12-136 made us canonicalize an object/offset pair with negative offset
into one with a nonnegative offset, by iteratively absorbing the
innermost component into the offset and stopping as soon as the offset
becomes nonnegative.
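For context, here is a minimal sketch (not taken from the commit or its testcases) of the situation that canonicalization handles: when a base subobject sits at a nonzero position, converting a pointer to it back to the derived type initially yields a negative offset that the loop absorbs away.

```cpp
// Illustrative only: B2 sits at a nonzero byte position inside C, so the
// constexpr evaluator sees something like *(C *)(&c.<B2 subobject> - <position of B2>),
// i.e. an object/offset pair with a negative offset that r12-136
// canonicalizes back to plain `c` with offset 0.
struct B1 { int i = 1; };
struct B2 { int j = 2; };
struct C : B1, B2 { int k = 3; };

constexpr int derived_back ()
{
  C c;
  B2 *bp = &c;                     // points at the B2 subobject of c
  return static_cast<C *>(bp)->k;  // base-to-derived conversion
}
static_assert (derived_back () == 3, "");
```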
This patch strengthens this transformation by making it keep on absorbing
even if the offset is already 0 as long as the innermost component is at
position 0 (and thus absorbing doesn't change the offset). This lets us
accept the two constexpr testcases below, which we'd previously reject
essentially because cxx_fold_indirect_ref would be unable to resolve
*(B*)&b.D123 (where D123 is the base A subobject at position 0) to just b.
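A minimal sketch of the kind of base-to-derived conversion at offset 0 that now folds (hypothetical; the actual g++.dg/cpp1y/constexpr-base2.C may differ):

```cpp
// Illustrative only: the base A subobject of b is at position 0, so the
// cast back to B* has to resolve *(B *)&b.<A subobject> to plain b.
struct A { int n = 42; };
struct B : A { };

constexpr int through_base ()
{
  B b;
  A *ap = &b;                       // base A subobject at position 0
  return static_cast<B *>(ap)->n;   // base-to-derived conversion with offset 0
}
static_assert (through_base () == 42, "");
```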
PR c++/103879
gcc/cp/ChangeLog:
* constexpr.c (cxx_fold_indirect_ref): Split out object/offset
canonicalization step into a local lambda. Strengthen it to
absorb more components at position 0. Use it before both calls
to cxx_fold_indirect_ref_1.
gcc/testsuite/ChangeLog:
* g++.dg/cpp1y/constexpr-base2.C: New test.
* g++.dg/cpp1y/constexpr-base2a.C: New test.
Diffstat (limited to 'gcc/cp')
-rw-r--r-- | gcc/cp/constexpr.c | 38
1 file changed, 26 insertions, 12 deletions
```diff
diff --git a/gcc/cp/constexpr.c b/gcc/cp/constexpr.c
index af6a4a7..d0da4a7 100644
--- a/gcc/cp/constexpr.c
+++ b/gcc/cp/constexpr.c
@@ -5215,6 +5215,25 @@ cxx_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
   if (!INDIRECT_TYPE_P (subtype))
     return NULL_TREE;
 
+  /* Canonicalizes the given OBJ/OFF pair by iteratively absorbing
+     the innermost component into the offset until it would make the
+     offset positive, so that cxx_fold_indirect_ref_1 can identify
+     more folding opportunities.  */
+  auto canonicalize_obj_off = [] (tree& obj, tree& off) {
+    while (TREE_CODE (obj) == COMPONENT_REF
+           && (tree_int_cst_sign_bit (off) || integer_zerop (off)))
+      {
+        tree field = TREE_OPERAND (obj, 1);
+        tree pos = byte_position (field);
+        if (integer_zerop (off) && integer_nonzerop (pos))
+          /* If the offset is already 0, keep going as long as the
+             component is at position 0.  */
+          break;
+        off = int_const_binop (PLUS_EXPR, off, pos);
+        obj = TREE_OPERAND (obj, 0);
+      }
+  };
+
   if (TREE_CODE (sub) == ADDR_EXPR)
     {
       tree op = TREE_OPERAND (sub, 0);
@@ -5233,7 +5252,12 @@ cxx_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
           return op;
         }
       else
-        return cxx_fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base);
+        {
+          tree off = integer_zero_node;
+          canonicalize_obj_off (op, off);
+          gcc_assert (integer_zerop (off));
+          return cxx_fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base);
+        }
     }
   else if (TREE_CODE (sub) == POINTER_PLUS_EXPR
            && tree_fits_uhwi_p (TREE_OPERAND (sub, 1)))
@@ -5245,17 +5269,7 @@ cxx_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
       if (TREE_CODE (op00) == ADDR_EXPR)
         {
           tree obj = TREE_OPERAND (op00, 0);
-          while (TREE_CODE (obj) == COMPONENT_REF
-                 && tree_int_cst_sign_bit (off))
-            {
-              /* Canonicalize this object/offset pair by iteratively absorbing
-                 the innermost component into the offset until the offset is
-                 nonnegative, so that cxx_fold_indirect_ref_1 can identify
-                 more folding opportunities.  */
-              tree field = TREE_OPERAND (obj, 1);
-              off = int_const_binop (PLUS_EXPR, off, byte_position (field));
-              obj = TREE_OPERAND (obj, 0);
-            }
+          canonicalize_obj_off (obj, off);
           return cxx_fold_indirect_ref_1 (ctx, loc, type, obj,
                                           tree_to_uhwi (off), empty_base);
         }
```
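To make the loop's behavior concrete, here is a simplified, self-contained model of the canonicalization (illustrative only, not GCC code): components are represented as plain byte positions, innermost last, and the loop absorbs them into the offset while the offset is negative, or while it is zero and the innermost component sits at position 0.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Innermost component last; each entry is that component's byte position
// within its parent (a stand-in for byte_position (field)).
using component_chain = std::vector<int64_t>;

static void
canonicalize_obj_off (component_chain &obj, int64_t &off)
{
  while (!obj.empty () && off <= 0)
    {
      int64_t pos = obj.back ();
      if (off == 0 && pos != 0)
        break;          // absorbing would make the offset positive
      off += pos;       // absorb the innermost component into the offset
      obj.pop_back ();
    }
}

int main ()
{
  // &c.<base at position 8> - 8: the pre-existing negative-offset case.
  component_chain obj1 = {8};
  int64_t off1 = -8;
  canonicalize_obj_off (obj1, off1);
  assert (obj1.empty () && off1 == 0);

  // &b.<base at position 0> + 0: the new case this patch handles.
  component_chain obj2 = {0};
  int64_t off2 = 0;
  canonicalize_obj_off (obj2, off2);
  assert (obj2.empty () && off2 == 0);
}
```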