author     Roger Sayle <roger@nextmovesoftware.com>    2022-06-07 10:09:49 +0100
committer  Roger Sayle <roger@nextmovesoftware.com>    2022-06-07 10:09:49 +0100
commit     c00e1e3aa5ae62a991d105d309061d12f6a8764f (patch)
tree       75bcd6c0007a3666c4b6e0603b2e797f9045edab /gcc/expr.cc
parent     cef3f69c2f4cbc05aa3ec00f2fdbacfa8ca1d303 (diff)
PR middle-end/105853: Call store_constructor directly from calls.cc.
This patch fixes the ICE regressions PR middle-end/105853 and
PR target/105856, both caused by my recent patch to expand small const
structs as immediate constants.  That patch updated code generation in
three places: two in expr.cc that call store_constructor directly, and
a third in calls.cc's load_register_parameters that expands its
CONSTRUCTOR via expand_expr, because store_constructor is local/static
to expr.cc and the "public" API, expand_expr, should usually simply
forward the constructor to the appropriate store_constructor function.
Alas, despite clean regression testing on multiple targets, the above
ICEs show that expand_expr isn't a suitable proxy for store_constructor:
properties that (I'd assumed) shouldn't affect how or whether a struct
is placed in a register, such as whether the struct is considered
packed or aligned, actually interfere with the attempted optimization.
The (proposed) solution is to export store_constructor (and its helper
function int_expr_size) from expr.cc, by removing their static
qualifiers and prototyping both functions in expr.h, so that they can
be called directly from load_register_parameters in calls.cc.  This
cures both ICEs and, almost as importantly, improves code generation
over GCC 12.
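For illustration only (the calls.cc hunk is not part of this
expr.cc-filtered diff, and the local names exp and reg below are
assumed), the exported functions let load_register_parameters expand a
CONSTRUCTOR argument along these lines:

  /* Sketch only, not the actual calls.cc change: expand a small
     CONSTRUCTOR straight into the parameter register using the
     now-exported helpers, instead of going through expand_expr.  */
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT size = int_expr_size (exp);
      if (size > 0)
        store_constructor (exp, reg, 0, size, false);
    }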
For PR 105853, GCC 12 generates:
compose_nd_na_ipv6_src:
movzx eax, WORD PTR eth_addr_zero[rip+2]
movzx edx, WORD PTR eth_addr_zero[rip]
movzx edi, WORD PTR eth_addr_zero[rip+4]
sal rax, 16
or rax, rdx
sal rdi, 32
or rdi, rax
xor eax, eax
jmp packet_set_nd
eth_addr_zero: .zero 6
where now (with this fix) GCC 13 generates:
compose_nd_na_ipv6_src:
xorl %edi, %edi
xorl %eax, %eax
jmp packet_set_nd
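The kind of source involved looks roughly like this hypothetical
reduction; the names are borrowed from the assembly above, the types
and signatures are made up, and it is not the actual gcc.dg/pr105853.c
test, which is not shown in this diff:

struct eth_addr { unsigned char ea[6]; };
static const struct eth_addr eth_addr_zero = { { 0, 0, 0, 0, 0, 0 } };

extern int packet_set_nd (struct eth_addr addr);

int
compose_nd_na_ipv6_src (void)
{
  /* A small all-zero const struct passed by value: previously loaded
     piecewise from memory, now expanded as an immediate constant.  */
  return packet_set_nd (eth_addr_zero);
}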
Likewise, for PR 105856 on ARM, we'd previously generate:
g_329_3:
movw r3, #:lower16:.LANCHOR0
movt r3, #:upper16:.LANCHOR0
ldr r0, [r3]
b func_19
but with this optimization we now generate:
g_329_3:
mov r0, #6
b func_19
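Again purely as an illustration (not the actual gcc.dg/pr105856.c test,
with made-up types and names apart from those visible in the assembly),
the shape of code that benefits is a word-sized const struct argument:

struct S { int f; };
static const struct S g_329 = { 6 };

extern int func_19 (struct S s);

int
g_329_3 (void)
{
  /* The single-word constant struct is now materialized as the
     immediate 6 in r0 instead of being loaded from anchored data.  */
  return func_19 (g_329);
}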
2022-06-07  Roger Sayle  <roger@nextmovesoftware.com>

gcc/ChangeLog
	PR middle-end/105853
	PR target/105856
	* calls.cc (load_register_parameters): Call store_constructor
	and int_expr_size directly instead of expanding via expand_expr.
	* expr.cc (static void store_constructor): Don't prototype here.
	(static HOST_WIDE_INT int_expr_size): Likewise.
	(store_constructor): No longer static.
	(int_expr_size): Likewise, no longer static.
	* expr.h (store_constructor): Prototype here.
	(int_expr_size): Prototype here.

gcc/testsuite/ChangeLog
	PR middle-end/105853
	PR target/105856
	* gcc.dg/pr105853.c: New test case.
	* gcc.dg/pr105856.c: New test case.
Diffstat (limited to 'gcc/expr.cc')
-rw-r--r--  gcc/expr.cc  6
1 file changed, 2 insertions, 4 deletions
diff --git a/gcc/expr.cc b/gcc/expr.cc
index fb062dc..85cb414 100644
--- a/gcc/expr.cc
+++ b/gcc/expr.cc
@@ -84,7 +84,6 @@ static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
 static rtx_insn *compress_float_constant (rtx, rtx);
 static rtx get_subtarget (rtx);
-static void store_constructor (tree, rtx, int, poly_int64, bool);
 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
 			machine_mode, tree, alias_set_type, bool, bool);
 
@@ -100,7 +99,6 @@ static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
 			  profile_probability);
 static rtx const_vector_from_tree (tree);
 static tree tree_expr_size (const_tree);
-static HOST_WIDE_INT int_expr_size (const_tree);
 
 static void convert_mode_scalar (rtx, rtx, int);
 
@@ -6757,7 +6755,7 @@ fields_length (const_tree type)
    which has been packed to exclude padding bits.
    If REVERSE is true, the store is to be done in reverse order.  */
 
-static void
+void
 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
 		   bool reverse)
 {
@@ -13209,7 +13207,7 @@ expr_size (tree exp)
 /* Return a wide integer for the size in bytes of the value of EXP, or -1
    if the size can vary or is larger than an integer.  */
 
-static HOST_WIDE_INT
+HOST_WIDE_INT
 int_expr_size (const_tree exp)
 {
   tree size;