aboutsummaryrefslogtreecommitdiff
path: root/gcc/explow.c
diff options
context:
space:
mode:
authorGiuliano Belinassi <giuliano.belinassi@usp.br>2020-08-22 17:43:43 -0300
committerGiuliano Belinassi <giuliano.belinassi@usp.br>2020-08-22 17:43:43 -0300
commita926878ddbd5a98b272c22171ce58663fc04c3e0 (patch)
tree86af256e5d9a9c06263c00adc90e5fe348008c43 /gcc/explow.c
parent542730f087133690b47e036dfd43eb0db8a650ce (diff)
parent07cbaed8ba7d1b6e4ab3a9f44175502a4e1ecdb1 (diff)
downloadgcc-a926878ddbd5a98b272c22171ce58663fc04c3e0.zip
gcc-a926878ddbd5a98b272c22171ce58663fc04c3e0.tar.gz
gcc-a926878ddbd5a98b272c22171ce58663fc04c3e0.tar.bz2
Merge branch 'autopar_rebase2' into autopar_develdevel/autopar_devel
Quickly commit changes in the rebase branch.
Diffstat (limited to 'gcc/explow.c')
-rw-r--r--gcc/explow.c23
1 file changed, 13 insertions, 10 deletions
diff --git a/gcc/explow.c b/gcc/explow.c
index b838f03..0fbc6d2 100644
--- a/gcc/explow.c
+++ b/gcc/explow.c
@@ -43,7 +43,6 @@ along with GCC; see the file COPYING3. If not see
#include "output.h"
static rtx break_out_memory_refs (rtx);
-static void anti_adjust_stack_and_probe_stack_clash (rtx);
/* Truncate and perhaps sign-extend C as appropriate for MODE. */
@@ -1294,9 +1293,9 @@ get_dynamic_stack_size (rtx *psize, unsigned size_align,
/* Return the number of bytes to "protect" on the stack for -fstack-check.
- "protect" in the context of -fstack-check means how many bytes we
- should always ensure are available on the stack. More importantly
- this is how many bytes are skipped when probing the stack.
+ "protect" in the context of -fstack-check means how many bytes we need
+ to always ensure are available on the stack; as a consequence, this is
+ also how many bytes are first skipped when probing the stack.
On some targets we want to reuse the -fstack-check prologue support
to give a degree of protection against stack clashing style attacks.
@@ -1304,14 +1303,16 @@ get_dynamic_stack_size (rtx *psize, unsigned size_align,
In that scenario we do not want to skip bytes before probing as that
would render the stack clash protections useless.
- So we never use STACK_CHECK_PROTECT directly. Instead we indirect though
- this helper which allows us to provide different values for
- -fstack-check and -fstack-clash-protection. */
+ So we never use STACK_CHECK_PROTECT directly. Instead we indirectly
+   use it through this helper, which allows us to provide different values
+ for -fstack-check and -fstack-clash-protection. */
+
HOST_WIDE_INT
get_stack_check_protect (void)
{
if (flag_stack_clash_protection)
return 0;
+
return STACK_CHECK_PROTECT;
}
@@ -1533,6 +1534,8 @@ allocate_dynamic_stack_space (rtx size, unsigned size_align,
saved_stack_pointer_delta = stack_pointer_delta;
+ /* If stack checking or stack clash protection is requested,
+ then probe the stack while allocating space from it. */
if (flag_stack_check && STACK_CHECK_MOVING_SP)
anti_adjust_stack_and_probe (size, false);
else if (flag_stack_clash_protection)
@@ -1941,14 +1944,14 @@ emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
probes were not emitted.
2. It never skips probes, whereas anti_adjust_stack_and_probe will
- skip probes on the first couple PROBE_INTERVALs on the assumption
- they're done elsewhere.
+ skip the probe on the first PROBE_INTERVAL on the assumption it
+ was already done in the prologue and in previous allocations.
3. It only allocates and probes SIZE bytes, it does not need to
allocate/probe beyond that because this probing style does not
guarantee signal handling capability if the guard is hit. */
-static void
+void
anti_adjust_stack_and_probe_stack_clash (rtx size)
{
/* First ensure SIZE is Pmode. */