Diffstat (limited to 'gcc/function.c'):
 gcc/function.c | 37 +++++++++++++++++++++++++------------
 1 file changed, 25 insertions(+), 12 deletions(-)
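
The hunk below makes stack_protect_epilogue try a new combined address + compare jump pattern first, and only fall back to the older expansion paths when the target does not provide it. As a rough illustration of that control flow only, here is a minimal standalone C sketch; try_combined_test, try_stack_protect_test and emit_generic_cmp_and_jump are hypothetical stand-ins for maybe_expand_jump_insn, targetm.gen_stack_protect_test and emit_cmp_and_jump_insns, not actual GCC APIs.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in: returns true if the target expanded the combined
   address + compare + jump pattern (maybe_expand_jump_insn in the patch).  */
static bool try_combined_test (void) { return false; }

/* Hypothetical stand-in for targetm.have_stack_protect_test ()
   && targetm.gen_stack_protect_test (...) succeeding.  */
static bool try_stack_protect_test (void) { return false; }

/* Hypothetical stand-in for emit_cmp_and_jump_insns.  */
static void emit_generic_cmp_and_jump (void)
{
  puts ("generic compare-and-jump emitted");
}

int
main (void)
{
  /* Same fallback order as the patched stack_protect_epilogue.  */
  if (!try_combined_test ())
    {
      if (!try_stack_protect_test ())
        emit_generic_cmp_and_jump ();
    }
  return 0;
}
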
diff --git a/gcc/function.c b/gcc/function.c
index dee303c..a95d2f8 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -4893,20 +4893,33 @@ stack_protect_epilogue (void)
rtx_code_label *label = gen_label_rtx ();
rtx x, y;
rtx_insn *seq;
+ struct expand_operand ops[3];
x = expand_normal (crtl->stack_protect_guard);
- if (guard_decl)
- y = expand_normal (guard_decl);
- else
- y = const0_rtx;
-
- /* Allow the target to compare Y with X without leaking either into
- a register. */
- if (targetm.have_stack_protect_test ()
- && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
- emit_insn (seq);
- else
- emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
+ create_fixed_operand (&ops[0], x);
+ create_fixed_operand (&ops[1], DECL_RTL (guard_decl));
+ create_fixed_operand (&ops[2], label);
+  /* Allow the target to compute the address of Y and compare it with X
+     without leaking Y into a register.  This combined address + compare
+     pattern allows the target to prevent spilling of any intermediate
+     results by splitting it after register allocation.  */
+ if (!maybe_expand_jump_insn (targetm.code_for_stack_protect_combined_test,
+ 3, ops))
+ {
+ if (guard_decl)
+ y = expand_normal (guard_decl);
+ else
+ y = const0_rtx;
+
+ /* Allow the target to compare Y with X without leaking either into
+ a register. */
+ if (targetm.have_stack_protect_test ()
+ && ((seq = targetm.gen_stack_protect_test (x, y, label))
+ != NULL_RTX))
+ emit_insn (seq);
+ else
+ emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
+ }
/* The noreturn predictor has been moved to the tree level. The rtl-level
predictors estimate this branch about 20%, which isn't enough to get