author     Richard Henderson <rth@redhat.com>    2013-05-23 12:10:56 -0700
committer  Richard Henderson <rth@gcc.gnu.org>   2013-05-23 12:10:56 -0700
commit     46aeac1b9f62be85dcfa7f6a0396440dbe10f321 (patch)
tree       498276e14359ea52894d65e4ff522d07b13b392e /gcc
parent     70cc153622fa089431478a774653cc816eda20af (diff)
re PR rtl-optimization/56742 (Optimization bug led to uncaught throw)
	PR target/56742
	* config/i386/i386.c (ix86_seh_fixup_eh_fallthru): New.
	(ix86_reorg): Call it.

From-SVN: r199264
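Not part of the commit itself: below is a minimal, hypothetical C++ sketch (not the reproducer attached to PR target/56742) of the code shape this patch targets on SEH targets such as x86_64-w64-mingw32. When the last instruction that can throw is immediately followed by the epilogue, the Windows system unwinder can treat the return address as already being inside the epilogue, compute incorrect offsets, and miss the enclosing handler; the patch inserts a nop between the two. A simplified standalone model of the new pass's scan logic follows the full diff below.

/* Hypothetical illustration only; this is not the testcase from
   PR target/56742.  Built for an SEH target (e.g. x86_64-w64-mingw32
   at -O2), the throwing call can end up as the last real insn before
   wrapper's epilogue, so its EH region "falls thru" into the epilogue.  */

#include <stdexcept>

volatile bool trigger = true;	/* assumed flag so the throw is not folded away */

static int
may_throw (void)
{
  if (trigger)
    throw std::runtime_error ("boom");
  return 0;
}

int
wrapper (void)
{
  try
    {
      return may_throw ();	/* last throwing insn right before the epilogue */
    }
  catch (...)
    {
      /* Without the nop inserted by the patch, the Windows system unwinder
	 could compute incorrect offsets and skip this handler.  */
      return -1;
    }
}

int
main (void)
{
  return wrapper () == -1 ? 0 : 1;
}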
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog            6
-rw-r--r--  gcc/config/i386/i386.c  43
2 files changed, 49 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 0288258..51e7b9e 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2013-05-23  Richard Henderson  <rth@redhat.com>
+
+	PR target/56742
+	* config/i386/i386.c (ix86_seh_fixup_eh_fallthru): New.
+	(ix86_reorg): Call it.
+
 2013-05-23  Uros Bizjak  <ubizjak@gmail.com>

 	PR target/57379
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 3470fef..20163b1 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -35564,6 +35564,46 @@ ix86_pad_short_function (void)
}
}
+/* Fix up a Windows system unwinder issue.  If an EH region falls thru into
+   the epilogue, the Windows system unwinder will apply epilogue logic and
+   produce incorrect offsets.  This can be avoided by adding a nop between
+   the last insn that can throw and the first insn of the epilogue.  */
+
+static void
+ix86_seh_fixup_eh_fallthru (void)
+{
+  edge e;
+  edge_iterator ei;
+
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+    {
+      rtx insn, next;
+
+      /* Find the beginning of the epilogue.  */
+      for (insn = BB_END (e->src); insn != NULL; insn = PREV_INSN (insn))
+	if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
+	  break;
+      if (insn == NULL)
+	continue;
+
+      /* We only care about preceding insns that can throw.  */
+      insn = prev_active_insn (insn);
+      if (insn == NULL || !can_throw_internal (insn))
+	continue;
+
+      /* Do not separate calls from their debug information.  */
+      for (next = NEXT_INSN (insn); next != NULL; next = NEXT_INSN (next))
+	if (NOTE_P (next)
+	    && (NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION
+		|| NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION))
+	  insn = next;
+	else
+	  break;
+
+      emit_insn_after (gen_nops (const1_rtx), insn);
+    }
+}
+
/* Implement machine specific optimizations. We implement padding of returns
for K8 CPUs and pass to avoid 4 jumps in the single 16 byte window. */
static void
@@ -35573,6 +35613,9 @@ ix86_reorg (void)
with old MDEP_REORGS that are not CFG based. Recompute it now. */
compute_bb_for_insn ();
+  if (TARGET_SEH && current_function_has_exception_handlers ())
+    ix86_seh_fixup_eh_fallthru ();
+
if (optimize && optimize_function_for_speed_p (cfun))
{
if (TARGET_PAD_SHORT_FUNCTION)
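
For readers unfamiliar with GCC's RTL insn chain, here is a rough, self-contained model of the scan performed by ix86_seh_fixup_eh_fallthru. The Insn record, the Kind enum, and seh_nop_insertion_point are invented names for illustration only; the real pass walks PREV_INSN/NEXT_INSN links, tests can_throw_internal, and finally calls emit_insn_after (gen_nops (const1_rtx), insn).

/* Simplified stand-in for one predecessor block of the exit block; the
   names and kinds here are invented and do not correspond to real RTL.  */

#include <cstddef>
#include <vector>

enum class Kind { ACTIVE, NOTE_EPILOGUE_BEG, NOTE_VAR_LOCATION, OTHER };

struct Insn
{
  Kind kind;
  bool can_throw;	/* stands in for can_throw_internal ()  */
};

/* Return the index after which a nop would be emitted, or -1 if the block
   needs no fixup.  Mirrors the patch: locate NOTE_INSN_EPILOGUE_BEG, step
   back to the previous active insn, require that it can throw, then skip
   forward over var-location notes so the nop does not separate a call
   from its debug information.  */

static int
seh_nop_insertion_point (const std::vector<Insn> &bb)
{
  /* Find the beginning of the epilogue, scanning backwards.  */
  int epi = -1;
  for (int i = (int) bb.size () - 1; i >= 0; --i)
    if (bb[i].kind == Kind::NOTE_EPILOGUE_BEG)
      {
	epi = i;
	break;
      }
  if (epi < 0)
    return -1;

  /* We only care about a preceding active insn that can throw.  */
  int insn = -1;
  for (int i = epi - 1; i >= 0; --i)
    if (bb[i].kind == Kind::ACTIVE)
      {
	insn = i;
	break;
      }
  if (insn < 0 || !bb[insn].can_throw)
    return -1;

  /* Do not separate calls from their debug information.  */
  int point = insn;
  for (size_t i = insn + 1; i < bb.size (); ++i)
    if (bb[i].kind == Kind::NOTE_VAR_LOCATION)
      point = (int) i;
    else
      break;

  return point;
}

In the patch itself this scan runs once per predecessor edge of the exit block, and only when TARGET_SEH is set and the function actually has exception handlers, so non-Windows targets and functions without EH regions pay nothing.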