From 46aeac1b9f62be85dcfa7f6a0396440dbe10f321 Mon Sep 17 00:00:00 2001
From: Richard Henderson <rth@redhat.com>
Date: Thu, 23 May 2013 12:10:56 -0700
Subject: [PATCH] re PR rtl-optimization/56742 (Optimization bug lead to
 uncaught throw)

        PR target/56742
        * config/i386/i386.c (ix86_seh_fixup_eh_fallthru): New.
        (ix86_reorg): Call it.

From-SVN: r199264
---
 gcc/ChangeLog          |  6 ++++++
 gcc/config/i386/i386.c | 43 ++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 49 insertions(+)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 0288258f3ce8..51e7b9e91d52 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2013-05-23  Richard Henderson  <rth@redhat.com>
+
+	PR target/56742
+	* config/i386/i386.c (ix86_seh_fixup_eh_fallthru): New.
+	(ix86_reorg): Call it.
+
 2013-05-23  Uros Bizjak  <ubizjak@gmail.com>
 
 	PR target/57379
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 3470fef77b24..20163b1a8a1b 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -35564,6 +35564,46 @@ ix86_pad_short_function (void)
     }
 }
 
+/* Fix up a Windows system unwinder issue.  If an EH region falls thru into
+   the epilogue, the Windows system unwinder will apply epilogue logic and
+   produce incorrect offsets.  This can be avoided by adding a nop between
+   the last insn that can throw and the first insn of the epilogue.  */
+
+static void
+ix86_seh_fixup_eh_fallthru (void)
+{
+  edge e;
+  edge_iterator ei;
+
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+    {
+      rtx insn, next;
+
+      /* Find the beginning of the epilogue.  */
+      for (insn = BB_END (e->src); insn != NULL; insn = PREV_INSN (insn))
+	if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
+	  break;
+      if (insn == NULL)
+	continue;
+
+      /* We only care about preceeding insns that can throw.  */
+      insn = prev_active_insn (insn);
+      if (insn == NULL || !can_throw_internal (insn))
+	continue;
+
+      /* Do not separate calls from their debug information.  */
+      for (next = NEXT_INSN (insn); next != NULL; next = NEXT_INSN (next))
+	if (NOTE_P (next)
+	    && (NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION
+		|| NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION))
+	  insn = next;
+	else
+	  break;
+
+      emit_insn_after (gen_nops (const1_rtx), insn);
+    }
+}
+
 /* Implement machine specific optimizations.  We implement padding of returns
    for K8 CPUs and pass to avoid 4 jumps in the single 16 byte window.  */
 static void
@@ -35573,6 +35613,9 @@ ix86_reorg (void)
      with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
   compute_bb_for_insn ();
 
+  if (TARGET_SEH && current_function_has_exception_handlers ())
+    ix86_seh_fixup_eh_fallthru ();
+
   if (optimize && optimize_function_for_speed_p (cfun))
     {
       if (TARGET_PAD_SHORT_FUNCTION)
-- 
GitLab