summaryrefslogtreecommitdiffstats
path: root/contrib/gcc/except.c
diff options
context:
space:
mode:
authorobrien <obrien@FreeBSD.org>2002-02-01 18:16:02 +0000
committerobrien <obrien@FreeBSD.org>2002-02-01 18:16:02 +0000
commitc9ab9ae440a8066b2c2b85b157b1fdadcf09916a (patch)
tree086d9d6c8fbd4fc8fe4495059332f66bc0f8d12b /contrib/gcc/except.c
parent2ecfd8bd04b63f335c1ec6295740a4bfd97a4fa6 (diff)
downloadFreeBSD-src-c9ab9ae440a8066b2c2b85b157b1fdadcf09916a.zip
FreeBSD-src-c9ab9ae440a8066b2c2b85b157b1fdadcf09916a.tar.gz
Enlist the FreeBSD-CURRENT users as testers of what is to become Gcc 3.1.0.
These bits are taken from the FSF anoncvs repo on 1-Feb-2002 08:20 PST.
Diffstat (limited to 'contrib/gcc/except.c')
-rw-r--r--contrib/gcc/except.c5487
1 files changed, 3183 insertions, 2304 deletions
diff --git a/contrib/gcc/except.c b/contrib/gcc/except.c
index cc6fc29..606c0e6 100644
--- a/contrib/gcc/except.c
+++ b/contrib/gcc/except.c
@@ -1,23 +1,24 @@
/* Implements exception handling.
- Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.
+ Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
+ 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* An exception is an event that can be signaled from within a
@@ -43,1096 +44,992 @@ Boston, MA 02111-1307, USA. */
exception, and thus there is the concept of "throwing" the
exception up the call stack.
- There are two major codegen options for exception handling. The
- flag -fsjlj-exceptions can be used to select the setjmp/longjmp
- approach, which is the default. -fno-sjlj-exceptions can be used to
- get the PC range table approach. While this is a compile time
- flag, an entire application must be compiled with the same codegen
- option. The first is a PC range table approach, the second is a
- setjmp/longjmp based scheme. We will first discuss the PC range
- table approach, after that, we will discuss the setjmp/longjmp
- based approach.
-
- It is appropriate to speak of the "context of a throw". This
- context refers to the address where the exception is thrown from,
- and is used to determine which exception region will handle the
- exception.
-
- Regions of code within a function can be marked such that if it
- contains the context of a throw, control will be passed to a
- designated "exception handler". These areas are known as "exception
- regions". Exception regions cannot overlap, but they can be nested
- to any arbitrary depth. Also, exception regions cannot cross
- function boundaries.
-
- Exception handlers can either be specified by the user (which we
- will call a "user-defined handler") or generated by the compiler
- (which we will designate as a "cleanup"). Cleanups are used to
- perform tasks such as destruction of objects allocated on the
- stack.
-
- In the current implementation, cleanups are handled by allocating an
- exception region for the area that the cleanup is designated for,
- and the handler for the region performs the cleanup and then
- rethrows the exception to the outer exception region. From the
- standpoint of the current implementation, there is little
- distinction made between a cleanup and a user-defined handler, and
- the phrase "exception handler" can be used to refer to either one
- equally well. (The section "Future Directions" below discusses how
- this will change).
-
- Each object file that is compiled with exception handling contains
- a static array of exception handlers named __EXCEPTION_TABLE__.
- Each entry contains the starting and ending addresses of the
- exception region, and the address of the handler designated for
- that region.
-
- If the target does not use the DWARF 2 frame unwind information, at
- program startup each object file invokes a function named
- __register_exceptions with the address of its local
- __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
- is responsible for recording all of the exception regions into one list
- (which is kept in a static variable named exception_table_list).
-
- On targets that support crtstuff.c, the unwind information
- is stored in a section named .eh_frame and the information for the
- entire shared object or program is registered with a call to
- __register_frame_info. On other targets, the information for each
- translation unit is registered from the file generated by collect2.
- __register_frame_info is defined in frame.c, and is responsible for
- recording all of the unwind regions into one list (which is kept in a
- static variable named unwind_table_list).
-
- The function __throw is actually responsible for doing the
- throw. On machines that have unwind info support, __throw is generated
- by code in libgcc2.c, otherwise __throw is generated on a
- per-object-file basis for each source file compiled with
- -fexceptions by the C++ frontend. Before __throw is invoked,
- the current context of the throw needs to be placed in the global
- variable __eh_pc.
-
- __throw attempts to find the appropriate exception handler for the
- PC value stored in __eh_pc by calling __find_first_exception_table_match
- (which is defined in libgcc2.c). If __find_first_exception_table_match
- finds a relevant handler, __throw transfers control directly to it.
-
- If a handler for the context being thrown from can't be found, __throw
- walks (see Walking the stack below) the stack up the dynamic call chain to
- continue searching for an appropriate exception handler based upon the
- caller of the function it last sought a exception handler for. It stops
- then either an exception handler is found, or when the top of the
- call chain is reached.
-
- If no handler is found, an external library function named
- __terminate is called. If a handler is found, then we restart
- our search for a handler at the end of the call chain, and repeat
- the search process, but instead of just walking up the call chain,
- we unwind the call chain as we walk up it.
-
- Internal implementation details:
-
- To associate a user-defined handler with a block of statements, the
- function expand_start_try_stmts is used to mark the start of the
- block of statements with which the handler is to be associated
- (which is known as a "try block"). All statements that appear
- afterwards will be associated with the try block.
-
- A call to expand_start_all_catch marks the end of the try block,
- and also marks the start of the "catch block" (the user-defined
- handler) associated with the try block.
-
- This user-defined handler will be invoked for *every* exception
- thrown with the context of the try block. It is up to the handler
- to decide whether or not it wishes to handle any given exception,
- as there is currently no mechanism in this implementation for doing
- this. (There are plans for conditionally processing an exception
- based on its "type", which will provide a language-independent
- mechanism).
-
- If the handler chooses not to process the exception (perhaps by
- looking at an "exception type" or some other additional data
- supplied with the exception), it can fall through to the end of the
- handler. expand_end_all_catch and expand_leftover_cleanups
- add additional code to the end of each handler to take care of
- rethrowing to the outer exception handler.
-
- The handler also has the option to continue with "normal flow of
- code", or in other words to resume executing at the statement
- immediately after the end of the exception region. The variable
- caught_return_label_stack contains a stack of labels, and jumping
- to the topmost entry's label via expand_goto will resume normal
- flow to the statement immediately after the end of the exception
- region. If the handler falls through to the end, the exception will
- be rethrown to the outer exception region.
-
- The instructions for the catch block are kept as a separate
- sequence, and will be emitted at the end of the function along with
- the handlers specified via expand_eh_region_end. The end of the
- catch block is marked with expand_end_all_catch.
-
- Any data associated with the exception must currently be handled by
- some external mechanism maintained in the frontend. For example,
- the C++ exception mechanism passes an arbitrary value along with
- the exception, and this is handled in the C++ frontend by using a
- global variable to hold the value. (This will be changing in the
- future.)
-
- The mechanism in C++ for handling data associated with the
- exception is clearly not thread-safe. For a thread-based
- environment, another mechanism must be used (possibly using a
- per-thread allocation mechanism if the size of the area that needs
- to be allocated isn't known at compile time.)
-
- Internally-generated exception regions (cleanups) are marked by
- calling expand_eh_region_start to mark the start of the region,
- and expand_eh_region_end (handler) is used to both designate the
- end of the region and to associate a specified handler/cleanup with
- the region. The rtl code in HANDLER will be invoked whenever an
- exception occurs in the region between the calls to
- expand_eh_region_start and expand_eh_region_end. After HANDLER is
- executed, additional code is emitted to handle rethrowing the
- exception to the outer exception handler. The code for HANDLER will
- be emitted at the end of the function.
-
- TARGET_EXPRs can also be used to designate exception regions. A
- TARGET_EXPR gives an unwind-protect style interface commonly used
- in functional languages such as LISP. The associated expression is
- evaluated, and whether or not it (or any of the functions that it
- calls) throws an exception, the protect expression is always
- invoked. This implementation takes care of the details of
- associating an exception table entry with the expression and
- generating the necessary code (it actually emits the protect
- expression twice, once for normal flow and once for the exception
- case). As for the other handlers, the code for the exception case
- will be emitted at the end of the function.
-
- Cleanups can also be specified by using add_partial_entry (handler)
- and end_protect_partials. add_partial_entry creates the start of
- a new exception region; HANDLER will be invoked if an exception is
- thrown with the context of the region between the calls to
- add_partial_entry and end_protect_partials. end_protect_partials is
- used to mark the end of these regions. add_partial_entry can be
- called as many times as needed before calling end_protect_partials.
- However, end_protect_partials should only be invoked once for each
- group of calls to add_partial_entry as the entries are queued
- and all of the outstanding entries are processed simultaneously
- when end_protect_partials is invoked. Similarly to the other
- handlers, the code for HANDLER will be emitted at the end of the
- function.
-
- The generated RTL for an exception region includes
- NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
- the start and end of the exception region. A unique label is also
- generated at the start of the exception region, which is available
- by looking at the ehstack variable. The topmost entry corresponds
- to the current region.
-
- In the current implementation, an exception can only be thrown from
- a function call (since the mechanism used to actually throw an
- exception involves calling __throw). If an exception region is
- created but no function calls occur within that region, the region
- can be safely optimized away (along with its exception handlers)
- since no exceptions can ever be caught in that region. This
- optimization is performed unless -fasynchronous-exceptions is
- given. If the user wishes to throw from a signal handler, or other
- asynchronous place, -fasynchronous-exceptions should be used when
- compiling for maximally correct code, at the cost of additional
- exception regions. Using -fasynchronous-exceptions only produces
- code that is reasonably safe in such situations, but a correct
- program cannot rely upon this working. It can be used in failsafe
- code, where trying to continue on, and proceeding with potentially
- incorrect results is better than halting the program.
-
-
- Walking the stack:
-
- The stack is walked by starting with a pointer to the current
- frame, and finding the pointer to the callers frame. The unwind info
- tells __throw how to find it.
-
- Unwinding the stack:
-
- When we use the term unwinding the stack, we mean undoing the
- effects of the function prologue in a controlled fashion so that we
- still have the flow of control. Otherwise, we could just return
- (jump to the normal end of function epilogue).
-
- This is done in __throw in libgcc2.c when we know that a handler exists
- in a frame higher up the call stack than its immediate caller.
-
- To unwind, we find the unwind data associated with the frame, if any.
- If we don't find any, we call the library routine __terminate. If we do
- find it, we use the information to copy the saved register values from
- that frame into the register save area in the frame for __throw, return
- into a stub which updates the stack pointer, and jump to the handler.
- The normal function epilogue for __throw handles restoring the saved
- values into registers.
-
- When unwinding, we use this method if we know it will
- work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
- an inline unwinder will have been emitted for any function that
- __unwind_function cannot unwind. The inline unwinder appears as a
- normal exception handler for the entire function, for any function
- that we know cannot be unwound by __unwind_function. We inform the
- compiler of whether a function can be unwound with
- __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
- when the unwinder isn't needed. __unwind_function is used as an
- action of last resort. If no other method can be used for
- unwinding, __unwind_function is used. If it cannot unwind, it
- should call __terminate.
-
- By default, if the target-specific backend doesn't supply a definition
- for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
- unwinders will be used instead. The main tradeoff here is in text space
- utilization. Obviously, if inline unwinders have to be generated
- repeatedly, this uses much more space than if a single routine is used.
-
- However, it is simply not possible on some platforms to write a
- generalized routine for doing stack unwinding without having some
- form of additional data associated with each function. The current
- implementation can encode this data in the form of additional
- machine instructions or as static data in tabular form. The later
- is called the unwind data.
-
- The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
- or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
- defined and has a non-zero value, a per-function unwinder is not emitted
- for the current function. If the static unwind data is supported, then
- a per-function unwinder is not emitted.
-
- On some platforms it is possible that neither __unwind_function
- nor inlined unwinders are available. For these platforms it is not
- possible to throw through a function call, and abort will be
- invoked instead of performing the throw.
-
- The reason the unwind data may be needed is that on some platforms
- the order and types of data stored on the stack can vary depending
- on the type of function, its arguments and returned values, and the
- compilation options used (optimization versus non-optimization,
- -fomit-frame-pointer, processor variations, etc).
-
- Unfortunately, this also means that throwing through functions that
- aren't compiled with exception handling support will still not be
- possible on some platforms. This problem is currently being
- investigated, but no solutions have been found that do not imply
- some unacceptable performance penalties.
-
- Future directions:
-
- Currently __throw makes no differentiation between cleanups and
- user-defined exception regions. While this makes the implementation
- simple, it also implies that it is impossible to determine if a
- user-defined exception handler exists for a given exception without
- completely unwinding the stack in the process. This is undesirable
- from the standpoint of debugging, as ideally it would be possible
- to trap unhandled exceptions in the debugger before the process of
- unwinding has even started.
-
- This problem can be solved by marking user-defined handlers in a
- special way (probably by adding additional bits to exception_table_list).
- A two-pass scheme could then be used by __throw to iterate
- through the table. The first pass would search for a relevant
- user-defined handler for the current context of the throw, and if
- one is found, the second pass would then invoke all needed cleanups
- before jumping to the user-defined handler.
-
- Many languages (including C++ and Ada) make execution of a
- user-defined handler conditional on the "type" of the exception
- thrown. (The type of the exception is actually the type of the data
- that is thrown with the exception.) It will thus be necessary for
- __throw to be able to determine if a given user-defined
- exception handler will actually be executed, given the type of
- exception.
-
- One scheme is to add additional information to exception_table_list
- as to the types of exceptions accepted by each handler. __throw
- can do the type comparisons and then determine if the handler is
- actually going to be executed.
-
- There is currently no significant level of debugging support
- available, other than to place a breakpoint on __throw. While
- this is sufficient in most cases, it would be helpful to be able to
- know where a given exception was going to be thrown to before it is
- actually thrown, and to be able to choose between stopping before
- every exception region (including cleanups), or just user-defined
- exception regions. This should be possible to do in the two-pass
- scheme by adding additional labels to __throw for appropriate
- breakpoints, and additional debugger commands could be added to
- query various state variables to determine what actions are to be
- performed next.
-
- Another major problem that is being worked on is the issue with stack
- unwinding on various platforms. Currently the only platforms that have
- support for the generation of a generic unwinder are the SPARC and MIPS.
- All other ports require per-function unwinders, which produce large
- amounts of code bloat.
-
- For setjmp/longjmp based exception handling, some of the details
- are as above, but there are some additional details. This section
- discusses the details.
-
- We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
- optimize EH regions yet. We don't have to worry about machine
- specific issues with unwinding the stack, as we rely upon longjmp
- for all the machine specific details. There is no variable context
- of a throw, just the one implied by the dynamic handler stack
- pointed to by the dynamic handler chain. There is no exception
- table, and no calls to __register_exceptions. __sjthrow is used
- instead of __throw, and it works by using the dynamic handler
- chain, and longjmp. -fasynchronous-exceptions has no effect, as
- the elimination of trivial exception regions is not yet performed.
-
- A frontend can set protect_cleanup_actions_with_terminate when all
- the cleanup actions should be protected with an EH region that
- calls terminate when an unhandled exception is throw. C++ does
- this, Ada does not. */
+ [ Add updated documentation on how to use this. ] */
#include "config.h"
-#include "defaults.h"
-#include "eh-common.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
-#include "except.h"
#include "function.h"
-#include "insn-flags.h"
#include "expr.h"
-#include "insn-codes.h"
-#include "regs.h"
-#include "hard-reg-set.h"
+#include "libfuncs.h"
#include "insn-config.h"
-#include "recog.h"
+#include "except.h"
+#include "integrate.h"
+#include "hard-reg-set.h"
+#include "basic-block.h"
#include "output.h"
+#include "dwarf2asm.h"
+#include "dwarf2out.h"
+#include "dwarf2.h"
#include "toplev.h"
+#include "hashtab.h"
#include "intl.h"
-#include "obstack.h"
-
-/* One to use setjmp/longjmp method of generating code for exception
- handling. */
-
-int exceptions_via_longjmp = 2;
+#include "ggc.h"
+#include "tm_p.h"
+#include "target.h"
+
+/* Provide defaults for stuff that may not be defined when using
+ sjlj exceptions. */
+#ifndef EH_RETURN_STACKADJ_RTX
+#define EH_RETURN_STACKADJ_RTX 0
+#endif
+#ifndef EH_RETURN_HANDLER_RTX
+#define EH_RETURN_HANDLER_RTX 0
+#endif
+#ifndef EH_RETURN_DATA_REGNO
+#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
+#endif
-/* One to enable asynchronous exception support. */
-int asynchronous_exceptions = 0;
+/* Nonzero means enable synchronous exceptions for non-call instructions. */
+int flag_non_call_exceptions;
-/* One to protect cleanup actions with a handler that calls
- __terminate, zero otherwise. */
+/* Protect cleanup actions with must-not-throw regions, with a call
+ to the given failure handler. */
+tree (*lang_protect_cleanup_actions) PARAMS ((void));
-int protect_cleanup_actions_with_terminate;
+/* Return true if type A catches type B. */
+int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
-/* A list of labels used for exception handlers. Created by
- find_exception_handler_labels for the optimization passes. */
+/* Map a type to a runtime object to match type. */
+tree (*lang_eh_runtime_type) PARAMS ((tree));
+/* A list of labels used for exception handlers. */
rtx exception_handler_labels;
-/* The EH context. Nonzero if the function has already
- fetched a pointer to the EH context for exception handling. */
+static int call_site_base;
+static unsigned int sjlj_funcdef_number;
+static htab_t type_to_runtime_map;
+
+/* Describe the SjLj_Function_Context structure. */
+static tree sjlj_fc_type_node;
+static int sjlj_fc_call_site_ofs;
+static int sjlj_fc_data_ofs;
+static int sjlj_fc_personality_ofs;
+static int sjlj_fc_lsda_ofs;
+static int sjlj_fc_jbuf_ofs;
+
+/* Describes one exception region. */
+struct eh_region
+{
+ /* The immediately surrounding region. */
+ struct eh_region *outer;
-rtx current_function_ehc;
+ /* The list of immediately contained regions. */
+ struct eh_region *inner;
+ struct eh_region *next_peer;
-/* A stack used for keeping track of the currently active exception
- handling region. As each exception region is started, an entry
- describing the region is pushed onto this stack. The current
- region can be found by looking at the top of the stack, and as we
- exit regions, the corresponding entries are popped.
+ /* An identifier for this region. */
+ int region_number;
- Entries cannot overlap; they can be nested. So there is only one
- entry at most that corresponds to the current instruction, and that
- is the entry on the top of the stack. */
+ /* Each region does exactly one thing. */
+ enum eh_region_type
+ {
+ ERT_UNKNOWN = 0,
+ ERT_CLEANUP,
+ ERT_TRY,
+ ERT_CATCH,
+ ERT_ALLOWED_EXCEPTIONS,
+ ERT_MUST_NOT_THROW,
+ ERT_THROW,
+ ERT_FIXUP
+ } type;
+
+ /* Holds the action to perform based on the preceding type. */
+ union {
+ /* A list of catch blocks, a surrounding try block,
+ and the label for continuing after a catch. */
+ struct {
+ struct eh_region *catch;
+ struct eh_region *last_catch;
+ struct eh_region *prev_try;
+ rtx continue_label;
+ } try;
+
+ /* The list through the catch handlers, the list of type objects
+ matched, and the list of associated filters. */
+ struct {
+ struct eh_region *next_catch;
+ struct eh_region *prev_catch;
+ tree type_list;
+ tree filter_list;
+ } catch;
+
+ /* A tree_list of allowed types. */
+ struct {
+ tree type_list;
+ int filter;
+ } allowed;
+
+ /* The type given by a call to "throw foo();", or discovered
+ for a throw. */
+ struct {
+ tree type;
+ } throw;
+
+ /* Retain the cleanup expression even after expansion so that
+ we can match up fixup regions. */
+ struct {
+ tree exp;
+ } cleanup;
+
+ /* The real region (by expression and by pointer) that fixup code
+ should live in. */
+ struct {
+ tree cleanup_exp;
+ struct eh_region *real_region;
+ } fixup;
+ } u;
+
+ /* Entry point for this region's handler before landing pads are built. */
+ rtx label;
-static struct eh_stack ehstack;
+ /* Entry point for this region's handler from the runtime eh library. */
+ rtx landing_pad;
+ /* Entry point for this region's handler from an inner region. */
+ rtx post_landing_pad;
-/* This stack is used to represent what the current eh region is
- for the catch blocks beings processed */
+ /* The RESX insn for handing off control to the next outermost handler,
+ if appropriate. */
+ rtx resume;
+};
-static struct eh_stack catchstack;
+/* Used to save exception status for each function. */
+struct eh_status
+{
+ /* The tree of all regions for this function. */
+ struct eh_region *region_tree;
-/* A queue used for tracking which exception regions have closed but
- whose handlers have not yet been expanded. Regions are emitted in
- groups in an attempt to improve paging performance.
+ /* The same information as an indexable array. */
+ struct eh_region **region_array;
- As we exit a region, we enqueue a new entry. The entries are then
- dequeued during expand_leftover_cleanups and expand_start_all_catch,
+ /* The most recently open region. */
+ struct eh_region *cur_region;
- We should redo things so that we either take RTL for the handler,
- or we expand the handler expressed as a tree immediately at region
- end time. */
+ /* This is the region for which we are processing catch blocks. */
+ struct eh_region *try_region;
-static struct eh_queue ehqueue;
+ /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
+ node is itself a TREE_CHAINed list of handlers for regions that
+ are not yet closed. The TREE_VALUE of each entry contains the
+ handler for the corresponding entry on the ehstack. */
+ tree protect_list;
-/* Insns for all of the exception handlers for the current function.
- They are currently emitted by the frontend code. */
+ rtx filter;
+ rtx exc_ptr;
-rtx catch_clauses;
+ int built_landing_pads;
+ int last_region_number;
-/* A TREE_CHAINed list of handlers for regions that are not yet
- closed. The TREE_VALUE of each entry contains the handler for the
- corresponding entry on the ehstack. */
+ varray_type ttype_data;
+ varray_type ehspec_data;
+ varray_type action_record_data;
-static tree protect_list;
+ struct call_site_record
+ {
+ rtx landing_pad;
+ int action;
+ } *call_site_data;
+ int call_site_data_used;
+ int call_site_data_size;
+
+ rtx ehr_stackadj;
+ rtx ehr_handler;
+ rtx ehr_label;
+
+ rtx sjlj_fc;
+ rtx sjlj_exit_after;
+};
-/* Stacks to keep track of various labels. */
+
+static void mark_eh_region PARAMS ((struct eh_region *));
+
+static int t2r_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t t2r_hash PARAMS ((const PTR));
+static int t2r_mark_1 PARAMS ((PTR *, PTR));
+static void t2r_mark PARAMS ((PTR));
+static void add_type_for_runtime PARAMS ((tree));
+static tree lookup_type_for_runtime PARAMS ((tree));
+
+static struct eh_region *expand_eh_region_end PARAMS ((void));
+
+static rtx get_exception_filter PARAMS ((struct function *));
+
+static void collect_eh_region_array PARAMS ((void));
+static void resolve_fixup_regions PARAMS ((void));
+static void remove_fixup_regions PARAMS ((void));
+static void remove_unreachable_regions PARAMS ((rtx));
+static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
+
+static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
+ struct inline_remap *));
+static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
+ struct eh_region **));
+static int ttypes_filter_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t ttypes_filter_hash PARAMS ((const PTR));
+static int ehspec_filter_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t ehspec_filter_hash PARAMS ((const PTR));
+static int add_ttypes_entry PARAMS ((htab_t, tree));
+static int add_ehspec_entry PARAMS ((htab_t, htab_t,
+ tree));
+static void assign_filter_values PARAMS ((void));
+static void build_post_landing_pads PARAMS ((void));
+static void connect_post_landing_pads PARAMS ((void));
+static void dw2_build_landing_pads PARAMS ((void));
+
+struct sjlj_lp_info;
+static bool sjlj_find_directly_reachable_regions
+ PARAMS ((struct sjlj_lp_info *));
+static void sjlj_assign_call_site_values
+ PARAMS ((rtx, struct sjlj_lp_info *));
+static void sjlj_mark_call_sites
+ PARAMS ((struct sjlj_lp_info *));
+static void sjlj_emit_function_enter PARAMS ((rtx));
+static void sjlj_emit_function_exit PARAMS ((void));
+static void sjlj_emit_dispatch_table
+ PARAMS ((rtx, struct sjlj_lp_info *));
+static void sjlj_build_landing_pads PARAMS ((void));
+
+static void remove_exception_handler_label PARAMS ((rtx));
+static void remove_eh_handler PARAMS ((struct eh_region *));
+
+struct reachable_info;
+
+/* The return value of reachable_next_level. */
+enum reachable_code
+{
+ /* The given exception is not processed by the given region. */
+ RNL_NOT_CAUGHT,
+ /* The given exception may need processing by the given region. */
+ RNL_MAYBE_CAUGHT,
+ /* The given exception is completely processed by the given region. */
+ RNL_CAUGHT,
+ /* The given exception is completely processed by the runtime. */
+ RNL_BLOCKED
+};
-/* Keeps track of the label to resume to should one want to resume
- normal control flow out of a handler (instead of, say, returning to
- the caller of the current function or exiting the program). */
+static int check_handled PARAMS ((tree, tree));
+static void add_reachable_handler
+ PARAMS ((struct reachable_info *, struct eh_region *,
+ struct eh_region *));
+static enum reachable_code reachable_next_level
+ PARAMS ((struct eh_region *, tree, struct reachable_info *));
+
+static int action_record_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t action_record_hash PARAMS ((const PTR));
+static int add_action_record PARAMS ((htab_t, int, int));
+static int collect_one_action_chain PARAMS ((htab_t,
+ struct eh_region *));
+static int add_call_site PARAMS ((rtx, int));
+
+static void push_uleb128 PARAMS ((varray_type *,
+ unsigned int));
+static void push_sleb128 PARAMS ((varray_type *, int));
+#ifndef HAVE_AS_LEB128
+static int dw2_size_of_call_site_table PARAMS ((void));
+static int sjlj_size_of_call_site_table PARAMS ((void));
+#endif
+static void dw2_output_call_site_table PARAMS ((void));
+static void sjlj_output_call_site_table PARAMS ((void));
-struct label_node *caught_return_label_stack = NULL;
+
+/* Routine to see if exception handling is turned on.
+ DO_WARN is non-zero if we want to inform the user that exception
+ handling is turned off.
-/* Keeps track of the label used as the context of a throw to rethrow an
- exception to the outer exception region. */
+ This is used to ensure that -fexceptions has been specified if the
+ compiler tries to use any exception-specific functions. */
-struct label_node *outer_context_label_stack = NULL;
+int
+doing_eh (do_warn)
+ int do_warn;
+{
+ if (! flag_exceptions)
+ {
+ static int warned = 0;
+ if (! warned && do_warn)
+ {
+ error ("exception handling disabled, use -fexceptions to enable");
+ warned = 1;
+ }
+ return 0;
+ }
+ return 1;
+}
-/* A random data area for the front end's own use. */
+
+void
+init_eh ()
+{
+ ggc_add_rtx_root (&exception_handler_labels, 1);
-struct label_node *false_label_stack = NULL;
+ if (! flag_exceptions)
+ return;
-/* Pseudos used to hold exception return data in the interim between
- __builtin_eh_return and the end of the function. */
+ type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
+ ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
-static rtx eh_return_context;
-static rtx eh_return_stack_adjust;
-static rtx eh_return_handler;
+ /* Create the SjLj_Function_Context structure. This should match
+ the definition in unwind-sjlj.c. */
+ if (USING_SJLJ_EXCEPTIONS)
+ {
+ tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
-/* Used to mark the eh return stub for flow, so that the Right Thing
- happens with the values for the hardregs therin. */
+ sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
+ ggc_add_tree_root (&sjlj_fc_type_node, 1);
-rtx eh_return_stub_label;
+ f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
+ build_pointer_type (sjlj_fc_type_node));
+ DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
-/* This is used for targets which can call rethrow with an offset instead
- of an address. This is subtracted from the rethrow label we are
- interested in. */
+ f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
+ integer_type_node);
+ DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
-static rtx first_rethrow_symbol = NULL_RTX;
-static rtx final_rethrow = NULL_RTX;
-static rtx last_rethrow_symbol = NULL_RTX;
+ tmp = build_index_type (build_int_2 (4 - 1, 0));
+ tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
+ f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
+ DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
+ f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
+ ptr_type_node);
+ DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
-/* Prototypes for local functions. */
+ f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
+ ptr_type_node);
+ DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
-static void push_eh_entry PROTO((struct eh_stack *));
-static struct eh_entry * pop_eh_entry PROTO((struct eh_stack *));
-static void enqueue_eh_entry PROTO((struct eh_queue *, struct eh_entry *));
-static struct eh_entry * dequeue_eh_entry PROTO((struct eh_queue *));
-static rtx call_get_eh_context PROTO((void));
-static void start_dynamic_cleanup PROTO((tree, tree));
-static void start_dynamic_handler PROTO((void));
-static void expand_rethrow PROTO((rtx));
-static void output_exception_table_entry PROTO((FILE *, int));
-static int can_throw PROTO((rtx));
-static rtx scan_region PROTO((rtx, int, int *));
-static void eh_regs PROTO((rtx *, rtx *, rtx *, int));
-static void set_insn_eh_region PROTO((rtx *, int));
#ifdef DONT_USE_BUILTIN_SETJMP
-static void jumpif_rtx PROTO((rtx, rtx));
+#ifdef JMP_BUF_SIZE
+ tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
+#else
+ /* Should be large enough for most systems, if it is not,
+ JMP_BUF_SIZE should be defined with the proper value. It will
+ also tend to be larger than necessary for most systems, a more
+ optimal port will define JMP_BUF_SIZE. */
+ tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
-
-rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
-
-/* Various support routines to manipulate the various data structures
- used by the exception handling code. */
-
-extern struct obstack permanent_obstack;
-
-/* Generate a SYMBOL_REF for rethrow to use */
-static rtx
-create_rethrow_ref (region_num)
- int region_num;
-{
- rtx def;
- char *ptr;
- char buf[60];
-
- push_obstacks_nochange ();
- end_temporary_allocation ();
-
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", region_num);
- ptr = (char *) obstack_copy0 (&permanent_obstack, buf, strlen (buf));
- def = gen_rtx_SYMBOL_REF (Pmode, ptr);
- SYMBOL_REF_NEED_ADJUST (def) = 1;
-
- pop_obstacks ();
- return def;
+#else
+ /* This is 2 for builtin_setjmp, plus whatever the target requires
+ via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
+ tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
+ / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
+#endif
+ tmp = build_index_type (tmp);
+ tmp = build_array_type (ptr_type_node, tmp);
+ f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
+#ifdef DONT_USE_BUILTIN_SETJMP
+ /* We don't know what the alignment requirements of the
+ runtime's jmp_buf has. Overestimate. */
+ DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
+ DECL_USER_ALIGN (f_jbuf) = 1;
+#endif
+ DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
+
+ TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
+ TREE_CHAIN (f_prev) = f_cs;
+ TREE_CHAIN (f_cs) = f_data;
+ TREE_CHAIN (f_data) = f_per;
+ TREE_CHAIN (f_per) = f_lsda;
+ TREE_CHAIN (f_lsda) = f_jbuf;
+
+ layout_type (sjlj_fc_type_node);
+
+ /* Cache the interesting field offsets so that we have
+ easy access from rtl. */
+ sjlj_fc_call_site_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
+ sjlj_fc_data_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
+ sjlj_fc_personality_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
+ sjlj_fc_lsda_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
+ sjlj_fc_jbuf_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
+ }
}
-/* Push a label entry onto the given STACK. */
-
void
-push_label_entry (stack, rlabel, tlabel)
- struct label_node **stack;
- rtx rlabel;
- tree tlabel;
+init_eh_for_function ()
{
- struct label_node *newnode
- = (struct label_node *) xmalloc (sizeof (struct label_node));
-
- if (rlabel)
- newnode->u.rlabel = rlabel;
- else
- newnode->u.tlabel = tlabel;
- newnode->chain = *stack;
- *stack = newnode;
+ cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
-/* Pop a label entry from the given STACK. */
+/* Mark EH for GC. */
-rtx
-pop_label_entry (stack)
- struct label_node **stack;
+static void
+mark_eh_region (region)
+ struct eh_region *region;
{
- rtx label;
- struct label_node *tempnode;
-
- if (! *stack)
- return NULL_RTX;
+ if (! region)
+ return;
- tempnode = *stack;
- label = tempnode->u.rlabel;
- *stack = (*stack)->chain;
- free (tempnode);
+ switch (region->type)
+ {
+ case ERT_UNKNOWN:
+ /* This can happen if a nested function is inside the body of a region
+ and we do a GC as part of processing it. */
+ break;
+ case ERT_CLEANUP:
+ ggc_mark_tree (region->u.cleanup.exp);
+ break;
+ case ERT_TRY:
+ ggc_mark_rtx (region->u.try.continue_label);
+ break;
+ case ERT_CATCH:
+ ggc_mark_tree (region->u.catch.type_list);
+ ggc_mark_tree (region->u.catch.filter_list);
+ break;
+ case ERT_ALLOWED_EXCEPTIONS:
+ ggc_mark_tree (region->u.allowed.type_list);
+ break;
+ case ERT_MUST_NOT_THROW:
+ break;
+ case ERT_THROW:
+ ggc_mark_tree (region->u.throw.type);
+ break;
+ case ERT_FIXUP:
+ ggc_mark_tree (region->u.fixup.cleanup_exp);
+ break;
+ default:
+ abort ();
+ }
- return label;
+ ggc_mark_rtx (region->label);
+ ggc_mark_rtx (region->resume);
+ ggc_mark_rtx (region->landing_pad);
+ ggc_mark_rtx (region->post_landing_pad);
}
-/* Return the top element of the given STACK. */
-
-tree
-top_label_entry (stack)
- struct label_node **stack;
+void
+mark_eh_status (eh)
+ struct eh_status *eh;
{
- if (! *stack)
- return NULL_TREE;
+ int i;
- return (*stack)->u.tlabel;
-}
+ if (eh == 0)
+ return;
-/* get an exception label. These must be on the permanent obstack */
+ /* If we've called collect_eh_region_array, use it. Otherwise walk
+ the tree non-recursively. */
+ if (eh->region_array)
+ {
+ for (i = eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *r = eh->region_array[i];
+ if (r && r->region_number == i)
+ mark_eh_region (r);
+ }
+ }
+ else if (eh->region_tree)
+ {
+ struct eh_region *r = eh->region_tree;
+ while (1)
+ {
+ mark_eh_region (r);
+ if (r->inner)
+ r = r->inner;
+ else if (r->next_peer)
+ r = r->next_peer;
+ else
+ {
+ do {
+ r = r->outer;
+ if (r == NULL)
+ goto tree_done;
+ } while (r->next_peer == NULL);
+ r = r->next_peer;
+ }
+ }
+ tree_done:;
+ }
-rtx
-gen_exception_label ()
-{
- rtx lab;
- lab = gen_label_rtx ();
- return lab;
-}
+ ggc_mark_tree (eh->protect_list);
+ ggc_mark_rtx (eh->filter);
+ ggc_mark_rtx (eh->exc_ptr);
+ ggc_mark_tree_varray (eh->ttype_data);
-/* Push a new eh_node entry onto STACK. */
+ if (eh->call_site_data)
+ {
+ for (i = eh->call_site_data_used - 1; i >= 0; --i)
+ ggc_mark_rtx (eh->call_site_data[i].landing_pad);
+ }
-static void
-push_eh_entry (stack)
- struct eh_stack *stack;
-{
- struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
- struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
-
- rtx rlab = gen_exception_label ();
- entry->finalization = NULL_TREE;
- entry->label_used = 0;
- entry->exception_handler_label = rlab;
- entry->false_label = NULL_RTX;
- if (! flag_new_exceptions)
- entry->outer_context = gen_label_rtx ();
- else
- entry->outer_context = create_rethrow_ref (CODE_LABEL_NUMBER (rlab));
- entry->rethrow_label = entry->outer_context;
+ ggc_mark_rtx (eh->ehr_stackadj);
+ ggc_mark_rtx (eh->ehr_handler);
+ ggc_mark_rtx (eh->ehr_label);
- node->entry = entry;
- node->chain = stack->top;
- stack->top = node;
+ ggc_mark_rtx (eh->sjlj_fc);
+ ggc_mark_rtx (eh->sjlj_exit_after);
}
-/* push an existing entry onto a stack. */
-static void
-push_entry (stack, entry)
- struct eh_stack *stack;
- struct eh_entry *entry;
+void
+free_eh_status (f)
+ struct function *f;
{
- struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
- node->entry = entry;
- node->chain = stack->top;
- stack->top = node;
-}
+ struct eh_status *eh = f->eh;
-/* Pop an entry from the given STACK. */
+ if (eh->region_array)
+ {
+ int i;
+ for (i = eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *r = eh->region_array[i];
+ /* Mind we don't free a region struct more than once. */
+ if (r && r->region_number == i)
+ free (r);
+ }
+ free (eh->region_array);
+ }
+ else if (eh->region_tree)
+ {
+ struct eh_region *next, *r = eh->region_tree;
+ while (1)
+ {
+ if (r->inner)
+ r = r->inner;
+ else if (r->next_peer)
+ {
+ next = r->next_peer;
+ free (r);
+ r = next;
+ }
+ else
+ {
+ do {
+ next = r->outer;
+ free (r);
+ r = next;
+ if (r == NULL)
+ goto tree_done;
+ } while (r->next_peer == NULL);
+ next = r->next_peer;
+ free (r);
+ r = next;
+ }
+ }
+ tree_done:;
+ }
-static struct eh_entry *
-pop_eh_entry (stack)
- struct eh_stack *stack;
-{
- struct eh_node *tempnode;
- struct eh_entry *tempentry;
-
- tempnode = stack->top;
- tempentry = tempnode->entry;
- stack->top = stack->top->chain;
- free (tempnode);
+ VARRAY_FREE (eh->ttype_data);
+ VARRAY_FREE (eh->ehspec_data);
+ VARRAY_FREE (eh->action_record_data);
+ if (eh->call_site_data)
+ free (eh->call_site_data);
- return tempentry;
+ free (eh);
+ f->eh = NULL;
+ exception_handler_labels = NULL;
}
-/* Enqueue an ENTRY onto the given QUEUE. */
+
+/* Start an exception handling region. All instructions emitted
+ after this point are considered to be part of the region until
+ expand_eh_region_end is invoked. */
-static void
-enqueue_eh_entry (queue, entry)
- struct eh_queue *queue;
- struct eh_entry *entry;
+void
+expand_eh_region_start ()
{
- struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
+ struct eh_region *new_region;
+ struct eh_region *cur_region;
+ rtx note;
- node->entry = entry;
- node->chain = NULL;
+ if (! doing_eh (0))
+ return;
- if (queue->head == NULL)
+ /* Insert a new blank region as a leaf in the tree. */
+ new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
+ cur_region = cfun->eh->cur_region;
+ new_region->outer = cur_region;
+ if (cur_region)
{
- queue->head = node;
+ new_region->next_peer = cur_region->inner;
+ cur_region->inner = new_region;
}
else
{
- queue->tail->chain = node;
+ new_region->next_peer = cfun->eh->region_tree;
+ cfun->eh->region_tree = new_region;
}
- queue->tail = node;
+ cfun->eh->cur_region = new_region;
+
+ /* Create a note marking the start of this region. */
+ new_region->region_number = ++cfun->eh->last_region_number;
+ note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
+ NOTE_EH_HANDLER (note) = new_region->region_number;
}
-/* Dequeue an entry from the given QUEUE. */
+/* Common code to end a region. Returns the region just ended. */
-static struct eh_entry *
-dequeue_eh_entry (queue)
- struct eh_queue *queue;
+static struct eh_region *
+expand_eh_region_end ()
{
- struct eh_node *tempnode;
- struct eh_entry *tempentry;
-
- if (queue->head == NULL)
- return NULL;
+ struct eh_region *cur_region = cfun->eh->cur_region;
+ rtx note;
- tempnode = queue->head;
- queue->head = queue->head->chain;
+ /* Create a note marking the end of this region. */
+ note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
+ NOTE_EH_HANDLER (note) = cur_region->region_number;
- tempentry = tempnode->entry;
- free (tempnode);
+ /* Pop. */
+ cfun->eh->cur_region = cur_region->outer;
- return tempentry;
+ return cur_region;
}
-static void
-receive_exception_label (handler_label)
- rtx handler_label;
+/* End an exception handling region for a cleanup. HANDLER is an
+ expression to expand for the cleanup. */
+
+void
+expand_eh_region_end_cleanup (handler)
+ tree handler;
{
- rtx around_label = NULL_RTX;
+ struct eh_region *region;
+ tree protect_cleanup_actions;
+ rtx around_label;
+ rtx data_save[2];
- if (! flag_new_exceptions || exceptions_via_longjmp)
- {
- around_label = gen_label_rtx ();
- emit_jump (around_label);
- emit_barrier ();
- }
+ if (! doing_eh (0))
+ return;
- emit_label (handler_label);
-
- if (! exceptions_via_longjmp)
- {
-#ifdef HAVE_exception_receiver
- if (HAVE_exception_receiver)
- emit_insn (gen_exception_receiver ());
- else
-#endif
-#ifdef HAVE_nonlocal_goto_receiver
- if (HAVE_nonlocal_goto_receiver)
- emit_insn (gen_nonlocal_goto_receiver ());
- else
-#endif
- { /* Nothing */ }
- }
- else
- {
-#ifndef DONT_USE_BUILTIN_SETJMP
- expand_builtin_setjmp_receiver (handler_label);
-#endif
- }
+ region = expand_eh_region_end ();
+ region->type = ERT_CLEANUP;
+ region->label = gen_label_rtx ();
+ region->u.cleanup.exp = handler;
- if (around_label)
- emit_label (around_label);
-}
+ around_label = gen_label_rtx ();
+ emit_jump (around_label);
-struct func_eh_entry
-{
- int range_number; /* EH region number from EH NOTE insn's */
- rtx rethrow_label; /* Label for rethrow */
- struct handler_info *handlers;
-};
+ emit_label (region->label);
+ /* Give the language a chance to specify an action to be taken if an
+ exception is thrown that would propagate out of the HANDLER. */
+ protect_cleanup_actions
+ = (lang_protect_cleanup_actions
+ ? (*lang_protect_cleanup_actions) ()
+ : NULL_TREE);
-/* table of function eh regions */
-static struct func_eh_entry *function_eh_regions = NULL;
-static int num_func_eh_entries = 0;
-static int current_func_eh_entry = 0;
+ if (protect_cleanup_actions)
+ expand_eh_region_start ();
-#define SIZE_FUNC_EH(X) (sizeof (struct func_eh_entry) * X)
+ /* In case this cleanup involves an inline destructor with a try block in
+ it, we need to save the EH return data registers around it. */
+ data_save[0] = gen_reg_rtx (Pmode);
+ emit_move_insn (data_save[0], get_exception_pointer (cfun));
+ data_save[1] = gen_reg_rtx (word_mode);
+ emit_move_insn (data_save[1], get_exception_filter (cfun));
-/* Add a new eh_entry for this function, and base it off of the information
- in the EH_ENTRY parameter. A NULL parameter is invalid.
- OUTER_CONTEXT is a label which is used for rethrowing. The number
- returned is an number which uniquely identifies this exception range. */
+ expand_expr (handler, const0_rtx, VOIDmode, 0);
-static int
-new_eh_region_entry (note_eh_region, rethrow)
- int note_eh_region;
- rtx rethrow;
-{
- if (current_func_eh_entry == num_func_eh_entries)
- {
- if (num_func_eh_entries == 0)
- {
- function_eh_regions =
- (struct func_eh_entry *) malloc (SIZE_FUNC_EH (50));
- num_func_eh_entries = 50;
- }
- else
- {
- num_func_eh_entries = num_func_eh_entries * 3 / 2;
- function_eh_regions = (struct func_eh_entry *)
- realloc (function_eh_regions, SIZE_FUNC_EH (num_func_eh_entries));
- }
- }
- function_eh_regions[current_func_eh_entry].range_number = note_eh_region;
- if (rethrow == NULL_RTX)
- function_eh_regions[current_func_eh_entry].rethrow_label =
- create_rethrow_ref (note_eh_region);
- else
- function_eh_regions[current_func_eh_entry].rethrow_label = rethrow;
- function_eh_regions[current_func_eh_entry].handlers = NULL;
+ emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
+ emit_move_insn (cfun->eh->filter, data_save[1]);
- return current_func_eh_entry++;
-}
+ if (protect_cleanup_actions)
+ expand_eh_region_end_must_not_throw (protect_cleanup_actions);
-/* Add new handler information to an exception range. The first parameter
- specifies the range number (returned from new_eh_entry()). The second
- parameter specifies the handler. By default the handler is inserted at
- the end of the list. A handler list may contain only ONE NULL_TREE
- typeinfo entry. Regardless where it is positioned, a NULL_TREE entry
- is always output as the LAST handler in the exception table for a region. */
+ /* We need any stack adjustment complete before the around_label. */
+ do_pending_stack_adjust ();
-void
-add_new_handler (region, newhandler)
- int region;
- struct handler_info *newhandler;
-{
- struct handler_info *last;
+ /* We delay the generation of the _Unwind_Resume until we generate
+ landing pads. We emit a marker here so as to get good control
+ flow data in the meantime. */
+ region->resume
+ = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
+ emit_barrier ();
- newhandler->next = NULL;
- last = function_eh_regions[region].handlers;
- if (last == NULL)
- function_eh_regions[region].handlers = newhandler;
- else
- {
- for ( ; ; last = last->next)
- {
- if (last->type_info == CATCH_ALL_TYPE)
- pedwarn ("additional handler after ...");
- if (last->next == NULL)
- break;
- }
- last->next = newhandler;
- }
+ emit_label (around_label);
}
-/* Remove a handler label. The handler label is being deleted, so all
- regions which reference this handler should have it removed from their
- list of possible handlers. Any region which has the final handler
- removed can be deleted. */
+/* End an exception handling region for a try block, and prepares
+ for subsequent calls to expand_start_catch. */
-void remove_handler (removing_label)
- rtx removing_label;
+void
+expand_start_all_catch ()
{
- struct handler_info *handler, *last;
- int x;
- for (x = 0 ; x < current_func_eh_entry; ++x)
- {
- last = NULL;
- handler = function_eh_regions[x].handlers;
- for ( ; handler; last = handler, handler = handler->next)
- if (handler->handler_label == removing_label)
- {
- if (last)
- {
- last->next = handler->next;
- handler = last;
- }
- else
- function_eh_regions[x].handlers = handler->next;
- }
- }
+ struct eh_region *region;
+
+ if (! doing_eh (1))
+ return;
+
+ region = expand_eh_region_end ();
+ region->type = ERT_TRY;
+ region->u.try.prev_try = cfun->eh->try_region;
+ region->u.try.continue_label = gen_label_rtx ();
+
+ cfun->eh->try_region = region;
+
+ emit_jump (region->u.try.continue_label);
}
-/* This function will return a malloc'd pointer to an array of
- void pointer representing the runtime match values that
- currently exist in all regions. */
+/* Begin a catch clause. TYPE is the type caught, a list of such types, or
+ null if this is a catch-all clause. Providing a type list enables to
+ associate the catch region with potentially several exception types, which
+ is useful e.g. for Ada. */
-int
-find_all_handler_type_matches (array)
- void ***array;
+void
+expand_start_catch (type_or_list)
+ tree type_or_list;
{
- struct handler_info *handler, *last;
- int x,y;
- void *val;
- void **ptr;
- int max_ptr;
- int n_ptr = 0;
+ struct eh_region *t, *c, *l;
+ tree type_list;
- *array = NULL;
+ if (! doing_eh (0))
+ return;
- if (!doing_eh (0) || ! flag_new_exceptions)
- return 0;
+ type_list = type_or_list;
- max_ptr = 100;
- ptr = (void **)malloc (max_ptr * sizeof (void *));
+ if (type_or_list)
+ {
+ /* Ensure to always end up with a type list to normalize further
+ processing, then register each type against the runtime types
+ map. */
+ tree type_node;
- if (ptr == NULL)
- return 0;
+ if (TREE_CODE (type_or_list) != TREE_LIST)
+ type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
- for (x = 0 ; x < current_func_eh_entry; x++)
- {
- last = NULL;
- handler = function_eh_regions[x].handlers;
- for ( ; handler; last = handler, handler = handler->next)
- {
- val = handler->type_info;
- if (val != NULL && val != CATCH_ALL_TYPE)
- {
- /* See if this match value has already been found. */
- for (y = 0; y < n_ptr; y++)
- if (ptr[y] == val)
- break;
-
- /* If we break early, we already found this value. */
- if (y < n_ptr)
- continue;
-
- /* Do we need to allocate more space? */
- if (n_ptr >= max_ptr)
- {
- max_ptr += max_ptr / 2;
- ptr = (void **)realloc (ptr, max_ptr * sizeof (void *));
- if (ptr == NULL)
- return 0;
- }
- ptr[n_ptr] = val;
- n_ptr++;
- }
- }
+ type_node = type_list;
+ for (; type_node; type_node = TREE_CHAIN (type_node))
+ add_type_for_runtime (TREE_VALUE (type_node));
}
- *array = ptr;
- return n_ptr;
-}
-/* Create a new handler structure initialized with the handler label and
- typeinfo fields passed in. */
+ expand_eh_region_start ();
-struct handler_info *
-get_new_handler (handler, typeinfo)
- rtx handler;
- void *typeinfo;
-{
- struct handler_info* ptr;
- ptr = (struct handler_info *) malloc (sizeof (struct handler_info));
- ptr->handler_label = handler;
- ptr->handler_number = CODE_LABEL_NUMBER (handler);
- ptr->type_info = typeinfo;
- ptr->next = NULL;
+ t = cfun->eh->try_region;
+ c = cfun->eh->cur_region;
+ c->type = ERT_CATCH;
+ c->u.catch.type_list = type_list;
+ c->label = gen_label_rtx ();
- return ptr;
+ l = t->u.try.last_catch;
+ c->u.catch.prev_catch = l;
+ if (l)
+ l->u.catch.next_catch = c;
+ else
+ t->u.try.catch = c;
+ t->u.try.last_catch = c;
+
+ emit_label (c->label);
}
+/* End a catch clause. Control will resume after the try/catch block. */
+void
+expand_end_catch ()
+{
+ struct eh_region *try_region, *catch_region;
-/* Find the index in function_eh_regions associated with a NOTE region. If
- the region cannot be found, a -1 is returned. This should never happen! */
+ if (! doing_eh (0))
+ return;
-int
-find_func_region (insn_region)
- int insn_region;
-{
- int x;
- for (x = 0; x < current_func_eh_entry; x++)
- if (function_eh_regions[x].range_number == insn_region)
- return x;
+ catch_region = expand_eh_region_end ();
+ try_region = cfun->eh->try_region;
- return -1;
+ emit_jump (try_region->u.try.continue_label);
}
-/* Get a pointer to the first handler in an exception region's list. */
+/* End a sequence of catch handlers for a try block. */
-struct handler_info *
-get_first_handler (region)
- int region;
+void
+expand_end_all_catch ()
{
- return function_eh_regions[find_func_region (region)].handlers;
+ struct eh_region *try_region;
+
+ if (! doing_eh (0))
+ return;
+
+ try_region = cfun->eh->try_region;
+ cfun->eh->try_region = try_region->u.try.prev_try;
+
+ emit_label (try_region->u.try.continue_label);
}
-/* Clean out the function_eh_region table and free all memory */
+/* End an exception region for an exception type filter. ALLOWED is a
+ TREE_LIST of types to be matched by the runtime. FAILURE is an
+ expression to invoke if a mismatch occurs.
-static void
-clear_function_eh_region ()
+ ??? We could use these semantics for calls to rethrow, too; if we can
+ see the surrounding catch clause, we know that the exception we're
+ rethrowing satisfies the "filter" of the catch type. */
+
+void
+expand_eh_region_end_allowed (allowed, failure)
+ tree allowed, failure;
{
- int x;
- struct handler_info *ptr, *next;
- for (x = 0; x < current_func_eh_entry; x++)
- for (ptr = function_eh_regions[x].handlers; ptr != NULL; ptr = next)
- {
- next = ptr->next;
- free (ptr);
- }
- free (function_eh_regions);
- num_func_eh_entries = 0;
- current_func_eh_entry = 0;
+ struct eh_region *region;
+ rtx around_label;
+
+ if (! doing_eh (0))
+ return;
+
+ region = expand_eh_region_end ();
+ region->type = ERT_ALLOWED_EXCEPTIONS;
+ region->u.allowed.type_list = allowed;
+ region->label = gen_label_rtx ();
+
+ for (; allowed ; allowed = TREE_CHAIN (allowed))
+ add_type_for_runtime (TREE_VALUE (allowed));
+
+ /* We must emit the call to FAILURE here, so that if this function
+ throws a different exception, that it will be processed by the
+ correct region. */
+
+ around_label = gen_label_rtx ();
+ emit_jump (around_label);
+
+ emit_label (region->label);
+ expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ /* We must adjust the stack before we reach the AROUND_LABEL because
+ the call to FAILURE does not occur on all paths to the
+ AROUND_LABEL. */
+ do_pending_stack_adjust ();
+
+ emit_label (around_label);
}
-/* Make a duplicate of an exception region by copying all the handlers
- for an exception region. Return the new handler index. The final
- parameter is a routine which maps old labels to new ones. */
+/* End an exception region for a must-not-throw filter. FAILURE is an
+ expression invoke if an uncaught exception propagates this far.
+
+ This is conceptually identical to expand_eh_region_end_allowed with
+ an empty allowed list (if you passed "std::terminate" instead of
+ "__cxa_call_unexpected"), but they are represented differently in
+ the C++ LSDA. */
-int
-duplicate_eh_handlers (old_note_eh_region, new_note_eh_region, map)
- int old_note_eh_region, new_note_eh_region;
- rtx (*map) PARAMS ((rtx));
+void
+expand_eh_region_end_must_not_throw (failure)
+ tree failure;
{
- struct handler_info *ptr, *new_ptr;
- int new_region, region;
+ struct eh_region *region;
+ rtx around_label;
- region = find_func_region (old_note_eh_region);
- if (region == -1)
- fatal ("Cannot duplicate non-existant exception region.");
+ if (! doing_eh (0))
+ return;
- /* duplicate_eh_handlers may have been called during a symbol remap. */
- new_region = find_func_region (new_note_eh_region);
- if (new_region != -1)
- return (new_region);
+ region = expand_eh_region_end ();
+ region->type = ERT_MUST_NOT_THROW;
+ region->label = gen_label_rtx ();
- new_region = new_eh_region_entry (new_note_eh_region, NULL_RTX);
+ /* We must emit the call to FAILURE here, so that if this function
+ throws a different exception, that it will be processed by the
+ correct region. */
- ptr = function_eh_regions[region].handlers;
+ around_label = gen_label_rtx ();
+ emit_jump (around_label);
- for ( ; ptr; ptr = ptr->next)
- {
- new_ptr = get_new_handler (map (ptr->handler_label), ptr->type_info);
- add_new_handler (new_region, new_ptr);
- }
+ emit_label (region->label);
+ expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
- return new_region;
+ emit_label (around_label);
}
+/* End an exception region for a throw. No handling goes on here,
+ but it's the easiest way for the front-end to indicate what type
+ is being thrown. */
-/* Given a rethrow symbol, find the EH region number this is for. */
-int
-eh_region_from_symbol (sym)
- rtx sym;
+void
+expand_eh_region_end_throw (type)
+ tree type;
{
- int x;
- if (sym == last_rethrow_symbol)
- return 1;
- for (x = 0; x < current_func_eh_entry; x++)
- if (function_eh_regions[x].rethrow_label == sym)
- return function_eh_regions[x].range_number;
- return -1;
+ struct eh_region *region;
+
+ if (! doing_eh (0))
+ return;
+
+ region = expand_eh_region_end ();
+ region->type = ERT_THROW;
+ region->u.throw.type = type;
}
+/* End a fixup region. Within this region the cleanups for the immediately
+ enclosing region are _not_ run. This is used for goto cleanup to avoid
+ destroying an object twice.
+
+ This would be an extraordinarily simple prospect, were it not for the
+ fact that we don't actually know what the immediately enclosing region
+ is. This surprising fact is because expand_cleanups is currently
+ generating a sequence that it will insert somewhere else. We collect
+ the proper notion of "enclosing" in convert_from_eh_region_ranges. */
-/* When inlining/unrolling, we have to map the symbols passed to
- __rethrow as well. This performs the remap. If a symbol isn't foiund,
- the original one is returned. This is not an efficient routine,
- so don't call it on everything!! */
-rtx
-rethrow_symbol_map (sym, map)
- rtx sym;
- rtx (*map) PARAMS ((rtx));
+void
+expand_eh_region_end_fixup (handler)
+ tree handler;
{
- int x, y;
- for (x = 0; x < current_func_eh_entry; x++)
- if (function_eh_regions[x].rethrow_label == sym)
- {
- /* We've found the original region, now lets determine which region
- this now maps to. */
- rtx l1 = function_eh_regions[x].handlers->handler_label;
- rtx l2 = map (l1);
- y = CODE_LABEL_NUMBER (l2); /* This is the new region number */
- x = find_func_region (y); /* Get the new permanent region */
- if (x == -1) /* Hmm, Doesn't exist yet */
- {
- x = duplicate_eh_handlers (CODE_LABEL_NUMBER (l1), y, map);
- /* Since we're mapping it, it must be used. */
- SYMBOL_REF_USED (function_eh_regions[x].rethrow_label) = 1;
- }
- return function_eh_regions[x].rethrow_label;
- }
- return sym;
+ struct eh_region *fixup;
+
+ if (! doing_eh (0))
+ return;
+
+ fixup = expand_eh_region_end ();
+ fixup->type = ERT_FIXUP;
+ fixup->u.fixup.cleanup_exp = handler;
}
-int
-rethrow_used (region)
- int region;
+/* Return an rtl expression for a pointer to the exception object
+ within a handler. */
+
+rtx
+get_exception_pointer (fun)
+ struct function *fun;
{
- if (flag_new_exceptions)
+ rtx exc_ptr = fun->eh->exc_ptr;
+ if (fun == cfun && ! exc_ptr)
{
- rtx lab = function_eh_regions[find_func_region (region)].rethrow_label;
- return (SYMBOL_REF_USED (lab));
+ exc_ptr = gen_reg_rtx (Pmode);
+ fun->eh->exc_ptr = exc_ptr;
}
- return 0;
+ return exc_ptr;
}
-
-/* Routine to see if exception handling is turned on.
- DO_WARN is non-zero if we want to inform the user that exception
- handling is turned off.
-
- This is used to ensure that -fexceptions has been specified if the
- compiler tries to use any exception-specific functions. */
+/* Return an rtl expression for the exception dispatch filter
+ within a handler. */
-int
-doing_eh (do_warn)
- int do_warn;
+static rtx
+get_exception_filter (fun)
+ struct function *fun;
{
- if (! flag_exceptions)
+ rtx filter = fun->eh->filter;
+ if (fun == cfun && ! filter)
{
- static int warned = 0;
- if (! warned && do_warn)
- {
- error ("exception handling disabled, use -fexceptions to enable");
- warned = 1;
- }
- return 0;
+ filter = gen_reg_rtx (word_mode);
+ fun->eh->filter = filter;
}
- return 1;
+ return filter;
}
+
+/* Begin a region that will contain entries created with
+ add_partial_entry. */
-/* Given a return address in ADDR, determine the address we should use
- to find the corresponding EH region. */
-
-rtx
-eh_outer_context (addr)
- rtx addr;
+void
+begin_protect_partials ()
{
- /* First mask out any unwanted bits. */
-#ifdef MASK_RETURN_ADDR
- expand_and (addr, MASK_RETURN_ADDR, addr);
-#endif
-
- /* Then adjust to find the real return address. */
-#if defined (RETURN_ADDR_OFFSET)
- addr = plus_constant (addr, RETURN_ADDR_OFFSET);
-#endif
-
- return addr;
+ /* Push room for a new list. */
+ cfun->eh->protect_list
+ = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}
/* Start a new exception region for a region of code that has a
cleanup action and push the HANDLER for the region onto
protect_list. All of the regions created with add_partial_entry
- will be ended when end_protect_partials is invoked. */
+ will be ended when end_protect_partials is invoked.
+
+ ??? The only difference between this purpose and that of
+ expand_decl_cleanup is that in this case, we only want the cleanup to
+ run if an exception is thrown. This should also be handled using
+ binding levels. */
void
add_partial_entry (handler)
@@ -1140,1531 +1037,2003 @@ add_partial_entry (handler)
{
expand_eh_region_start ();
- /* Make sure the entry is on the correct obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* Because this is a cleanup action, we may have to protect the handler
- with __terminate. */
- handler = protect_with_terminate (handler);
-
- protect_list = tree_cons (NULL_TREE, handler, protect_list);
- pop_obstacks ();
+ /* Add this entry to the front of the list. */
+ TREE_VALUE (cfun->eh->protect_list)
+ = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}
-/* Emit code to get EH context to current function. */
+/* End all the pending exception regions on protect_list. */
-static rtx
-call_get_eh_context ()
+void
+end_protect_partials ()
{
- static tree fn;
- tree expr;
-
- if (fn == NULL_TREE)
- {
- tree fntype;
- fn = get_identifier ("__get_eh_context");
- push_obstacks_nochange ();
- end_temporary_allocation ();
- fntype = build_pointer_type (build_pointer_type
- (build_pointer_type (void_type_node)));
- fntype = build_function_type (fntype, NULL_TREE);
- fn = build_decl (FUNCTION_DECL, fn, fntype);
- DECL_EXTERNAL (fn) = 1;
- TREE_PUBLIC (fn) = 1;
- DECL_ARTIFICIAL (fn) = 1;
- TREE_READONLY (fn) = 1;
- make_decl_rtl (fn, NULL_PTR, 1);
- assemble_external (fn);
- pop_obstacks ();
- }
+ tree t;
- expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- expr, NULL_TREE, NULL_TREE);
- TREE_SIDE_EFFECTS (expr) = 1;
+ /* Pop the topmost entry. */
+ t = TREE_VALUE (cfun->eh->protect_list);
+ cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
- return copy_to_reg (expand_expr (expr, NULL_RTX, VOIDmode, 0));
+ /* End all the exception regions. */
+ for (; t; t = TREE_CHAIN (t))
+ expand_eh_region_end_cleanup (TREE_VALUE (t));
}
-/* Get a reference to the EH context.
- We will only generate a register for the current function EH context here,
- and emit a USE insn to mark that this is a EH context register.
+
+/* This section is for the exception handling specific optimization pass. */
- Later, emit_eh_context will emit needed call to __get_eh_context
- in libgcc2, and copy the value to the register we have generated. */
+/* Random access the exception region tree. It's just as simple to
+ collect the regions this way as in expand_eh_region_start, but
+ without having to realloc memory. */
-rtx
-get_eh_context ()
+static void
+collect_eh_region_array ()
{
- if (current_function_ehc == 0)
- {
- rtx insn;
+ struct eh_region **array, *i;
+
+ i = cfun->eh->region_tree;
+ if (! i)
+ return;
- current_function_ehc = gen_reg_rtx (Pmode);
-
- insn = gen_rtx_USE (GET_MODE (current_function_ehc),
- current_function_ehc);
- insn = emit_insn_before (insn, get_first_nonparm_insn ());
+ array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
+ cfun->eh->region_array = array;
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EH_CONTEXT, current_function_ehc,
- REG_NOTES (insn));
+ while (1)
+ {
+ array[i->region_number] = i;
+
+ /* If there are sub-regions, process them. */
+ if (i->inner)
+ i = i->inner;
+ /* If there are peers, process them. */
+ else if (i->next_peer)
+ i = i->next_peer;
+ /* Otherwise, step back up the tree to the next peer. */
+ else
+ {
+ do {
+ i = i->outer;
+ if (i == NULL)
+ return;
+ } while (i->next_peer == NULL);
+ i = i->next_peer;
+ }
}
- return current_function_ehc;
}
-
-/* Get a reference to the dynamic handler chain. It points to the
- pointer to the next element in the dynamic handler chain. It ends
- when there are no more elements in the dynamic handler chain, when
- the value is &top_elt from libgcc2.c. Immediately after the
- pointer, is an area suitable for setjmp/longjmp when
- DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
- __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
- isn't defined. */
-rtx
-get_dynamic_handler_chain ()
+static void
+resolve_fixup_regions ()
{
- rtx ehc, dhc, result;
+ int i, j, n = cfun->eh->last_region_number;
- ehc = get_eh_context ();
+ for (i = 1; i <= n; ++i)
+ {
+ struct eh_region *fixup = cfun->eh->region_array[i];
+ struct eh_region *cleanup = 0;
- /* This is the offset of dynamic_handler_chain in the eh_context struct
- declared in eh-common.h. If its location is change, change this offset */
- dhc = plus_constant (ehc, POINTER_SIZE / BITS_PER_UNIT);
+ if (! fixup || fixup->type != ERT_FIXUP)
+ continue;
- result = copy_to_reg (dhc);
+ for (j = 1; j <= n; ++j)
+ {
+ cleanup = cfun->eh->region_array[j];
+ if (cleanup->type == ERT_CLEANUP
+ && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
+ break;
+ }
+ if (j > n)
+ abort ();
- /* We don't want a copy of the dcc, but rather, the single dcc. */
- return gen_rtx_MEM (Pmode, result);
+ fixup->u.fixup.real_region = cleanup->outer;
+ }
}
-/* Get a reference to the dynamic cleanup chain. It points to the
- pointer to the next element in the dynamic cleanup chain.
- Immediately after the pointer, are two Pmode variables, one for a
- pointer to a function that performs the cleanup action, and the
- second, the argument to pass to that function. */
+/* Now that we've discovered what region actually encloses a fixup,
+ we can shuffle pointers and remove them from the tree. */
-rtx
-get_dynamic_cleanup_chain ()
+static void
+remove_fixup_regions ()
{
- rtx dhc, dcc, result;
-
- dhc = get_dynamic_handler_chain ();
- dcc = plus_constant (dhc, POINTER_SIZE / BITS_PER_UNIT);
-
- result = copy_to_reg (dcc);
+ int i;
+ rtx insn, note;
+ struct eh_region *fixup;
+
+ /* Walk the insn chain and adjust the REG_EH_REGION numbers
+ for instructions referencing fixup regions. This is only
+ strictly necessary for fixup regions with no parent, but
+ doesn't hurt to do it for all regions. */
+ for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
+ if (INSN_P (insn)
+ && (note = find_reg_note (insn, REG_EH_REGION, NULL))
+ && INTVAL (XEXP (note, 0)) > 0
+ && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
+ && fixup->type == ERT_FIXUP)
+ {
+ if (fixup->u.fixup.real_region)
+ XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
+ else
+ remove_note (insn, note);
+ }
- /* We don't want a copy of the dcc, but rather, the single dcc. */
- return gen_rtx_MEM (Pmode, result);
-}
+ /* Remove the fixup regions from the tree. */
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ fixup = cfun->eh->region_array[i];
+ if (! fixup)
+ continue;
-#ifdef DONT_USE_BUILTIN_SETJMP
-/* Generate code to evaluate X and jump to LABEL if the value is nonzero.
- LABEL is an rtx of code CODE_LABEL, in this function. */
+ /* Allow GC to maybe free some memory. */
+ if (fixup->type == ERT_CLEANUP)
+ fixup->u.cleanup.exp = NULL_TREE;
-static void
-jumpif_rtx (x, label)
- rtx x;
- rtx label;
-{
- jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
-}
-#endif
+ if (fixup->type != ERT_FIXUP)
+ continue;
-/* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
- We just need to create an element for the cleanup list, and push it
- into the chain.
+ if (fixup->inner)
+ {
+ struct eh_region *parent, *p, **pp;
- A dynamic cleanup is a cleanup action implied by the presence of an
- element on the EH runtime dynamic cleanup stack that is to be
- performed when an exception is thrown. The cleanup action is
- performed by __sjthrow when an exception is thrown. Only certain
- actions can be optimized into dynamic cleanup actions. For the
- restrictions on what actions can be performed using this routine,
- see expand_eh_region_start_tree. */
+ parent = fixup->u.fixup.real_region;
-static void
-start_dynamic_cleanup (func, arg)
- tree func;
- tree arg;
-{
- rtx dcc;
- rtx new_func, new_arg;
- rtx x, buf;
- int size;
+ /* Fix up the children's parent pointers; find the end of
+ the list. */
+ for (p = fixup->inner; ; p = p->next_peer)
+ {
+ p->outer = parent;
+ if (! p->next_peer)
+ break;
+ }
- /* We allocate enough room for a pointer to the function, and
- one argument. */
- size = 2;
+ /* In the tree of cleanups, only outer-inner ordering matters.
+ So link the children back in anywhere at the correct level. */
+ if (parent)
+ pp = &parent->inner;
+ else
+ pp = &cfun->eh->region_tree;
+ p->next_peer = *pp;
+ *pp = fixup->inner;
+ fixup->inner = NULL;
+ }
- /* XXX, FIXME: The stack space allocated this way is too long lived,
- but there is no allocation routine that allocates at the level of
- the last binding contour. */
- buf = assign_stack_local (BLKmode,
- GET_MODE_SIZE (Pmode)*(size+1),
- 0);
+ remove_eh_handler (fixup);
+ }
+}
- buf = change_address (buf, Pmode, NULL_RTX);
+/* Remove all regions whose labels are not reachable from insns. */
- /* Store dcc into the first word of the newly allocated buffer. */
+static void
+remove_unreachable_regions (insns)
+ rtx insns;
+{
+ int i, *uid_region_num;
+ bool *reachable;
+ struct eh_region *r;
+ rtx insn;
- dcc = get_dynamic_cleanup_chain ();
- emit_move_insn (buf, dcc);
+ uid_region_num = xcalloc (get_max_uid (), sizeof(int));
+ reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
- /* Store func and arg into the cleanup list element. */
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ r = cfun->eh->region_array[i];
+ if (!r || r->region_number != i)
+ continue;
- new_func = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
- GET_MODE_SIZE (Pmode)));
- new_arg = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
- GET_MODE_SIZE (Pmode)*2));
- x = expand_expr (func, new_func, Pmode, 0);
- if (x != new_func)
- emit_move_insn (new_func, x);
+ if (r->resume)
+ {
+ if (uid_region_num[INSN_UID (r->resume)])
+ abort ();
+ uid_region_num[INSN_UID (r->resume)] = i;
+ }
+ if (r->label)
+ {
+ if (uid_region_num[INSN_UID (r->label)])
+ abort ();
+ uid_region_num[INSN_UID (r->label)] = i;
+ }
+ if (r->type == ERT_TRY && r->u.try.continue_label)
+ {
+ if (uid_region_num[INSN_UID (r->u.try.continue_label)])
+ abort ();
+ uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
+ }
+ }
- x = expand_expr (arg, new_arg, Pmode, 0);
- if (x != new_arg)
- emit_move_insn (new_arg, x);
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ reachable[uid_region_num[INSN_UID (insn)]] = true;
- /* Update the cleanup chain. */
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ r = cfun->eh->region_array[i];
+ if (r && r->region_number == i && !reachable[i])
+ {
+ /* Don't remove ERT_THROW regions if their outer region
+ is reachable. */
+ if (r->type == ERT_THROW
+ && r->outer
+ && reachable[r->outer->region_number])
+ continue;
+
+ remove_eh_handler (r);
+ }
+ }
- emit_move_insn (dcc, XEXP (buf, 0));
+ free (reachable);
+ free (uid_region_num);
}
-/* Emit RTL to start a dynamic handler on the EH runtime dynamic
- handler stack. This should only be used by expand_eh_region_start
- or expand_eh_region_start_tree. */
+/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
+ can_throw instruction in the region. */
static void
-start_dynamic_handler ()
+convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
+ rtx *pinsns;
+ int *orig_sp;
+ int cur;
{
- rtx dhc, dcc;
- rtx arg, buf;
- int size;
+ int *sp = orig_sp;
+ rtx insn, next;
-#ifndef DONT_USE_BUILTIN_SETJMP
- /* The number of Pmode words for the setjmp buffer, when using the
- builtin setjmp/longjmp, see expand_builtin, case
- BUILT_IN_LONGJMP. */
- size = 5;
-#else
-#ifdef JMP_BUF_SIZE
- size = JMP_BUF_SIZE;
-#else
- /* Should be large enough for most systems, if it is not,
- JMP_BUF_SIZE should be defined with the proper value. It will
- also tend to be larger than necessary for most systems, a more
- optimal port will define JMP_BUF_SIZE. */
- size = FIRST_PSEUDO_REGISTER+2;
-#endif
-#endif
- /* XXX, FIXME: The stack space allocated this way is too long lived,
- but there is no allocation routine that allocates at the level of
- the last binding contour. */
- arg = assign_stack_local (BLKmode,
- GET_MODE_SIZE (Pmode)*(size+1),
- 0);
+ for (insn = *pinsns; insn ; insn = next)
+ {
+ next = NEXT_INSN (insn);
+ if (GET_CODE (insn) == NOTE)
+ {
+ int kind = NOTE_LINE_NUMBER (insn);
+ if (kind == NOTE_INSN_EH_REGION_BEG
+ || kind == NOTE_INSN_EH_REGION_END)
+ {
+ if (kind == NOTE_INSN_EH_REGION_BEG)
+ {
+ struct eh_region *r;
- arg = change_address (arg, Pmode, NULL_RTX);
+ *sp++ = cur;
+ cur = NOTE_EH_HANDLER (insn);
- /* Store dhc into the first word of the newly allocated buffer. */
+ r = cfun->eh->region_array[cur];
+ if (r->type == ERT_FIXUP)
+ {
+ r = r->u.fixup.real_region;
+ cur = r ? r->region_number : 0;
+ }
+ else if (r->type == ERT_CATCH)
+ {
+ r = r->outer;
+ cur = r ? r->region_number : 0;
+ }
+ }
+ else
+ cur = *--sp;
+
+ /* Removing the first insn of a CALL_PLACEHOLDER sequence
+ requires extra care to adjust sequence start. */
+ if (insn == *pinsns)
+ *pinsns = next;
+ remove_insn (insn);
+ continue;
+ }
+ }
+ else if (INSN_P (insn))
+ {
+ if (cur > 0
+ && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
+ /* Calls can always potentially throw exceptions, unless
+ they have a REG_EH_REGION note with a value of 0 or less.
+ Which should be the only possible kind so far. */
+ && (GET_CODE (insn) == CALL_INSN
+ /* If we wanted exceptions for non-call insns, then
+ any may_trap_p instruction could throw. */
+ || (flag_non_call_exceptions
+ && GET_CODE (PATTERN (insn)) != CLOBBER
+ && GET_CODE (PATTERN (insn)) != USE
+ && may_trap_p (PATTERN (insn)))))
+ {
+ REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
+ REG_NOTES (insn));
+ }
- dhc = get_dynamic_handler_chain ();
- dcc = gen_rtx_MEM (Pmode, plus_constant (XEXP (arg, 0),
- GET_MODE_SIZE (Pmode)));
- emit_move_insn (arg, dhc);
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
+ sp, cur);
+ convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
+ sp, cur);
+ convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
+ sp, cur);
+ }
+ }
+ }
- /* Zero out the start of the cleanup chain. */
- emit_move_insn (dcc, const0_rtx);
+ if (sp != orig_sp)
+ abort ();
+}
- /* The jmpbuf starts two words into the area allocated. */
- buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
+void
+convert_from_eh_region_ranges ()
+{
+ int *stack;
+ rtx insns;
-#ifdef DONT_USE_BUILTIN_SETJMP
- {
- rtx x;
- x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_CONST,
- TYPE_MODE (integer_type_node), 1,
- buf, Pmode);
- /* If we come back here for a catch, transfer control to the handler. */
- jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
- }
-#else
- expand_builtin_setjmp_setup (buf,
- ehstack.top->entry->exception_handler_label);
-#endif
+ collect_eh_region_array ();
+ resolve_fixup_regions ();
- /* We are committed to this, so update the handler chain. */
+ stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
+ insns = get_insns ();
+ convert_from_eh_region_ranges_1 (&insns, stack, 0);
+ free (stack);
- emit_move_insn (dhc, force_operand (XEXP (arg, 0), NULL_RTX));
+ remove_fixup_regions ();
+ remove_unreachable_regions (insns);
}
-/* Start an exception handling region for the given cleanup action.
- All instructions emitted after this point are considered to be part
- of the region until expand_eh_region_end is invoked. CLEANUP is
- the cleanup action to perform. The return value is true if the
- exception region was optimized away. If that case,
- expand_eh_region_end does not need to be called for this cleanup,
- nor should it be.
+void
+find_exception_handler_labels ()
+{
+ rtx list = NULL_RTX;
+ int i;
+
+ free_EXPR_LIST_list (&exception_handler_labels);
- This routine notices one particular common case in C++ code
- generation, and optimizes it so as to not need the exception
- region. It works by creating a dynamic cleanup action, instead of
- a using an exception region. */
+ if (cfun->eh->region_tree == NULL)
+ return;
-int
-expand_eh_region_start_tree (decl, cleanup)
- tree decl;
- tree cleanup;
-{
- /* This is the old code. */
- if (! doing_eh (0))
- return 0;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+ rtx lab;
- /* The optimization only applies to actions protected with
- terminate, and only applies if we are using the setjmp/longjmp
- codegen method. */
- if (exceptions_via_longjmp
- && protect_cleanup_actions_with_terminate)
- {
- tree func, arg;
- tree args;
-
- /* Ignore any UNSAVE_EXPR. */
- if (TREE_CODE (cleanup) == UNSAVE_EXPR)
- cleanup = TREE_OPERAND (cleanup, 0);
-
- /* Further, it only applies if the action is a call, if there
- are 2 arguments, and if the second argument is 2. */
-
- if (TREE_CODE (cleanup) == CALL_EXPR
- && (args = TREE_OPERAND (cleanup, 1))
- && (func = TREE_OPERAND (cleanup, 0))
- && (arg = TREE_VALUE (args))
- && (args = TREE_CHAIN (args))
-
- /* is the second argument 2? */
- && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
- && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
- && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
-
- /* Make sure there are no other arguments. */
- && TREE_CHAIN (args) == NULL_TREE)
- {
- /* Arrange for returns and gotos to pop the entry we make on the
- dynamic cleanup stack. */
- expand_dcc_cleanup (decl);
- start_dynamic_cleanup (func, arg);
- return 1;
- }
+ if (! region || region->region_number != i)
+ continue;
+ if (cfun->eh->built_landing_pads)
+ lab = region->landing_pad;
+ else
+ lab = region->label;
+
+ if (lab)
+ list = alloc_EXPR_LIST (0, lab, list);
}
- expand_eh_region_start_for_decl (decl);
- ehstack.top->entry->finalization = cleanup;
+ /* For sjlj exceptions, need the return label to remain live until
+ after landing pad generation. */
+ if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
+ list = alloc_EXPR_LIST (0, return_label, list);
- return 0;
+ exception_handler_labels = list;
}
-/* Just like expand_eh_region_start, except if a cleanup action is
- entered on the cleanup chain, the TREE_PURPOSE of the element put
- on the chain is DECL. DECL should be the associated VAR_DECL, if
- any, otherwise it should be NULL_TREE. */
-
-void
-expand_eh_region_start_for_decl (decl)
- tree decl;
+
+static struct eh_region *
+duplicate_eh_region_1 (o, map)
+ struct eh_region *o;
+ struct inline_remap *map;
{
- rtx note;
+ struct eh_region *n
+ = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
- /* This is the old code. */
- if (! doing_eh (0))
- return;
+ n->region_number = o->region_number + cfun->eh->last_region_number;
+ n->type = o->type;
- if (exceptions_via_longjmp)
+ switch (n->type)
{
- /* We need a new block to record the start and end of the
- dynamic handler chain. We could always do this, but we
- really want to permit jumping into such a block, and we want
- to avoid any errors or performance impact in the SJ EH code
- for now. */
- expand_start_bindings (0);
+ case ERT_CLEANUP:
+ case ERT_MUST_NOT_THROW:
+ break;
- /* But we don't need or want a new temporary level. */
- pop_temp_slots ();
+ case ERT_TRY:
+ if (o->u.try.continue_label)
+ n->u.try.continue_label
+ = get_label_from_map (map,
+ CODE_LABEL_NUMBER (o->u.try.continue_label));
+ break;
+
+ case ERT_CATCH:
+ n->u.catch.type_list = o->u.catch.type_list;
+ break;
+
+ case ERT_ALLOWED_EXCEPTIONS:
+ n->u.allowed.type_list = o->u.allowed.type_list;
+ break;
- /* Mark this block as created by expand_eh_region_start. This
- is so that we can pop the block with expand_end_bindings
- automatically. */
- mark_block_as_eh_region ();
+ case ERT_THROW:
+ n->u.throw.type = o->u.throw.type;
- /* Arrange for returns and gotos to pop the entry we make on the
- dynamic handler stack. */
- expand_dhc_cleanup (decl);
+ default:
+ abort ();
}
- push_eh_entry (&ehstack);
- note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
- NOTE_BLOCK_NUMBER (note)
- = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
- if (exceptions_via_longjmp)
- start_dynamic_handler ();
-}
+ if (o->label)
+ n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
+ if (o->resume)
+ {
+ n->resume = map->insn_map[INSN_UID (o->resume)];
+ if (n->resume == NULL)
+ abort ();
+ }
-/* Start an exception handling region. All instructions emitted after
- this point are considered to be part of the region until
- expand_eh_region_end is invoked. */
+ return n;
+}
-void
-expand_eh_region_start ()
+static void
+duplicate_eh_region_2 (o, n_array)
+ struct eh_region *o;
+ struct eh_region **n_array;
{
- expand_eh_region_start_for_decl (NULL_TREE);
-}
+ struct eh_region *n = n_array[o->region_number];
-/* End an exception handling region. The information about the region
- is found on the top of ehstack.
+ switch (n->type)
+ {
+ case ERT_TRY:
+ n->u.try.catch = n_array[o->u.try.catch->region_number];
+ n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
+ break;
- HANDLER is either the cleanup for the exception region, or if we're
- marking the end of a try block, HANDLER is integer_zero_node.
+ case ERT_CATCH:
+ if (o->u.catch.next_catch)
+ n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
+ if (o->u.catch.prev_catch)
+ n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
+ break;
- HANDLER will be transformed to rtl when expand_leftover_cleanups
- is invoked. */
+ default:
+ break;
+ }
-void
-expand_eh_region_end (handler)
- tree handler;
+ if (o->outer)
+ n->outer = n_array[o->outer->region_number];
+ if (o->inner)
+ n->inner = n_array[o->inner->region_number];
+ if (o->next_peer)
+ n->next_peer = n_array[o->next_peer->region_number];
+}
+
+int
+duplicate_eh_regions (ifun, map)
+ struct function *ifun;
+ struct inline_remap *map;
{
- struct eh_entry *entry;
- rtx note;
- int ret, r;
+ int ifun_last_region_number = ifun->eh->last_region_number;
+ struct eh_region **n_array, *root, *cur;
+ int i;
- if (! doing_eh (0))
- return;
+ if (ifun_last_region_number == 0)
+ return 0;
- entry = pop_eh_entry (&ehstack);
+ n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
- note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
- ret = NOTE_BLOCK_NUMBER (note)
- = CODE_LABEL_NUMBER (entry->exception_handler_label);
- if (exceptions_via_longjmp == 0 && ! flag_new_exceptions
- /* We share outer_context between regions; only emit it once. */
- && INSN_UID (entry->outer_context) == 0)
+ for (i = 1; i <= ifun_last_region_number; ++i)
{
- rtx label;
-
- label = gen_label_rtx ();
- emit_jump (label);
+ cur = ifun->eh->region_array[i];
+ if (!cur || cur->region_number != i)
+ continue;
+ n_array[i] = duplicate_eh_region_1 (cur, map);
+ }
+ for (i = 1; i <= ifun_last_region_number; ++i)
+ {
+ cur = ifun->eh->region_array[i];
+ if (!cur || cur->region_number != i)
+ continue;
+ duplicate_eh_region_2 (cur, n_array);
+ }
- /* Emit a label marking the end of this exception region that
- is used for rethrowing into the outer context. */
- emit_label (entry->outer_context);
- expand_internal_throw ();
+ root = n_array[ifun->eh->region_tree->region_number];
+ cur = cfun->eh->cur_region;
+ if (cur)
+ {
+ struct eh_region *p = cur->inner;
+ if (p)
+ {
+ while (p->next_peer)
+ p = p->next_peer;
+ p->next_peer = root;
+ }
+ else
+ cur->inner = root;
- emit_label (label);
+ for (i = 1; i <= ifun_last_region_number; ++i)
+ if (n_array[i] && n_array[i]->outer == NULL)
+ n_array[i]->outer = cur;
+ }
+ else
+ {
+ struct eh_region *p = cfun->eh->region_tree;
+ if (p)
+ {
+ while (p->next_peer)
+ p = p->next_peer;
+ p->next_peer = root;
+ }
+ else
+ cfun->eh->region_tree = root;
}
- entry->finalization = handler;
+ free (n_array);
- /* create region entry in final exception table */
- r = new_eh_region_entry (NOTE_BLOCK_NUMBER (note), entry->rethrow_label);
+ i = cfun->eh->last_region_number;
+ cfun->eh->last_region_number = i + ifun_last_region_number;
+ return i;
+}
- enqueue_eh_entry (&ehqueue, entry);
+
+static int
+t2r_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ tree entry = (tree) pentry;
+ tree data = (tree) pdata;
- /* If we have already started ending the bindings, don't recurse.
- This only happens when exceptions_via_longjmp is true. */
- if (is_eh_region ())
- {
- /* Because we don't need or want a new temporary level and
- because we didn't create one in expand_eh_region_start,
- create a fake one now to avoid removing one in
- expand_end_bindings. */
- push_temp_slots ();
+ return TREE_PURPOSE (entry) == data;
+}
- mark_block_as_not_eh_region ();
+static hashval_t
+t2r_hash (pentry)
+ const PTR pentry;
+{
+ tree entry = (tree) pentry;
+ return TYPE_HASH (TREE_PURPOSE (entry));
+}
- /* Maybe do this to prevent jumping in and so on... */
- expand_end_bindings (NULL_TREE, 0, 0);
- }
+static int
+t2r_mark_1 (slot, data)
+ PTR *slot;
+ PTR data ATTRIBUTE_UNUSED;
+{
+ tree contents = (tree) *slot;
+ ggc_mark_tree (contents);
+ return 1;
}
-/* End the EH region for a goto fixup. We only need them in the region-based
- EH scheme. */
+static void
+t2r_mark (addr)
+ PTR addr;
+{
+ htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
+}
-void
-expand_fixup_region_start ()
+static void
+add_type_for_runtime (type)
+ tree type;
{
- if (! doing_eh (0) || exceptions_via_longjmp)
- return;
+ tree *slot;
- expand_eh_region_start ();
+ slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
+ TYPE_HASH (type), INSERT);
+ if (*slot == NULL)
+ {
+ tree runtime = (*lang_eh_runtime_type) (type);
+ *slot = tree_cons (type, runtime, NULL_TREE);
+ }
}
-/* End the EH region for a goto fixup. CLEANUP is the cleanup we just
- expanded; to avoid running it twice if it throws, we look through the
- ehqueue for a matching region and rethrow from its outer_context. */
-
-void
-expand_fixup_region_end (cleanup)
- tree cleanup;
+static tree
+lookup_type_for_runtime (type)
+ tree type;
{
- struct eh_node *node;
- int dont_issue;
+ tree *slot;
- if (! doing_eh (0) || exceptions_via_longjmp)
- return;
+ slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
+ TYPE_HASH (type), NO_INSERT);
- for (node = ehstack.top; node && node->entry->finalization != cleanup; )
- node = node->chain;
- if (node == 0)
- for (node = ehqueue.head; node && node->entry->finalization != cleanup; )
- node = node->chain;
- if (node == 0)
- abort ();
+ /* We should have always inserted the data earlier. */
+ return TREE_VALUE (*slot);
+}
- /* If the outer context label has not been issued yet, we don't want
- to issue it as a part of this region, unless this is the
- correct region for the outer context. If we did, then the label for
- the outer context will be WITHIN the begin/end labels,
- and we could get an infinte loop when it tried to rethrow, or just
- generally incorrect execution following a throw. */
+
+/* Represent an entry in @TTypes for either catch actions
+ or exception filter actions. */
+struct ttypes_filter
+{
+ tree t;
+ int filter;
+};
- dont_issue = ((INSN_UID (node->entry->outer_context) == 0)
- && (ehstack.top->entry != node->entry));
+/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
+ (a tree) for a @TTypes type node we are thinking about adding. */
- ehstack.top->entry->outer_context = node->entry->outer_context;
+static int
+ttypes_filter_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ tree data = (tree) pdata;
- /* Since we are rethrowing to the OUTER region, we know we don't need
- a jump around sequence for this region, so we'll pretend the outer
- context label has been issued by setting INSN_UID to 1, then clearing
- it again afterwards. */
+ return entry->t == data;
+}
+
+static hashval_t
+ttypes_filter_hash (pentry)
+ const PTR pentry;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ return TYPE_HASH (entry->t);
+}
- if (dont_issue)
- INSN_UID (node->entry->outer_context) = 1;
+/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
+ exception specification list we are thinking about adding. */
+/* ??? Currently we use the type lists in the order given. Someone
+ should put these in some canonical order. */
- /* Just rethrow. size_zero_node is just a NOP. */
- expand_eh_region_end (size_zero_node);
+static int
+ehspec_filter_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
- if (dont_issue)
- INSN_UID (node->entry->outer_context) = 0;
+ return type_list_equal (entry->t, data->t);
}
-/* If we are using the setjmp/longjmp EH codegen method, we emit a
- call to __sjthrow.
+/* Hash function for exception specification lists. */
- Otherwise, we emit a call to __throw and note that we threw
- something, so we know we need to generate the necessary code for
- __throw.
+static hashval_t
+ehspec_filter_hash (pentry)
+ const PTR pentry;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ hashval_t h = 0;
+ tree list;
- Before invoking throw, the __eh_pc variable must have been set up
- to contain the PC being thrown from. This address is used by
- __throw to determine which exception region (if any) is
- responsible for handling the exception. */
+ for (list = entry->t; list ; list = TREE_CHAIN (list))
+ h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
+ return h;
+}
-void
-emit_throw ()
+/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
+ up the search. Return the filter value to be used. */
+
+static int
+add_ttypes_entry (ttypes_hash, type)
+ htab_t ttypes_hash;
+ tree type;
{
- if (exceptions_via_longjmp)
- {
- emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
- }
- else
+ struct ttypes_filter **slot, *n;
+
+ slot = (struct ttypes_filter **)
+ htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
+
+ if ((n = *slot) == NULL)
{
-#ifdef JUMP_TO_THROW
- emit_indirect_jump (throw_libfunc);
-#else
- emit_library_call (throw_libfunc, 0, VOIDmode, 0);
-#endif
+ /* Filter value is a 1 based table index. */
+
+ n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n->t = type;
+ n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
+ *slot = n;
+
+ VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
}
- emit_barrier ();
+
+ return n->filter;
}
-/* Throw the current exception. If appropriate, this is done by jumping
- to the next handler. */
+/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
+ to speed up the search. Return the filter value to be used. */
-void
-expand_internal_throw ()
+static int
+add_ehspec_entry (ehspec_hash, ttypes_hash, list)
+ htab_t ehspec_hash;
+ htab_t ttypes_hash;
+ tree list;
{
- emit_throw ();
+ struct ttypes_filter **slot, *n;
+ struct ttypes_filter dummy;
+
+ dummy.t = list;
+ slot = (struct ttypes_filter **)
+ htab_find_slot (ehspec_hash, &dummy, INSERT);
+
+ if ((n = *slot) == NULL)
+ {
+ /* Filter value is a -1 based byte index into a uleb128 buffer. */
+
+ n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n->t = list;
+ n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
+ *slot = n;
+
+ /* Look up each type in the list and encode its filter
+ value as a uleb128. Terminate the list with 0. */
+ for (; list ; list = TREE_CHAIN (list))
+ push_uleb128 (&cfun->eh->ehspec_data,
+ add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
+ VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
+ }
+
+ return n->filter;
}
-/* Called from expand_exception_blocks and expand_end_catch_block to
- emit any pending handlers/cleanups queued from expand_eh_region_end. */
+/* Generate the action filter values to be used for CATCH and
+ ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
+ we use lots of landing pads, and so every type or list can share
+ the same filter value, which saves table space. */
-void
-expand_leftover_cleanups ()
+static void
+assign_filter_values ()
{
- struct eh_entry *entry;
+ int i;
+ htab_t ttypes, ehspec;
+
+ VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
+ VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
- while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
+ ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
+ ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
+
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- rtx prev;
+ struct eh_region *r = cfun->eh->region_array[i];
- /* A leftover try block. Shouldn't be one here. */
- if (entry->finalization == integer_zero_node)
- abort ();
+ /* Mind we don't process a region more than once. */
+ if (!r || r->region_number != i)
+ continue;
- /* Output the label for the start of the exception handler. */
+ switch (r->type)
+ {
+ case ERT_CATCH:
+ /* Whatever type_list is (NULL or true list), we build a list
+ of filters for the region. */
+ r->u.catch.filter_list = NULL_TREE;
- receive_exception_label (entry->exception_handler_label);
+ if (r->u.catch.type_list != NULL)
+ {
+ /* Get a filter value for each of the types caught and store
+ them in the region's dedicated list. */
+ tree tp_node = r->u.catch.type_list;
- /* register a handler for this cleanup region */
- add_new_handler (
- find_func_region (CODE_LABEL_NUMBER (entry->exception_handler_label)),
- get_new_handler (entry->exception_handler_label, NULL));
+ for (;tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
+ tree flt_node = build_int_2 (flt, 0);
- /* And now generate the insns for the handler. */
- expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
+ r->u.catch.filter_list
+ = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
+ }
+ }
+ else
+ {
+ /* Get a filter value for the NULL list also since it will need
+ an action record anyway. */
+ int flt = add_ttypes_entry (ttypes, NULL);
+ tree flt_node = build_int_2 (flt, 0);
- prev = get_last_insn ();
- if (prev == NULL || GET_CODE (prev) != BARRIER)
- /* Emit code to throw to the outer context if we fall off
- the end of the handler. */
- expand_rethrow (entry->outer_context);
+ r->u.catch.filter_list
+ = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
+ }
- do_pending_stack_adjust ();
- free (entry);
+ break;
+
+ case ERT_ALLOWED_EXCEPTIONS:
+ r->u.allowed.filter
+ = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
+ break;
+
+ default:
+ break;
+ }
}
+
+ htab_delete (ttypes);
+ htab_delete (ehspec);
}
-/* Called at the start of a block of try statements. */
-void
-expand_start_try_stmts ()
+static void
+build_post_landing_pads ()
{
- if (! doing_eh (1))
- return;
+ int i;
- expand_eh_region_start ();
-}
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+ rtx seq;
-/* Called to begin a catch clause. The parameter is the object which
- will be passed to the runtime type check routine. */
-void
-start_catch_handler (rtime)
- tree rtime;
-{
- rtx handler_label;
- int insn_region_num;
- int eh_region_entry;
+ /* Mind we don't process a region more than once. */
+ if (!region || region->region_number != i)
+ continue;
- if (! doing_eh (1))
- return;
+ switch (region->type)
+ {
+ case ERT_TRY:
+ /* ??? Collect the set of all non-overlapping catch handlers
+ all the way up the chain until blocked by a cleanup. */
+ /* ??? Outer try regions can share landing pads with inner
+ try regions if the types are completely non-overlapping,
+ and there are no intervening cleanups. */
- handler_label = catchstack.top->entry->exception_handler_label;
- insn_region_num = CODE_LABEL_NUMBER (handler_label);
- eh_region_entry = find_func_region (insn_region_num);
+ region->post_landing_pad = gen_label_rtx ();
- /* If we've already issued this label, pick a new one */
- if (catchstack.top->entry->label_used)
- handler_label = gen_exception_label ();
- else
- catchstack.top->entry->label_used = 1;
+ start_sequence ();
- receive_exception_label (handler_label);
+ emit_label (region->post_landing_pad);
- add_new_handler (eh_region_entry, get_new_handler (handler_label, rtime));
+ /* ??? It is mighty inconvenient to call back into the
+ switch statement generation code in expand_end_case.
+ Rapid prototyping sez a sequence of ifs. */
+ {
+ struct eh_region *c;
+ for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
+ {
+ /* ??? _Unwind_ForcedUnwind wants no match here. */
+ if (c->u.catch.type_list == NULL)
+ emit_jump (c->label);
+ else
+ {
+ /* Need for one cmp/jump per type caught. Each type
+ list entry has a matching entry in the filter list
+ (see assign_filter_values). */
+ tree tp_node = c->u.catch.type_list;
+ tree flt_node = c->u.catch.filter_list;
+
+ for (; tp_node; )
+ {
+ emit_cmp_and_jump_insns
+ (cfun->eh->filter,
+ GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
+ EQ, NULL_RTX, word_mode, 0, c->label);
+
+ tp_node = TREE_CHAIN (tp_node);
+ flt_node = TREE_CHAIN (flt_node);
+ }
+ }
+ }
+ }
- if (flag_new_exceptions && ! exceptions_via_longjmp)
- return;
+ /* We delay the generation of the _Unwind_Resume until we generate
+ landing pads. We emit a marker here so as to get good control
+ flow data in the meantime. */
+ region->resume
+ = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
+ emit_barrier ();
- /* Under the old mechanism, as well as setjmp/longjmp, we need to
- issue code to compare 'rtime' to the value in eh_info, via the
- matching function in eh_info. If its is false, we branch around
- the handler we are about to issue. */
+ seq = get_insns ();
+ end_sequence ();
- if (rtime != NULL_TREE && rtime != CATCH_ALL_TYPE)
- {
- rtx call_rtx, rtime_address;
+ emit_insns_before (seq, region->u.try.catch->label);
+ break;
- if (catchstack.top->entry->false_label != NULL_RTX)
- fatal ("Compiler Bug: Never issued previous false_label");
- catchstack.top->entry->false_label = gen_exception_label ();
+ case ERT_ALLOWED_EXCEPTIONS:
+ region->post_landing_pad = gen_label_rtx ();
- rtime_address = expand_expr (rtime, NULL_RTX, Pmode, EXPAND_INITIALIZER);
-#ifdef POINTERS_EXTEND_UNSIGNED
- rtime_address = convert_memory_address (Pmode, rtime_address);
-#endif
- rtime_address = force_reg (Pmode, rtime_address);
+ start_sequence ();
+
+ emit_label (region->post_landing_pad);
+
+ emit_cmp_and_jump_insns (cfun->eh->filter,
+ GEN_INT (region->u.allowed.filter),
+ EQ, NULL_RTX, word_mode, 0, region->label);
+
+ /* We delay the generation of the _Unwind_Resume until we generate
+ landing pads. We emit a marker here so as to get good control
+ flow data in the meantime. */
+ region->resume
+ = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
+ emit_barrier ();
+
+ seq = get_insns ();
+ end_sequence ();
+
+ emit_insns_before (seq, region->label);
+ break;
+
+ case ERT_CLEANUP:
+ case ERT_MUST_NOT_THROW:
+ region->post_landing_pad = region->label;
+ break;
- /* Now issue the call, and branch around handler if needed */
- call_rtx = emit_library_call_value (eh_rtime_match_libfunc, NULL_RTX,
- 0, SImode, 1, rtime_address, Pmode);
+ case ERT_CATCH:
+ case ERT_THROW:
+ /* Nothing to do. */
+ break;
- /* Did the function return true? */
- emit_cmp_and_jump_insns (call_rtx, const0_rtx, EQ, NULL_RTX,
- GET_MODE (call_rtx), 0, 0,
- catchstack.top->entry->false_label);
+ default:
+ abort ();
+ }
}
}
-/* Called to end a catch clause. If we aren't using the new exception
- model tabel mechanism, we need to issue the branch-around label
- for the end of the catch block. */
+/* Replace RESX patterns with jumps to the next handler if any, or calls to
+ _Unwind_Resume otherwise. */
-void
-end_catch_handler ()
+static void
+connect_post_landing_pads ()
{
- if (! doing_eh (1))
- return;
+ int i;
- if (flag_new_exceptions && ! exceptions_via_longjmp)
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- emit_barrier ();
- return;
- }
-
- /* A NULL label implies the catch clause was a catch all or cleanup */
- if (catchstack.top->entry->false_label == NULL_RTX)
- return;
+ struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *outer;
+ rtx seq;
+
+ /* Mind we don't process a region more than once. */
+ if (!region || region->region_number != i)
+ continue;
+
+ /* If there is no RESX, or it has been deleted by flow, there's
+ nothing to fix up. */
+ if (! region->resume || INSN_DELETED_P (region->resume))
+ continue;
+
+ /* Search for another landing pad in this function. */
+ for (outer = region->outer; outer ; outer = outer->outer)
+ if (outer->post_landing_pad)
+ break;
- emit_label (catchstack.top->entry->false_label);
- catchstack.top->entry->false_label = NULL_RTX;
-}
+ start_sequence ();
-/* Generate RTL for the start of a group of catch clauses.
+ if (outer)
+ emit_jump (outer->post_landing_pad);
+ else
+ emit_library_call (unwind_resume_libfunc, LCT_THROW,
+ VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
- It is responsible for starting a new instruction sequence for the
- instructions in the catch block, and expanding the handlers for the
- internally-generated exception regions nested within the try block
- corresponding to this catch block. */
+ seq = get_insns ();
+ end_sequence ();
+ emit_insns_before (seq, region->resume);
+ delete_insn (region->resume);
+ }
+}
-void
-expand_start_all_catch ()
+
+static void
+dw2_build_landing_pads ()
{
- struct eh_entry *entry;
- tree label;
- rtx outer_context;
-
- if (! doing_eh (1))
- return;
-
- outer_context = ehstack.top->entry->outer_context;
+ int i;
+ unsigned int j;
- /* End the try block. */
- expand_eh_region_end (integer_zero_node);
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+ rtx seq;
+ bool clobbers_hard_regs = false;
- emit_line_note (input_filename, lineno);
- label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
+ /* Mind we don't process a region more than once. */
+ if (!region || region->region_number != i)
+ continue;
- /* The label for the exception handling block that we will save.
- This is Lresume in the documentation. */
- expand_label (label);
-
- /* Push the label that points to where normal flow is resumed onto
- the top of the label stack. */
- push_label_entry (&caught_return_label_stack, NULL_RTX, label);
+ if (region->type != ERT_CLEANUP
+ && region->type != ERT_TRY
+ && region->type != ERT_ALLOWED_EXCEPTIONS)
+ continue;
- /* Start a new sequence for all the catch blocks. We will add this
- to the global sequence catch_clauses when we have completed all
- the handlers in this handler-seq. */
- start_sequence ();
+ start_sequence ();
- entry = dequeue_eh_entry (&ehqueue);
- for ( ; entry->finalization != integer_zero_node;
- entry = dequeue_eh_entry (&ehqueue))
- {
- rtx prev;
+ region->landing_pad = gen_label_rtx ();
+ emit_label (region->landing_pad);
- /* Emit the label for the cleanup handler for this region, and
- expand the code for the handler.
+#ifdef HAVE_exception_receiver
+ if (HAVE_exception_receiver)
+ emit_insn (gen_exception_receiver ());
+ else
+#endif
+#ifdef HAVE_nonlocal_goto_receiver
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+ else
+#endif
+ { /* Nothing */ }
- Note that a catch region is handled as a side-effect here;
- for a try block, entry->finalization will contain
- integer_zero_node, so no code will be generated in the
- expand_expr call below. But, the label for the handler will
- still be emitted, so any code emitted after this point will
- end up being the handler. */
-
- receive_exception_label (entry->exception_handler_label);
+ /* If the eh_return data registers are call-saved, then we
+ won't have considered them clobbered from the call that
+ threw. Kill them now. */
+ for (j = 0; ; ++j)
+ {
+ unsigned r = EH_RETURN_DATA_REGNO (j);
+ if (r == INVALID_REGNUM)
+ break;
+ if (! call_used_regs[r])
+ {
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
+ clobbers_hard_regs = true;
+ }
+ }
- /* register a handler for this cleanup region */
- add_new_handler (
- find_func_region (CODE_LABEL_NUMBER (entry->exception_handler_label)),
- get_new_handler (entry->exception_handler_label, NULL));
+ if (clobbers_hard_regs)
+ {
+ /* @@@ This is a kludge. Not all machine descriptions define a
+ blockage insn, but we must not allow the code we just generated
+ to be reordered by scheduling. So emit an ASM_INPUT to act as
+ blockage insn. */
+ emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
+ }
- /* And now generate the insns for the cleanup handler. */
- expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
+ emit_move_insn (cfun->eh->exc_ptr,
+ gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
+ emit_move_insn (cfun->eh->filter,
+ gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
- prev = get_last_insn ();
- if (prev == NULL || GET_CODE (prev) != BARRIER)
- /* Code to throw out to outer context when we fall off end
- of the handler. We can't do this here for catch blocks,
- so it's done in expand_end_all_catch instead. */
- expand_rethrow (entry->outer_context);
+ seq = get_insns ();
+ end_sequence ();
- do_pending_stack_adjust ();
- free (entry);
+ emit_insns_before (seq, region->post_landing_pad);
}
+}
- /* At this point, all the cleanups are done, and the ehqueue now has
- the current exception region at its head. We dequeue it, and put it
- on the catch stack. */
+
+struct sjlj_lp_info
+{
+ int directly_reachable;
+ int action_index;
+ int dispatch_index;
+ int call_site_index;
+};
- push_entry (&catchstack, entry);
+static bool
+sjlj_find_directly_reachable_regions (lp_info)
+ struct sjlj_lp_info *lp_info;
+{
+ rtx insn;
+ bool found_one = false;
- /* If we are not doing setjmp/longjmp EH, because we are reordered
- out of line, we arrange to rethrow in the outer context. We need to
- do this because we are not physically within the region, if any, that
- logically contains this catch block. */
- if (! exceptions_via_longjmp)
+ for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
- expand_eh_region_start ();
- ehstack.top->entry->outer_context = outer_context;
+ struct eh_region *region;
+ enum reachable_code rc;
+ tree type_thrown;
+ rtx note;
+
+ if (! INSN_P (insn))
+ continue;
+
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ continue;
+
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
+ }
+
+ /* Find the first containing region that might handle the exception.
+ That's the landing pad to which we will transfer control. */
+ rc = RNL_NOT_CAUGHT;
+ for (; region; region = region->outer)
+ {
+ rc = reachable_next_level (region, type_thrown, 0);
+ if (rc != RNL_NOT_CAUGHT)
+ break;
+ }
+ if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
+ {
+ lp_info[region->region_number].directly_reachable = 1;
+ found_one = true;
+ }
}
+ return found_one;
}
-/* Finish up the catch block. At this point all the insns for the
- catch clauses have already been generated, so we only have to add
- them to the catch_clauses list. We also want to make sure that if
- we fall off the end of the catch clauses that we rethrow to the
- outer EH region. */
-
-void
-expand_end_all_catch ()
+static void
+sjlj_assign_call_site_values (dispatch_label, lp_info)
+ rtx dispatch_label;
+ struct sjlj_lp_info *lp_info;
{
- rtx new_catch_clause;
- struct eh_entry *entry;
+ htab_t ar_hash;
+ int i, index;
- if (! doing_eh (1))
- return;
+ /* First task: build the action table. */
- /* Dequeue the current catch clause region. */
- entry = pop_eh_entry (&catchstack);
- free (entry);
+ VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
+ ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
- if (! exceptions_via_longjmp)
- {
- rtx outer_context = ehstack.top->entry->outer_context;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (lp_info[i].directly_reachable)
+ {
+ struct eh_region *r = cfun->eh->region_array[i];
+ r->landing_pad = dispatch_label;
+ lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
+ if (lp_info[i].action_index != -1)
+ cfun->uses_eh_lsda = 1;
+ }
- /* Finish the rethrow region. size_zero_node is just a NOP. */
- expand_eh_region_end (size_zero_node);
- /* New exceptions handling models will never have a fall through
- of a catch clause */
- if (!flag_new_exceptions)
- expand_rethrow (outer_context);
- }
- else
- expand_rethrow (NULL_RTX);
+ htab_delete (ar_hash);
- /* Code to throw out to outer context, if we fall off end of catch
- handlers. This is rethrow (Lresume, same id, same obj) in the
- documentation. We use Lresume because we know that it will throw
- to the correct context.
+ /* Next: assign dispatch values. In dwarf2 terms, this would be the
+ landing pad label for the region. For sjlj though, there is one
+ common landing pad from which we dispatch to the post-landing pads.
- In other words, if the catch handler doesn't exit or return, we
- do a "throw" (using the address of Lresume as the point being
- thrown from) so that the outer EH region can then try to process
- the exception. */
+ A region receives a dispatch index if it is directly reachable
+ and requires in-function processing. Regions that share post-landing
+ pads may share dispatch indices. */
+ /* ??? Post-landing pad sharing doesn't actually happen at the moment
+ (see build_post_landing_pads) so we don't bother checking for it. */
- /* Now we have the complete catch sequence. */
- new_catch_clause = get_insns ();
- end_sequence ();
-
- /* This level of catch blocks is done, so set up the successful
- catch jump label for the next layer of catch blocks. */
- pop_label_entry (&caught_return_label_stack);
- pop_label_entry (&outer_context_label_stack);
-
- /* Add the new sequence of catches to the main one for this function. */
- push_to_sequence (catch_clauses);
- emit_insns (new_catch_clause);
- catch_clauses = get_insns ();
- end_sequence ();
-
- /* Here we fall through into the continuation code. */
-}
+ index = 0;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (lp_info[i].directly_reachable)
+ lp_info[i].dispatch_index = index++;
-/* Rethrow from the outer context LABEL. */
+  /* Finally: assign call-site values.  In dwarf2 terms, this would be
+ the region number assigned by convert_to_eh_region_ranges, but
+ handles no-action and must-not-throw differently. */
-static void
-expand_rethrow (label)
- rtx label;
-{
- if (exceptions_via_longjmp)
- emit_throw ();
- else
- if (flag_new_exceptions)
+ call_site_base = 1;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (lp_info[i].directly_reachable)
{
- rtx insn, val;
- if (label == NULL_RTX)
- label = last_rethrow_symbol;
- emit_library_call (rethrow_libfunc, 0, VOIDmode, 1, label, Pmode);
- SYMBOL_REF_USED (label) = 1;
-
- /* Search backwards for the actual call insn. */
- insn = get_last_insn ();
- while (GET_CODE (insn) != CALL_INSN)
- insn = PREV_INSN (insn);
- delete_insns_since (insn);
-
- /* Mark the label/symbol on the call. */
- val = GEN_INT (eh_region_from_symbol (label));
- REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EH_RETHROW, val,
- REG_NOTES (insn));
- emit_barrier ();
+ int action = lp_info[i].action_index;
+
+ /* Map must-not-throw to otherwise unused call-site index 0. */
+ if (action == -2)
+ index = 0;
+ /* Map no-action to otherwise unused call-site index -1. */
+ else if (action == -1)
+ index = -1;
+ /* Otherwise, look it up in the table. */
+ else
+ index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
+
+ lp_info[i].call_site_index = index;
}
- else
- emit_jump (label);
}
-/* End all the pending exception regions on protect_list. The handlers
- will be emitted when expand_leftover_cleanups is invoked. */
-
-void
-end_protect_partials ()
+static void
+sjlj_mark_call_sites (lp_info)
+ struct sjlj_lp_info *lp_info;
{
- while (protect_list)
+ int last_call_site = -2;
+ rtx insn, mem;
+
+ for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
- expand_eh_region_end (TREE_VALUE (protect_list));
- protect_list = TREE_CHAIN (protect_list);
- }
-}
+ struct eh_region *region;
+ int this_call_site;
+ rtx note, before, p;
-/* Arrange for __terminate to be called if there is an unhandled throw
- from within E. */
+ /* Reset value tracking at extended basic block boundaries. */
+ if (GET_CODE (insn) == CODE_LABEL)
+ last_call_site = -2;
-tree
-protect_with_terminate (e)
- tree e;
-{
- /* We only need to do this when using setjmp/longjmp EH and the
- language requires it, as otherwise we protect all of the handlers
- at once, if we need to. */
- if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
- {
- tree handler, result;
+ if (! INSN_P (insn))
+ continue;
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
+ {
+ /* Calls (and trapping insns) without notes are outside any
+ exception handling region in this function. Mark them as
+ no action. */
+ if (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))))
+ this_call_site = -1;
+ else
+ continue;
+ }
+ else
+ {
+ /* Calls that are known to not throw need not be marked. */
+ if (INTVAL (XEXP (note, 0)) <= 0)
+ continue;
- handler = make_node (RTL_EXPR);
- TREE_TYPE (handler) = void_type_node;
- RTL_EXPR_RTL (handler) = const0_rtx;
- TREE_SIDE_EFFECTS (handler) = 1;
- start_sequence_for_rtl_expr (handler);
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ this_call_site = lp_info[region->region_number].call_site_index;
+ }
- emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
- emit_barrier ();
+ if (this_call_site == last_call_site)
+ continue;
- RTL_EXPR_SEQUENCE (handler) = get_insns ();
- end_sequence ();
-
- result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
- TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
- TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
- TREE_READONLY (result) = TREE_READONLY (e);
+      /* Don't separate a call from its argument loads.  */
+ before = insn;
+ if (GET_CODE (insn) == CALL_INSN)
+ before = find_first_parameter_load (insn, NULL_RTX);
- pop_obstacks ();
+ start_sequence ();
+ mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
+ sjlj_fc_call_site_ofs);
+ emit_move_insn (mem, GEN_INT (this_call_site));
+ p = get_insns ();
+ end_sequence ();
- e = result;
+ emit_insns_before (p, before);
+ last_call_site = this_call_site;
}
-
- return e;
}
-
-/* The exception table that we build that is used for looking up and
- dispatching exceptions, the current number of entries, and its
- maximum size before we have to extend it.
- The number in eh_table is the code label number of the exception
- handler for the region. This is added by add_eh_table_entry and
- used by output_exception_table_entry. */
+/* Construct the SjLj_Function_Context. */
-static int *eh_table = NULL;
-static int eh_table_size = 0;
-static int eh_table_max_size = 0;
+static void
+sjlj_emit_function_enter (dispatch_label)
+ rtx dispatch_label;
+{
+ rtx fn_begin, fc, mem, seq;
-/* Note the need for an exception table entry for region N. If we
- don't need to output an explicit exception table, avoid all of the
- extra work.
+ fc = cfun->eh->sjlj_fc;
- Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
- (Or NOTE_INSN_EH_REGION_END sometimes)
- N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
- label number of the exception handler for the region. */
+ start_sequence ();
-void
-add_eh_table_entry (n)
- int n;
-{
-#ifndef OMIT_EH_TABLE
- if (eh_table_size >= eh_table_max_size)
+ /* We're storing this libcall's address into memory instead of
+ calling it directly. Thus, we must call assemble_external_libcall
+     here, as we cannot depend on emit_library_call to do it for us.  */
+ assemble_external_libcall (eh_personality_libfunc);
+ mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
+ emit_move_insn (mem, eh_personality_libfunc);
+
+ mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
+ if (cfun->uses_eh_lsda)
{
- if (eh_table)
- {
- eh_table_max_size += eh_table_max_size>>1;
+ char buf[20];
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
+ emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
+ }
+ else
+ emit_move_insn (mem, const0_rtx);
- if (eh_table_max_size < 0)
- abort ();
+#ifdef DONT_USE_BUILTIN_SETJMP
+ {
+ rtx x, note;
+ x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
+ TYPE_MODE (integer_type_node), 1,
+ plus_constant (XEXP (fc, 0),
+ sjlj_fc_jbuf_ofs), Pmode);
- eh_table = (int *) xrealloc (eh_table,
- eh_table_max_size * sizeof (int));
- }
- else
- {
- eh_table_max_size = 252;
- eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
- }
- }
- eh_table[eh_table_size++] = n;
+ note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
+ NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
+
+ emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
+ TYPE_MODE (integer_type_node), 0, dispatch_label);
+ }
+#else
+ expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
+ dispatch_label);
#endif
+
+ emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
+ 1, XEXP (fc, 0), Pmode);
+
+ seq = get_insns ();
+ end_sequence ();
+
+ /* ??? Instead of doing this at the beginning of the function,
+ do this in a block that is at loop level 0 and dominates all
+ can_throw_internal instructions. */
+
+ for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
+ if (GET_CODE (fn_begin) == NOTE
+ && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ break;
+ emit_insns_after (seq, fn_begin);
}
-/* Return a non-zero value if we need to output an exception table.
+/* Call back from expand_function_end to know where we should put
+ the call to unwind_sjlj_unregister_libfunc if needed. */
- On some platforms, we don't have to output a table explicitly.
- This routine doesn't mean we don't have one. */
+void
+sjlj_emit_function_exit_after (after)
+ rtx after;
+{
+ cfun->eh->sjlj_exit_after = after;
+}
-int
-exception_table_p ()
+static void
+sjlj_emit_function_exit ()
{
- if (eh_table)
- return 1;
+ rtx seq;
- return 0;
-}
+ start_sequence ();
-/* Output the entry of the exception table corresponding to the
- exception region numbered N to file FILE.
+ emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
+ 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
- N is the code label number corresponding to the handler of the
- region. */
+ seq = get_insns ();
+ end_sequence ();
+
+ /* ??? Really this can be done in any block at loop level 0 that
+ post-dominates all can_throw_internal instructions. This is
+ the last possible moment. */
+
+ emit_insns_after (seq, cfun->eh->sjlj_exit_after);
+}
static void
-output_exception_table_entry (file, n)
- FILE *file;
- int n;
-{
- char buf[256];
- rtx sym;
- struct handler_info *handler = get_first_handler (n);
- int index = find_func_region (n);
- rtx rethrow;
-
- /* form and emit the rethrow label, if needed */
- rethrow = function_eh_regions[index].rethrow_label;
- if (rethrow != NULL_RTX && !flag_new_exceptions)
- rethrow = NULL_RTX;
- if (rethrow != NULL_RTX && handler == NULL)
- if (! SYMBOL_REF_USED (rethrow))
- rethrow = NULL_RTX;
-
-
- for ( ; handler != NULL || rethrow != NULL_RTX; handler = handler->next)
- {
- /* rethrow label should indicate the LAST entry for a region */
- if (rethrow != NULL_RTX && (handler == NULL || handler->next == NULL))
- {
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", n);
- assemble_label(buf);
- rethrow = NULL_RTX;
- }
+sjlj_emit_dispatch_table (dispatch_label, lp_info)
+ rtx dispatch_label;
+ struct sjlj_lp_info *lp_info;
+{
+ int i, first_reachable;
+ rtx mem, dispatch, seq, fc;
- ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
- sym = gen_rtx_SYMBOL_REF (Pmode, buf);
- assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
+ fc = cfun->eh->sjlj_fc;
- ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
- sym = gen_rtx_SYMBOL_REF (Pmode, buf);
- assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
-
- if (handler == NULL)
- assemble_integer (GEN_INT (0), POINTER_SIZE / BITS_PER_UNIT, 1);
- else
- {
- ASM_GENERATE_INTERNAL_LABEL (buf, "L", handler->handler_number);
- sym = gen_rtx_SYMBOL_REF (Pmode, buf);
- assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
- }
+ start_sequence ();
- if (flag_new_exceptions)
- {
- if (handler == NULL || handler->type_info == NULL)
- assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
- else
- if (handler->type_info == CATCH_ALL_TYPE)
- assemble_integer (GEN_INT (CATCH_ALL_TYPE),
- POINTER_SIZE / BITS_PER_UNIT, 1);
- else
- output_constant ((tree)(handler->type_info),
- POINTER_SIZE / BITS_PER_UNIT);
- }
- putc ('\n', file); /* blank line */
- /* We only output the first label under the old scheme */
- if (! flag_new_exceptions || handler == NULL)
- break;
+ emit_label (dispatch_label);
+
+#ifndef DONT_USE_BUILTIN_SETJMP
+ expand_builtin_setjmp_receiver (dispatch_label);
+#endif
+
+ /* Load up dispatch index, exc_ptr and filter values from the
+ function context. */
+ mem = adjust_address (fc, TYPE_MODE (integer_type_node),
+ sjlj_fc_call_site_ofs);
+ dispatch = copy_to_reg (mem);
+
+ mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
+ if (word_mode != Pmode)
+ {
+#ifdef POINTERS_EXTEND_UNSIGNED
+ mem = convert_memory_address (Pmode, mem);
+#else
+ mem = convert_to_mode (Pmode, mem, 0);
+#endif
}
-}
+ emit_move_insn (cfun->eh->exc_ptr, mem);
-/* Output the exception table if we have and need one. */
+ mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
+ emit_move_insn (cfun->eh->filter, mem);
-static short language_code = 0;
-static short version_code = 0;
+ /* Jump to one of the directly reachable regions. */
+ /* ??? This really ought to be using a switch statement. */
-/* This routine will set the language code for exceptions. */
-void
-set_exception_lang_code (code)
- int code;
-{
- language_code = code;
-}
+ first_reachable = 0;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ if (! lp_info[i].directly_reachable)
+ continue;
-/* This routine will set the language version code for exceptions. */
-void
-set_exception_version_code (code)
- int code;
-{
- version_code = code;
-}
+ if (! first_reachable)
+ {
+ first_reachable = i;
+ continue;
+ }
+ emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
+ EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
+ cfun->eh->region_array[i]->post_landing_pad);
+ }
-void
-output_exception_table ()
-{
- int i;
- char buf[256];
- extern FILE *asm_out_file;
+ seq = get_insns ();
+ end_sequence ();
- if (! doing_eh (0) || ! eh_table)
- return;
+ emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
+ ->post_landing_pad));
+}
- exception_section ();
+static void
+sjlj_build_landing_pads ()
+{
+ struct sjlj_lp_info *lp_info;
- /* Beginning marker for table. */
- assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
- assemble_label ("__EXCEPTION_TABLE__");
+ lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
+ sizeof (struct sjlj_lp_info));
- if (flag_new_exceptions)
+ if (sjlj_find_directly_reachable_regions (lp_info))
{
- assemble_integer (GEN_INT (NEW_EH_RUNTIME),
- POINTER_SIZE / BITS_PER_UNIT, 1);
- assemble_integer (GEN_INT (language_code), 2 , 1);
- assemble_integer (GEN_INT (version_code), 2 , 1);
+ rtx dispatch_label = gen_label_rtx ();
- /* Add enough padding to make sure table aligns on a pointer boundry. */
- i = GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT - 4;
- for ( ; i < 0; i = i + GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT)
- ;
- if (i != 0)
- assemble_integer (const0_rtx, i , 1);
+ cfun->eh->sjlj_fc
+ = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
+ int_size_in_bytes (sjlj_fc_type_node),
+ TYPE_ALIGN (sjlj_fc_type_node));
- /* Generate the label for offset calculations on rethrows */
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", 0);
- assemble_label(buf);
+ sjlj_assign_call_site_values (dispatch_label, lp_info);
+ sjlj_mark_call_sites (lp_info);
+
+ sjlj_emit_function_enter (dispatch_label);
+ sjlj_emit_dispatch_table (dispatch_label, lp_info);
+ sjlj_emit_function_exit ();
}
- for (i = 0; i < eh_table_size; ++i)
- output_exception_table_entry (asm_out_file, eh_table[i]);
+ free (lp_info);
+}
- free (eh_table);
- clear_function_eh_region ();
+void
+finish_eh_generation ()
+{
+ /* Nothing to do if no regions created. */
+ if (cfun->eh->region_tree == NULL)
+ return;
- /* Ending marker for table. */
- /* Generate the label for end of table. */
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", CODE_LABEL_NUMBER (final_rethrow));
- assemble_label(buf);
- assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ /* The object here is to provide find_basic_blocks with detailed
+ information (via reachable_handlers) on how exception control
+ flows within the function. In this first pass, we can include
+ type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
+ regions, and hope that it will be useful in deleting unreachable
+ handlers. Subsequently, we will generate landing pads which will
+ connect many of the handlers, and then type information will not
+ be effective. Still, this is a win over previous implementations. */
+
+ rebuild_jump_labels (get_insns ());
+ find_basic_blocks (get_insns (), max_reg_num (), 0);
+ cleanup_cfg (CLEANUP_PRE_LOOP);
+
+ /* These registers are used by the landing pads. Make sure they
+ have been generated. */
+ get_exception_pointer (cfun);
+ get_exception_filter (cfun);
+
+ /* Construct the landing pads. */
+
+ assign_filter_values ();
+ build_post_landing_pads ();
+ connect_post_landing_pads ();
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_build_landing_pads ();
+ else
+ dw2_build_landing_pads ();
- /* for binary compatability, the old __throw checked the second
- position for a -1, so we should output at least 2 -1's */
- if (! flag_new_exceptions)
- assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ cfun->eh->built_landing_pads = 1;
- putc ('\n', asm_out_file); /* blank line */
+ /* We've totally changed the CFG. Start over. */
+ find_exception_handler_labels ();
+ rebuild_jump_labels (get_insns ());
+ find_basic_blocks (get_insns (), max_reg_num (), 0);
+ cleanup_cfg (CLEANUP_PRE_LOOP);
}
-/* Emit code to get EH context.
-
- We have to scan thru the code to find possible EH context registers.
- Inlined functions may use it too, and thus we'll have to be able
- to change them too.
+/* This section handles removing dead code for flow. */
- This is done only if using exceptions_via_longjmp. */
+/* Remove LABEL from the exception_handler_labels list. */
-void
-emit_eh_context ()
+static void
+remove_exception_handler_label (label)
+ rtx label;
{
- rtx insn;
- rtx ehc = 0;
+ rtx *pl, l;
- if (! doing_eh (0))
+ /* If exception_handler_labels was not built yet,
+ there is nothing to do. */
+ if (exception_handler_labels == NULL)
return;
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == USE)
- {
- rtx reg = find_reg_note (insn, REG_EH_CONTEXT, 0);
- if (reg)
- {
- rtx insns;
-
- start_sequence ();
-
- /* If this is the first use insn, emit the call here. This
- will always be at the top of our function, because if
- expand_inline_function notices a REG_EH_CONTEXT note, it
- adds a use insn to this function as well. */
- if (ehc == 0)
- ehc = call_get_eh_context ();
-
- emit_move_insn (XEXP (reg, 0), ehc);
- insns = get_insns ();
- end_sequence ();
-
- emit_insns_before (insns, insn);
-
- /* At -O0, we must make the context register stay alive so
- that the stupid.c register allocator doesn't get confused. */
- if (obey_regdecls != 0)
- {
- insns = gen_rtx_USE (GET_MODE (XEXP (reg,0)), XEXP (reg,0));
- emit_insn_before (insns, get_last_insn ());
- }
- }
- }
-}
+ for (pl = &exception_handler_labels, l = *pl;
+ XEXP (l, 0) != label;
+ pl = &XEXP (l, 1), l = *pl)
+ continue;
-/* Scan the current insns and build a list of handler labels. The
- resulting list is placed in the global variable exception_handler_labels.
+ *pl = XEXP (l, 1);
+ free_EXPR_LIST_node (l);
+}
- It is called after the last exception handling region is added to
- the current function (when the rtl is almost all built for the
- current function) and before the jump optimization pass. */
+/* Splice REGION from the region tree etc. */
-void
-find_exception_handler_labels ()
+static void
+remove_eh_handler (region)
+ struct eh_region *region;
{
- rtx insn;
+ struct eh_region **pp, *p;
+ rtx lab;
+ int i;
- exception_handler_labels = NULL_RTX;
+ /* For the benefit of efficiently handling REG_EH_REGION notes,
+ replace this region in the region array with its containing
+ region. Note that previous region deletions may result in
+ multiple copies of this region in the array, so we have to
+ search the whole thing. */
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (cfun->eh->region_array[i] == region)
+ cfun->eh->region_array[i] = region->outer;
+
+ if (cfun->eh->built_landing_pads)
+ lab = region->landing_pad;
+ else
+ lab = region->label;
+ if (lab)
+ remove_exception_handler_label (lab);
- /* If we aren't doing exception handling, there isn't much to check. */
- if (! doing_eh (0))
- return;
+ if (region->outer)
+ pp = &region->outer->inner;
+ else
+ pp = &cfun->eh->region_tree;
+ for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
+ continue;
- /* For each start of a region, add its label to the list. */
+ if (region->inner)
+ {
+ for (p = region->inner; p->next_peer ; p = p->next_peer)
+ p->outer = region->outer;
+ p->next_peer = region->next_peer;
+ p->outer = region->outer;
+ *pp = region->inner;
+ }
+ else
+ *pp = region->next_peer;
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ if (region->type == ERT_CATCH)
{
- struct handler_info* ptr;
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
+ struct eh_region *try, *next, *prev;
+
+ for (try = region->next_peer;
+ try->type == ERT_CATCH;
+ try = try->next_peer)
+ continue;
+ if (try->type != ERT_TRY)
+ abort ();
+
+ next = region->u.catch.next_catch;
+ prev = region->u.catch.prev_catch;
+
+ if (next)
+ next->u.catch.prev_catch = prev;
+ else
+ try->u.try.last_catch = prev;
+ if (prev)
+ prev->u.catch.next_catch = next;
+ else
{
- ptr = get_first_handler (NOTE_BLOCK_NUMBER (insn));
- for ( ; ptr; ptr = ptr->next)
- {
- /* make sure label isn't in the list already */
- rtx x;
- for (x = exception_handler_labels; x; x = XEXP (x, 1))
- if (XEXP (x, 0) == ptr->handler_label)
- break;
- if (! x)
- exception_handler_labels = gen_rtx_EXPR_LIST (VOIDmode,
- ptr->handler_label, exception_handler_labels);
- }
+ try->u.try.catch = next;
+ if (! next)
+ remove_eh_handler (try);
}
}
-}
-/* Return a value of 1 if the parameter label number is an exception handler
- label. Return 0 otherwise. */
-
-int
-is_exception_handler_label (lab)
- int lab;
-{
- rtx x;
- for (x = exception_handler_labels ; x ; x = XEXP (x, 1))
- if (lab == CODE_LABEL_NUMBER (XEXP (x, 0)))
- return 1;
- return 0;
+ free (region);
}
-/* Perform sanity checking on the exception_handler_labels list.
-
- Can be called after find_exception_handler_labels is called to
- build the list of exception handlers for the current function and
- before we finish processing the current function. */
+/* LABEL heads a basic block that is about to be deleted. If this
+ label corresponds to an exception region, we may be able to
+ delete the region. */
void
-check_exception_handler_labels ()
+maybe_remove_eh_handler (label)
+ rtx label;
{
- rtx insn, insn2;
+ int i;
- /* If we aren't doing exception handling, there isn't much to check. */
- if (! doing_eh (0))
+ /* ??? After generating landing pads, it's not so simple to determine
+ if the region data is completely unused. One must examine the
+ landing pad and the post landing pad, and whether an inner try block
+ is referencing the catch handlers directly. */
+ if (cfun->eh->built_landing_pads)
return;
- /* Make sure there is no more than 1 copy of a label */
- for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- int count = 0;
- for (insn2 = exception_handler_labels; insn2; insn2 = XEXP (insn2, 1))
- if (XEXP (insn, 0) == XEXP (insn2, 0))
- count++;
- if (count != 1)
- warning ("Counted %d copies of EH region %d in list.\n", count,
- CODE_LABEL_NUMBER (insn));
+ struct eh_region *region = cfun->eh->region_array[i];
+ if (region && region->label == label)
+ {
+ /* Flow will want to remove MUST_NOT_THROW regions as unreachable
+ because there is no path to the fallback call to terminate.
+ But the region continues to affect call-site data until there
+ are no more contained calls, which we don't see here. */
+ if (region->type == ERT_MUST_NOT_THROW)
+ {
+ remove_exception_handler_label (region->label);
+ region->label = NULL_RTX;
+ }
+ else
+ remove_eh_handler (region);
+ break;
+ }
}
-
}
+
-/* This group of functions initializes the exception handling data
- structures at the start of the compilation, initializes the data
- structures at the start of a function, and saves and restores the
- exception handling data structures for the start/end of a nested
- function. */
+/* This section describes CFG exception edges for flow. */
-/* Toplevel initialization for EH things. */
+/* For communicating between calls to reachable_next_level. */
+struct reachable_info
+{
+ tree types_caught;
+ tree types_allowed;
+ rtx handlers;
+};
-void
-init_eh ()
+/* A subroutine of reachable_next_level. Return true if TYPE, or a
+ base class of TYPE, is in HANDLED. */
+
+static int
+check_handled (handled, type)
+ tree handled, type;
{
- first_rethrow_symbol = create_rethrow_ref (0);
- final_rethrow = gen_exception_label ();
- last_rethrow_symbol = create_rethrow_ref (CODE_LABEL_NUMBER (final_rethrow));
+ tree t;
+
+ /* We can check for exact matches without front-end help. */
+ if (! lang_eh_type_covers)
+ {
+ for (t = handled; t ; t = TREE_CHAIN (t))
+ if (TREE_VALUE (t) == type)
+ return 1;
+ }
+ else
+ {
+ for (t = handled; t ; t = TREE_CHAIN (t))
+ if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
+ return 1;
+ }
+
+ return 0;
}
-/* Initialize the per-function EH information. */
+/* A subroutine of reachable_next_level. If we are collecting a list
+ of handlers, add one. After landing pad generation, reference
+ it instead of the handlers themselves. Further, the handlers are
+ all wired together, so by referencing one, we've got them all.
+ Before landing pad generation we reference each handler individually.
-void
-init_eh_for_function ()
+ LP_REGION contains the landing pad; REGION is the handler. */
+
+static void
+add_reachable_handler (info, lp_region, region)
+ struct reachable_info *info;
+ struct eh_region *lp_region;
+ struct eh_region *region;
{
- ehstack.top = 0;
- catchstack.top = 0;
- ehqueue.head = ehqueue.tail = 0;
- catch_clauses = NULL_RTX;
- false_label_stack = 0;
- caught_return_label_stack = 0;
- protect_list = NULL_TREE;
- current_function_ehc = NULL_RTX;
- eh_return_context = NULL_RTX;
- eh_return_stack_adjust = NULL_RTX;
- eh_return_handler = NULL_RTX;
- eh_return_stub_label = NULL_RTX;
-}
+ if (! info)
+ return;
-/* Save some of the per-function EH info into the save area denoted by
- P.
+ if (cfun->eh->built_landing_pads)
+ {
+ if (! info->handlers)
+ info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
+ }
+ else
+ info->handlers = alloc_INSN_LIST (region->label, info->handlers);
+}
- This is currently called from save_stmt_status. */
+/* Process one level of exception regions for reachability.
+ If TYPE_THROWN is non-null, then it is the *exact* type being
+ propagated. If INFO is non-null, then collect handler labels
+ and caught/allowed type information between invocations. */
-void
-save_eh_status (p)
- struct function *p;
+static enum reachable_code
+reachable_next_level (region, type_thrown, info)
+ struct eh_region *region;
+ tree type_thrown;
+ struct reachable_info *info;
{
- if (p == NULL)
- abort ();
+ switch (region->type)
+ {
+ case ERT_CLEANUP:
+ /* Before landing-pad generation, we model control flow
+ directly to the individual handlers. In this way we can
+ see that catch handler types may shadow one another. */
+ add_reachable_handler (info, region, region);
+ return RNL_MAYBE_CAUGHT;
+
+ case ERT_TRY:
+ {
+ struct eh_region *c;
+ enum reachable_code ret = RNL_NOT_CAUGHT;
- p->ehstack = ehstack;
- p->catchstack = catchstack;
- p->ehqueue = ehqueue;
- p->catch_clauses = catch_clauses;
- p->false_label_stack = false_label_stack;
- p->caught_return_label_stack = caught_return_label_stack;
- p->protect_list = protect_list;
- p->ehc = current_function_ehc;
- p->eh_return_stub_label = eh_return_stub_label;
+ for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
+ {
+ /* A catch-all handler ends the search. */
+ /* ??? _Unwind_ForcedUnwind will want outer cleanups
+ to be run as well. */
+ if (c->u.catch.type_list == NULL)
+ {
+ add_reachable_handler (info, region, c);
+ return RNL_CAUGHT;
+ }
+
+ if (type_thrown)
+ {
+ /* If we have at least one type match, end the search. */
+ tree tp_node = c->u.catch.type_list;
+
+ for (; tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ tree type = TREE_VALUE (tp_node);
+
+ if (type == type_thrown
+ || (lang_eh_type_covers
+ && (*lang_eh_type_covers) (type, type_thrown)))
+ {
+ add_reachable_handler (info, region, c);
+ return RNL_CAUGHT;
+ }
+ }
+
+ /* If we have definitive information of a match failure,
+ the catch won't trigger. */
+ if (lang_eh_type_covers)
+ return RNL_NOT_CAUGHT;
+ }
+
+ /* At this point, we either don't know what type is thrown or
+ don't have front-end assistance to help deciding if it is
+ covered by one of the types in the list for this region.
+
+ We'd then like to add this region to the list of reachable
+ handlers since it is indeed potentially reachable based on the
+ information we have.
+
+ Actually, this handler is for sure not reachable if all the
+ types it matches have already been caught. That is, it is only
+ potentially reachable if at least one of the types it catches
+ has not been previously caught. */
+
+ if (! info)
+ ret = RNL_MAYBE_CAUGHT;
+ else
+ {
+ tree tp_node = c->u.catch.type_list;
+ bool maybe_reachable = false;
+
+ /* Compute the potential reachability of this handler and
+ update the list of types caught at the same time. */
+ for (; tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ tree type = TREE_VALUE (tp_node);
+
+ if (! check_handled (info->types_caught, type))
+ {
+ info->types_caught
+ = tree_cons (NULL, type, info->types_caught);
+
+ maybe_reachable = true;
+ }
+ }
+
+ if (maybe_reachable)
+ {
+ add_reachable_handler (info, region, c);
+
+ /* ??? If the catch type is a base class of every allowed
+ type, then we know we can stop the search. */
+ ret = RNL_MAYBE_CAUGHT;
+ }
+ }
+ }
- init_eh_for_function ();
-}
+ return ret;
+ }
-/* Restore the per-function EH info saved into the area denoted by P.
+ case ERT_ALLOWED_EXCEPTIONS:
+ /* An empty list of types definitely ends the search. */
+ if (region->u.allowed.type_list == NULL_TREE)
+ {
+ add_reachable_handler (info, region, region);
+ return RNL_CAUGHT;
+ }
- This is currently called from restore_stmt_status. */
+ /* Collect a list of lists of allowed types for use in detecting
+ when a catch may be transformed into a catch-all. */
+ if (info)
+ info->types_allowed = tree_cons (NULL_TREE,
+ region->u.allowed.type_list,
+ info->types_allowed);
+
+ /* If we have definitive information about the type hierarchy,
+ then we can tell if the thrown type will pass through the
+ filter. */
+ if (type_thrown && lang_eh_type_covers)
+ {
+ if (check_handled (region->u.allowed.type_list, type_thrown))
+ return RNL_NOT_CAUGHT;
+ else
+ {
+ add_reachable_handler (info, region, region);
+ return RNL_CAUGHT;
+ }
+ }
-void
-restore_eh_status (p)
- struct function *p;
-{
- if (p == NULL)
- abort ();
+ add_reachable_handler (info, region, region);
+ return RNL_MAYBE_CAUGHT;
- protect_list = p->protect_list;
- caught_return_label_stack = p->caught_return_label_stack;
- false_label_stack = p->false_label_stack;
- catch_clauses = p->catch_clauses;
- ehqueue = p->ehqueue;
- ehstack = p->ehstack;
- catchstack = p->catchstack;
- current_function_ehc = p->ehc;
- eh_return_stub_label = p->eh_return_stub_label;
+ case ERT_CATCH:
+ /* Catch regions are handled by their controling try region. */
+ return RNL_NOT_CAUGHT;
+
+ case ERT_MUST_NOT_THROW:
+ /* Here we end our search, since no exceptions may propagate.
+ If we've touched down at some landing pad previous, then the
+ explicit function call we generated may be used. Otherwise
+ the call is made by the runtime. */
+ if (info && info->handlers)
+ {
+ add_reachable_handler (info, region, region);
+ return RNL_CAUGHT;
+ }
+ else
+ return RNL_BLOCKED;
+
+ case ERT_THROW:
+ case ERT_FIXUP:
+ case ERT_UNKNOWN:
+ /* Shouldn't see these here. */
+ break;
+ }
+
+ abort ();
}
-
-/* This section is for the exception handling specific optimization
- pass. First are the internal routines, and then the main
- optimization pass. */
-/* Determine if the given INSN can throw an exception. */
+/* Retrieve a list of labels of exception handlers which can be
+ reached by a given insn. */
-static int
-can_throw (insn)
+rtx
+reachable_handlers (insn)
rtx insn;
{
- /* Calls can always potentially throw exceptions. */
- if (GET_CODE (insn) == CALL_INSN)
- return 1;
+ struct reachable_info info;
+ struct eh_region *region;
+ tree type_thrown;
+ int region_number;
- if (asynchronous_exceptions)
+ if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) == RESX)
+ region_number = XINT (PATTERN (insn), 0);
+ else
{
- /* If we wanted asynchronous exceptions, then everything but NOTEs
- and CODE_LABELs could throw. */
- if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
- return 1;
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return NULL;
+ region_number = INTVAL (XEXP (note, 0));
}
- return 0;
-}
+ memset (&info, 0, sizeof (info));
-/* Scan a exception region looking for the matching end and then
- remove it if possible. INSN is the start of the region, N is the
- region number, and DELETE_OUTER is to note if anything in this
- region can throw.
+ region = cfun->eh->region_array[region_number];
- Regions are removed if they cannot possibly catch an exception.
- This is determined by invoking can_throw on each insn within the
- region; if can_throw returns true for any of the instructions, the
- region can catch an exception, since there is an insn within the
- region that is capable of throwing an exception.
+ type_thrown = NULL_TREE;
+ if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) == RESX)
+ {
+ /* A RESX leaves a region instead of entering it. Thus the
+ region itself may have been deleted out from under us. */
+ if (region == NULL)
+ return NULL;
+ region = region->outer;
+ }
+ else if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
+ }
- Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
- calls abort if it can't find one.
+ for (; region; region = region->outer)
+ if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
+ break;
- Can abort if INSN is not a NOTE_INSN_EH_REGION_BEGIN, or if N doesn't
- correspond to the region number, or if DELETE_OUTER is NULL. */
+ return info.handlers;
+}
-static rtx
-scan_region (insn, n, delete_outer)
+/* Determine if the given INSN can throw an exception that is caught
+ within the function. */
+
+bool
+can_throw_internal (insn)
rtx insn;
- int n;
- int *delete_outer;
{
- rtx start = insn;
-
- /* Assume we can delete the region. */
- int delete = 1;
-
- int r = find_func_region (n);
- /* Can't delete something which is rethrown to. */
- if (SYMBOL_REF_USED((function_eh_regions[r].rethrow_label)))
- delete = 0;
+ struct eh_region *region;
+ tree type_thrown;
+ rtx note;
- if (insn == NULL_RTX
- || GET_CODE (insn) != NOTE
- || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
- || NOTE_BLOCK_NUMBER (insn) != n
- || delete_outer == NULL)
- abort ();
+ if (! INSN_P (insn))
+ return false;
- insn = NEXT_INSN (insn);
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
- /* Look for the matching end. */
- while (! (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
- /* If anything can throw, we can't remove the region. */
- if (delete && can_throw (insn))
+ int i;
+ for (i = 0; i < 3; ++i)
{
- delete = 0;
+ rtx sub = XEXP (PATTERN (insn), i);
+ for (; sub ; sub = NEXT_INSN (sub))
+ if (can_throw_internal (sub))
+ return true;
}
+ return false;
+ }
- /* Watch out for and handle nested regions. */
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
- }
+ /* Every insn that might throw has an EH_REGION note. */
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return false;
+
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
- insn = NEXT_INSN (insn);
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
}
- /* The _BEG/_END NOTEs must match and nest. */
- if (NOTE_BLOCK_NUMBER (insn) != n)
- abort ();
+ /* If this exception is ignored by each and every containing region,
+ then control passes straight out. The runtime may handle some
+ regions, which also do not require processing internally. */
+ for (; region; region = region->outer)
+ {
+ enum reachable_code how = reachable_next_level (region, type_thrown, 0);
+ if (how == RNL_BLOCKED)
+ return false;
+ if (how != RNL_NOT_CAUGHT)
+ return true;
+ }
- /* If anything in this exception region can throw, we can throw. */
- if (! delete)
- *delete_outer = 0;
- else
+ return false;
+}
+
+/* Determine if the given INSN can throw an exception that is
+ visible outside the function. */
+
+bool
+can_throw_external (insn)
+ rtx insn;
+{
+ struct eh_region *region;
+ tree type_thrown;
+ rtx note;
+
+ if (! INSN_P (insn))
+ return false;
+
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
+
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
- /* Delete the start and end of the region. */
- delete_insn (start);
- delete_insn (insn);
-
-/* We no longer removed labels here, since flow will now remove any
- handler which cannot be called any more. */
-
-#if 0
- /* Only do this part if we have built the exception handler
- labels. */
- if (exception_handler_labels)
+ int i;
+ for (i = 0; i < 3; ++i)
{
- rtx x, *prev = &exception_handler_labels;
+ rtx sub = XEXP (PATTERN (insn), i);
+ for (; sub ; sub = NEXT_INSN (sub))
+ if (can_throw_external (sub))
+ return true;
+ }
+ return false;
+ }
- /* Find it in the list of handlers. */
- for (x = exception_handler_labels; x; x = XEXP (x, 1))
- {
- rtx label = XEXP (x, 0);
- if (CODE_LABEL_NUMBER (label) == n)
- {
- /* If we are the last reference to the handler,
- delete it. */
- if (--LABEL_NUSES (label) == 0)
- delete_insn (label);
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
+ {
+ /* Calls (and trapping insns) without notes are outside any
+ exception handling region in this function. We have to
+ assume it might throw. Given that the front end and middle
+ ends mark known NOTHROW functions, this isn't so wildly
+ inaccurate. */
+ return (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))));
+ }
+ if (INTVAL (XEXP (note, 0)) <= 0)
+ return false;
- if (optimize)
- {
- /* Remove it from the list of exception handler
- labels, if we are optimizing. If we are not, then
- leave it in the list, as we are not really going to
- remove the region. */
- *prev = XEXP (x, 1);
- XEXP (x, 1) = 0;
- XEXP (x, 0) = 0;
- }
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
- break;
- }
- prev = &XEXP (x, 1);
- }
- }
-#endif
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
}
- return insn;
-}
-/* Perform various interesting optimizations for exception handling
- code.
+ /* If the exception is caught or blocked by any containing region,
+ then it is not seen by any calling function. */
+ for (; region ; region = region->outer)
+ if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
+ return false;
- We look for empty exception regions and make them go (away). The
- jump optimization code will remove the handler if nothing else uses
- it. */
+ return true;
+}
-void
-exception_optimize ()
+/* True if nothing in this function can throw outside this function. */
+
+bool
+nothrow_function_p ()
{
rtx insn;
- int n;
- /* Remove empty regions. */
+ if (! flag_exceptions)
+ return true;
+
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- {
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- /* Since scan_region will return the NOTE_INSN_EH_REGION_END
- insn, we will indirectly skip through all the insns
- inbetween. We are also guaranteed that the value of insn
- returned will be valid, as otherwise scan_region won't
- return. */
- insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
- }
- }
+ if (can_throw_external (insn))
+ return false;
+ for (insn = current_function_epilogue_delay_list; insn;
+ insn = XEXP (insn, 1))
+ if (can_throw_external (insn))
+ return false;
+
+ return true;
}
+
-/* Various hooks for the DWARF 2 __throw routine. */
+/* Various hooks for unwind library. */
/* Do any necessary initialization to access arbitrary stack frames.
On the SPARC, this means flushing the register windows. */
@@ -2673,7 +3042,7 @@ void
expand_builtin_unwind_init ()
{
/* Set this so all the registers get saved in our frame; we need to be
- able to copy the saved values for any registers from frames we unwind. */
+ able to copy the saved values for any registers from frames we unwind. */
current_function_has_nonlocal_label = 1;
#ifdef SETUP_FRAME_ADDRESSES
@@ -2681,6 +3050,33 @@ expand_builtin_unwind_init ()
#endif
}
+rtx
+expand_builtin_eh_return_data_regno (arglist)
+ tree arglist;
+{
+ tree which = TREE_VALUE (arglist);
+ unsigned HOST_WIDE_INT iwhich;
+
+ if (TREE_CODE (which) != INTEGER_CST)
+ {
+ error ("argument of `__builtin_eh_return_regno' must be constant");
+ return constm1_rtx;
+ }
+
+ iwhich = tree_low_cst (which, 1);
+ iwhich = EH_RETURN_DATA_REGNO (iwhich);
+ if (iwhich == INVALID_REGNUM)
+ return constm1_rtx;
+
+#ifdef DWARF_FRAME_REGNUM
+ iwhich = DWARF_FRAME_REGNUM (iwhich);
+#else
+ iwhich = DBX_REGISTER_NUMBER (iwhich);
+#endif
+
+ return GEN_INT (iwhich);
+}
+
/* Given a value extracted from the return address register or stack slot,
return the actual address encoded in that value. */
@@ -2689,7 +3085,18 @@ expand_builtin_extract_return_addr (addr_tree)
tree addr_tree;
{
rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
- return eh_outer_context (addr);
+
+ /* First mask out any unwanted bits. */
+#ifdef MASK_RETURN_ADDR
+ expand_and (addr, MASK_RETURN_ADDR, addr);
+#endif
+
+ /* Then adjust to find the real return address. */
+#if defined (RETURN_ADDR_OFFSET)
+ addr = plus_constant (addr, RETURN_ADDR_OFFSET);
+#endif
+
+ return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
@@ -2700,287 +3107,759 @@ rtx
expand_builtin_frob_return_addr (addr_tree)
tree addr_tree;
{
- rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+ rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (addr) != Pmode)
+ addr = convert_memory_address (Pmode, addr);
+#endif
+
#ifdef RETURN_ADDR_OFFSET
+ addr = force_reg (Pmode, addr);
addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif
+
return addr;
}
-/* Choose three registers for communication between the main body of
- __throw and the epilogue (or eh stub) and the exception handler.
- We must do this with hard registers because the epilogue itself
- will be generated after reload, at which point we may not reference
- pseudos at all.
-
- The first passes the exception context to the handler. For this
- we use the return value register for a void*.
+/* Set up the epilogue with the magic bits we'll need to return to the
+ exception handler. */
- The second holds the stack pointer value to be restored. For
- this we use the static chain register if it exists and is different
- from the previous, otherwise some arbitrary call-clobbered register.
+void
+expand_builtin_eh_return (stackadj_tree, handler_tree)
+ tree stackadj_tree, handler_tree;
+{
+ rtx stackadj, handler;
- The third holds the address of the handler itself. Here we use
- some arbitrary call-clobbered register. */
+ stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
+ handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
-static void
-eh_regs (pcontext, psp, pra, outgoing)
- rtx *pcontext, *psp, *pra;
- int outgoing;
-{
- rtx rcontext, rsp, rra;
- int i;
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (stackadj) != Pmode)
+ stackadj = convert_memory_address (Pmode, stackadj);
-#ifdef FUNCTION_OUTGOING_VALUE
- if (outgoing)
- rcontext = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
- current_function_decl);
- else
+ if (GET_MODE (handler) != Pmode)
+ handler = convert_memory_address (Pmode, handler);
#endif
- rcontext = FUNCTION_VALUE (build_pointer_type (void_type_node),
- current_function_decl);
-#ifdef STATIC_CHAIN_REGNUM
- if (outgoing)
- rsp = static_chain_incoming_rtx;
+ if (! cfun->eh->ehr_label)
+ {
+ cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
+ cfun->eh->ehr_handler = copy_to_reg (handler);
+ cfun->eh->ehr_label = gen_label_rtx ();
+ }
else
- rsp = static_chain_rtx;
- if (REGNO (rsp) == REGNO (rcontext))
-#endif /* STATIC_CHAIN_REGNUM */
- rsp = NULL_RTX;
-
- if (rsp == NULL_RTX)
{
- for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
- if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (rcontext))
- break;
- if (i == FIRST_PSEUDO_REGISTER)
- abort();
+ if (stackadj != cfun->eh->ehr_stackadj)
+ emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
+ if (handler != cfun->eh->ehr_handler)
+ emit_move_insn (cfun->eh->ehr_handler, handler);
+ }
+
+ emit_jump (cfun->eh->ehr_label);
+}
+
+void
+expand_eh_return ()
+{
+ rtx sa, ra, around_label;
+
+ if (! cfun->eh->ehr_label)
+ return;
- rsp = gen_rtx_REG (Pmode, i);
+ sa = EH_RETURN_STACKADJ_RTX;
+ if (! sa)
+ {
+ error ("__builtin_eh_return not supported on this target");
+ return;
}
- for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
- if (call_used_regs[i] && ! fixed_regs[i]
- && i != REGNO (rcontext) && i != REGNO (rsp))
- break;
- if (i == FIRST_PSEUDO_REGISTER)
- abort();
+ current_function_calls_eh_return = 1;
+
+ around_label = gen_label_rtx ();
+ emit_move_insn (sa, const0_rtx);
+ emit_jump (around_label);
+
+ emit_label (cfun->eh->ehr_label);
+ clobber_return_register ();
+
+#ifdef HAVE_eh_return
+ if (HAVE_eh_return)
+ emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
+ else
+#endif
+ {
+ ra = EH_RETURN_HANDLER_RTX;
+ if (! ra)
+ {
+ error ("__builtin_eh_return not supported on this target");
+ ra = gen_reg_rtx (Pmode);
+ }
- rra = gen_rtx_REG (Pmode, i);
+ emit_move_insn (sa, cfun->eh->ehr_stackadj);
+ emit_move_insn (ra, cfun->eh->ehr_handler);
+ }
- *pcontext = rcontext;
- *psp = rsp;
- *pra = rra;
+ emit_label (around_label);
}
+
+/* In the following functions, we represent entries in the action table
+ as 1-based indices. Special cases are:
-/* Retrieve the register which contains the pointer to the eh_context
- structure set the __throw. */
+ 0: null action record, non-null landing pad; implies cleanups
+ -1: null action record, null landing pad; implies no action
+ -2: no call-site entry; implies must_not_throw
+ -3: we have yet to process outer regions
-rtx
-get_reg_for_handler ()
+ Further, no special cases apply to the "next" field of the record.
+ For next, 0 means end of list. */
+
+struct action_record
{
- rtx reg1;
- reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
- current_function_decl);
- return reg1;
+ int offset;
+ int filter;
+ int next;
+};
+
+static int
+action_record_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ const struct action_record *entry = (const struct action_record *) pentry;
+ const struct action_record *data = (const struct action_record *) pdata;
+ return entry->filter == data->filter && entry->next == data->next;
}
-/* Set up the epilogue with the magic bits we'll need to return to the
- exception handler. */
+static hashval_t
+action_record_hash (pentry)
+ const PTR pentry;
+{
+ const struct action_record *entry = (const struct action_record *) pentry;
+ return entry->next * 1009 + entry->filter;
+}
-void
-expand_builtin_eh_return (context, stack, handler)
- tree context, stack, handler;
+static int
+add_action_record (ar_hash, filter, next)
+ htab_t ar_hash;
+ int filter, next;
{
- if (eh_return_context)
- error("Duplicate call to __builtin_eh_return");
+ struct action_record **slot, *new, tmp;
- eh_return_context
- = copy_to_reg (expand_expr (context, NULL_RTX, VOIDmode, 0));
- eh_return_stack_adjust
- = copy_to_reg (expand_expr (stack, NULL_RTX, VOIDmode, 0));
- eh_return_handler
- = copy_to_reg (expand_expr (handler, NULL_RTX, VOIDmode, 0));
+ tmp.filter = filter;
+ tmp.next = next;
+ slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
+
+ if ((new = *slot) == NULL)
+ {
+ new = (struct action_record *) xmalloc (sizeof (*new));
+ new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
+ new->filter = filter;
+ new->next = next;
+ *slot = new;
+
+ /* The filter value goes in untouched. The link to the next
+ record is a "self-relative" byte offset, or zero to indicate
+ that there is no next record. So convert the absolute 1 based
+ indices we've been carrying around into a displacement. */
+
+ push_sleb128 (&cfun->eh->action_record_data, filter);
+ if (next)
+ next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
+ push_sleb128 (&cfun->eh->action_record_data, next);
+ }
+
+ return new->offset;
}
-void
-expand_eh_return ()
+static int
+collect_one_action_chain (ar_hash, region)
+ htab_t ar_hash;
+ struct eh_region *region;
{
- rtx reg1, reg2, reg3;
- rtx stub_start, after_stub;
- rtx ra, tmp;
+ struct eh_region *c;
+ int next;
- if (!eh_return_context)
- return;
+ /* If we've reached the top of the region chain, then we have
+ no actions, and require no landing pad. */
+ if (region == NULL)
+ return -1;
- current_function_cannot_inline = N_("function uses __builtin_eh_return");
+ switch (region->type)
+ {
+ case ERT_CLEANUP:
+ /* A cleanup adds a zero filter to the beginning of the chain, but
+ there are special cases to look out for. If there are *only*
+ cleanups along a path, then it compresses to a zero action.
+ Further, if there are multiple cleanups along a path, we only
+ need to represent one of them, as that is enough to trigger
+ entry to the landing pad at runtime. */
+ next = collect_one_action_chain (ar_hash, region->outer);
+ if (next <= 0)
+ return 0;
+ for (c = region->outer; c ; c = c->outer)
+ if (c->type == ERT_CLEANUP)
+ return next;
+ return add_action_record (ar_hash, 0, next);
+
+ case ERT_TRY:
+ /* Process the associated catch regions in reverse order.
+ If there's a catch-all handler, then we don't need to
+ search outer regions. Use a magic -3 value to record
+ that we haven't done the outer search. */
+ next = -3;
+ for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
+ {
+ if (c->u.catch.type_list == NULL)
+ {
+ /* Retrieve the filter from the head of the filter list
+ where we have stored it (see assign_filter_values). */
+ int filter
+ = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
- eh_regs (&reg1, &reg2, &reg3, 1);
-#ifdef POINTERS_EXTEND_UNSIGNED
- eh_return_context = convert_memory_address (Pmode, eh_return_context);
- eh_return_stack_adjust =
- convert_memory_address (Pmode, eh_return_stack_adjust);
- eh_return_handler = convert_memory_address (Pmode, eh_return_handler);
-#endif
- emit_move_insn (reg1, eh_return_context);
- emit_move_insn (reg2, eh_return_stack_adjust);
- emit_move_insn (reg3, eh_return_handler);
+ next = add_action_record (ar_hash, filter, 0);
+ }
+ else
+ {
+ /* Once the outer search is done, trigger an action record for
+ each filter we have. */
+ tree flt_node;
+
+ if (next == -3)
+ {
+ next = collect_one_action_chain (ar_hash, region->outer);
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
+ next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+ }
- /* Talk directly to the target's epilogue code when possible. */
+ flt_node = c->u.catch.filter_list;
+ for (; flt_node; flt_node = TREE_CHAIN (flt_node))
+ {
+ int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
+ next = add_action_record (ar_hash, filter, next);
+ }
+ }
+ }
+ return next;
+
+ case ERT_ALLOWED_EXCEPTIONS:
+ /* An exception specification adds its filter to the
+ beginning of the chain. */
+ next = collect_one_action_chain (ar_hash, region->outer);
+ return add_action_record (ar_hash, region->u.allowed.filter,
+ next < 0 ? 0 : next);
+
+ case ERT_MUST_NOT_THROW:
+ /* A must-not-throw region with no inner handlers or cleanups
+ requires no call-site entry. Note that this differs from
+ the no handler or cleanup case in that we do require an lsda
+ to be generated. Return a magic -2 value to record this. */
+ return -2;
+
+ case ERT_CATCH:
+ case ERT_THROW:
+ /* CATCH regions are handled in TRY above. THROW regions are
+ for optimization information only and produce no output. */
+ return collect_one_action_chain (ar_hash, region->outer);
+
+ default:
+ abort ();
+ }
+}
-#ifdef HAVE_eh_epilogue
- if (HAVE_eh_epilogue)
+static int
+add_call_site (landing_pad, action)
+ rtx landing_pad;
+ int action;
+{
+ struct call_site_record *data = cfun->eh->call_site_data;
+ int used = cfun->eh->call_site_data_used;
+ int size = cfun->eh->call_site_data_size;
+
+ if (used >= size)
{
- emit_insn (gen_eh_epilogue (reg1, reg2, reg3));
- return;
+ size = (size ? size * 2 : 64);
+ data = (struct call_site_record *)
+ xrealloc (data, sizeof (*data) * size);
+ cfun->eh->call_site_data = data;
+ cfun->eh->call_site_data_size = size;
}
-#endif
- /* Otherwise, use the same stub technique we had before. */
+ data[used].landing_pad = landing_pad;
+ data[used].action = action;
- eh_return_stub_label = stub_start = gen_label_rtx ();
- after_stub = gen_label_rtx ();
+ cfun->eh->call_site_data_used = used + 1;
- /* Set the return address to the stub label. */
+ return used + call_site_base;
+}
- ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
- 0, hard_frame_pointer_rtx);
- if (GET_CODE (ra) == REG && REGNO (ra) >= FIRST_PSEUDO_REGISTER)
- abort();
+/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
+ The new note numbers will not refer to region numbers, but
+ instead to call site entries. */
- tmp = memory_address (Pmode, gen_rtx_LABEL_REF (Pmode, stub_start));
-#ifdef RETURN_ADDR_OFFSET
- tmp = plus_constant (tmp, -RETURN_ADDR_OFFSET);
-#endif
- tmp = force_operand (tmp, ra);
- if (tmp != ra)
- emit_move_insn (ra, tmp);
+void
+convert_to_eh_region_ranges ()
+{
+ rtx insn, iter, note;
+ htab_t ar_hash;
+ int last_action = -3;
+ rtx last_action_insn = NULL_RTX;
+ rtx last_landing_pad = NULL_RTX;
+ rtx first_no_action_insn = NULL_RTX;
+ int call_site = 0;
+
+ if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
+ return;
+
+ VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
+
+ ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
+
+ for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
+ if (INSN_P (iter))
+ {
+ struct eh_region *region;
+ int this_action;
+ rtx this_landing_pad;
+
+ insn = iter;
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
+
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
+ {
+ if (! (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn)))))
+ continue;
+ this_action = -1;
+ region = NULL;
+ }
+ else
+ {
+ if (INTVAL (XEXP (note, 0)) <= 0)
+ continue;
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ this_action = collect_one_action_chain (ar_hash, region);
+ }
- /* Indicate that the registers are in fact used. */
- emit_insn (gen_rtx_USE (VOIDmode, reg1));
- emit_insn (gen_rtx_USE (VOIDmode, reg2));
- emit_insn (gen_rtx_USE (VOIDmode, reg3));
- if (GET_CODE (ra) == REG)
- emit_insn (gen_rtx_USE (VOIDmode, ra));
+ /* Existence of catch handlers, or must-not-throw regions
+ implies that an lsda is needed (even if empty). */
+ if (this_action != -1)
+ cfun->uses_eh_lsda = 1;
- /* Generate the stub. */
+ /* Delay creation of region notes for no-action regions
+ until we're sure that an lsda will be required. */
+ else if (last_action == -3)
+ {
+ first_no_action_insn = iter;
+ last_action = -1;
+ }
- emit_jump (after_stub);
- emit_label (stub_start);
+ /* Cleanups and handlers may share action chains but not
+ landing pads. Collect the landing pad for this region. */
+ if (this_action >= 0)
+ {
+ struct eh_region *o;
+ for (o = region; ! o->landing_pad ; o = o->outer)
+ continue;
+ this_landing_pad = o->landing_pad;
+ }
+ else
+ this_landing_pad = NULL_RTX;
+
+ /* Differing actions or landing pads implies a change in call-site
+ info, which implies some EH_REGION note should be emitted. */
+ if (last_action != this_action
+ || last_landing_pad != this_landing_pad)
+ {
+ /* If we'd not seen a previous action (-3) or the previous
+ action was must-not-throw (-2), then we do not need an
+ end note. */
+ if (last_action >= -1)
+ {
+ /* If we delayed the creation of the begin, do it now. */
+ if (first_no_action_insn)
+ {
+ call_site = add_call_site (NULL_RTX, 0);
+ note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
+ first_no_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ first_no_action_insn = NULL_RTX;
+ }
+
+ note = emit_note_after (NOTE_INSN_EH_REGION_END,
+ last_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
+
+ /* If the new action is must-not-throw, then no region notes
+ are created. */
+ if (this_action >= -1)
+ {
+ call_site = add_call_site (this_landing_pad,
+ this_action < 0 ? 0 : this_action);
+ note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
+
+ last_action = this_action;
+ last_landing_pad = this_landing_pad;
+ }
+ last_action_insn = iter;
+ }
- eh_regs (&reg1, &reg2, &reg3, 0);
- adjust_stack (reg2);
- emit_indirect_jump (reg3);
+ if (last_action >= -1 && ! first_no_action_insn)
+ {
+ note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
- emit_label (after_stub);
+ htab_delete (ar_hash);
}
+
+static void
+push_uleb128 (data_area, value)
+ varray_type *data_area;
+ unsigned int value;
+{
+ do
+ {
+ unsigned char byte = value & 0x7f;
+ value >>= 7;
+ if (value)
+ byte |= 0x80;
+ VARRAY_PUSH_UCHAR (*data_area, byte);
+ }
+ while (value);
+}
-/* This contains the code required to verify whether arbitrary instructions
- are in the same exception region. */
+static void
+push_sleb128 (data_area, value)
+ varray_type *data_area;
+ int value;
+{
+ unsigned char byte;
+ int more;
-static int *insn_eh_region = (int *)0;
-static int maximum_uid;
+ do
+ {
+ byte = value & 0x7f;
+ value >>= 7;
+ more = ! ((value == 0 && (byte & 0x40) == 0)
+ || (value == -1 && (byte & 0x40) != 0));
+ if (more)
+ byte |= 0x80;
+ VARRAY_PUSH_UCHAR (*data_area, byte);
+ }
+ while (more);
+}
-static void
-set_insn_eh_region (first, region_num)
- rtx *first;
- int region_num;
+
+#ifndef HAVE_AS_LEB128
+static int
+dw2_size_of_call_site_table ()
{
- rtx insn;
- int rnum;
+ int n = cfun->eh->call_site_data_used;
+ int size = n * (4 + 4 + 4);
+ int i;
- for (insn = *first; insn; insn = NEXT_INSN (insn))
+ for (i = 0; i < n; ++i)
{
- if ((GET_CODE (insn) == NOTE) &&
- (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG))
- {
- rnum = NOTE_BLOCK_NUMBER (insn);
- insn_eh_region[INSN_UID (insn)] = rnum;
- insn = NEXT_INSN (insn);
- set_insn_eh_region (&insn, rnum);
- /* Upon return, insn points to the EH_REGION_END of nested region */
- continue;
- }
- insn_eh_region[INSN_UID (insn)] = region_num;
- if ((GET_CODE (insn) == NOTE) &&
- (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
- break;
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ size += size_of_uleb128 (cs->action);
}
- *first = insn;
+
+ return size;
}
-/* Free the insn table, an make sure it cannot be used again. */
+static int
+sjlj_size_of_call_site_table ()
+{
+ int n = cfun->eh->call_site_data_used;
+ int size = 0;
+ int i;
-void
-free_insn_eh_region ()
+ for (i = 0; i < n; ++i)
+ {
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ size += size_of_uleb128 (INTVAL (cs->landing_pad));
+ size += size_of_uleb128 (cs->action);
+ }
+
+ return size;
+}
+#endif
+
+static void
+dw2_output_call_site_table ()
{
- if (!doing_eh (0))
- return;
+ const char *const function_start_lab
+ = IDENTIFIER_POINTER (current_function_func_begin_label);
+ int n = cfun->eh->call_site_data_used;
+ int i;
- if (insn_eh_region)
+ for (i = 0; i < n; ++i)
{
- free (insn_eh_region);
- insn_eh_region = (int *)0;
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ char reg_start_lab[32];
+ char reg_end_lab[32];
+ char landing_pad_lab[32];
+
+ ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
+ ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
+
+ if (cs->landing_pad)
+ ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
+ CODE_LABEL_NUMBER (cs->landing_pad));
+
+ /* ??? Perhaps use insn length scaling if the assembler supports
+ generic arithmetic. */
+ /* ??? Perhaps use attr_length to choose data1 or data2 instead of
+ data4 if the function is small enough. */
+#ifdef HAVE_AS_LEB128
+ dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
+ "region %d start", i);
+ dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
+ "length");
+ if (cs->landing_pad)
+ dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
+ "landing pad");
+ else
+ dw2_asm_output_data_uleb128 (0, "landing pad");
+#else
+ dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
+ "region %d start", i);
+ dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
+ if (cs->landing_pad)
+ dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
+ "landing pad");
+ else
+ dw2_asm_output_data (4, 0, "landing pad");
+#endif
+ dw2_asm_output_data_uleb128 (cs->action, "action");
}
+
+ call_site_base += n;
}
-/* Initialize the table. max_uid must be calculated and handed into
- this routine. If it is unavailable, passing a value of 0 will
- cause this routine to calculate it as well. */
+static void
+sjlj_output_call_site_table ()
+{
+ int n = cfun->eh->call_site_data_used;
+ int i;
-void
-init_insn_eh_region (first, max_uid)
- rtx first;
- int max_uid;
+ for (i = 0; i < n; ++i)
+ {
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+
+ dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
+ "region %d landing pad", i);
+ dw2_asm_output_data_uleb128 (cs->action, "action");
+ }
+
+ call_site_base += n;
+}
+
+void
+output_function_exception_table ()
{
- rtx insn;
+ int tt_format, cs_format, lp_format, i, n;
+#ifdef HAVE_AS_LEB128
+ char ttype_label[32];
+ char cs_after_size_label[32];
+ char cs_end_label[32];
+#else
+ int call_site_len;
+#endif
+ int have_tt_data;
+ int funcdef_number;
+ int tt_format_size = 0;
- if (!doing_eh (0))
+ /* Not all functions need anything. */
+ if (! cfun->uses_eh_lsda)
return;
- if (insn_eh_region)
- free_insn_eh_region();
+ funcdef_number = (USING_SJLJ_EXCEPTIONS
+ ? sjlj_funcdef_number
+ : current_funcdef_number);
- if (max_uid == 0)
- for (insn = first; insn; insn = NEXT_INSN (insn))
- if (INSN_UID (insn) > max_uid) /* find largest UID */
- max_uid = INSN_UID (insn);
+#ifdef IA64_UNWIND_INFO
+ fputs ("\t.personality\t", asm_out_file);
+ output_addr_const (asm_out_file, eh_personality_libfunc);
+ fputs ("\n\t.handlerdata\n", asm_out_file);
+ /* Note that varasm still thinks we're in the function's code section.
+ The ".endp" directive that will immediately follow will take us back. */
+#else
+ (*targetm.asm_out.exception_section) ();
+#endif
- maximum_uid = max_uid;
- insn_eh_region = (int *) malloc ((max_uid + 1) * sizeof (int));
- insn = first;
- set_insn_eh_region (&insn, 0);
-}
+ have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
+ || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
+ /* Indicate the format of the @TType entries. */
+ if (! have_tt_data)
+ tt_format = DW_EH_PE_omit;
+ else
+ {
+ tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+#ifdef HAVE_AS_LEB128
+ ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
+#endif
+ tt_format_size = size_of_encoded_value (tt_format);
-/* Check whether 2 instructions are within the same region. */
+ assemble_align (tt_format_size * BITS_PER_UNIT);
+ }
-int
-in_same_eh_region (insn1, insn2)
- rtx insn1, insn2;
-{
- int ret, uid1, uid2;
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
- /* If no exceptions, instructions are always in same region. */
- if (!doing_eh (0))
- return 1;
+ /* The LSDA header. */
- /* If the table isn't allocated, assume the worst. */
- if (!insn_eh_region)
- return 0;
+ /* Indicate the format of the landing pad start pointer. An omitted
+ field implies @LPStart == @Start. */
+ /* Currently we always put @LPStart == @Start. This field would
+ be most useful in moving the landing pads completely out of
+ line to another section, but it could also be used to minimize
+ the size of uleb128 landing pad offsets. */
+ lp_format = DW_EH_PE_omit;
+ dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
+ eh_data_format_name (lp_format));
- uid1 = INSN_UID (insn1);
- uid2 = INSN_UID (insn2);
+ /* @LPStart pointer would go here. */
- /* if instructions have been allocated beyond the end, either
- the table is out of date, or this is a late addition, or
- something... Assume the worst. */
- if (uid1 > maximum_uid || uid2 > maximum_uid)
- return 0;
+ dw2_asm_output_data (1, tt_format, "@TType format (%s)",
+ eh_data_format_name (tt_format));
- ret = (insn_eh_region[uid1] == insn_eh_region[uid2]);
- return ret;
-}
+#ifndef HAVE_AS_LEB128
+ if (USING_SJLJ_EXCEPTIONS)
+ call_site_len = sjlj_size_of_call_site_table ();
+ else
+ call_site_len = dw2_size_of_call_site_table ();
+#endif
+
+ /* A pc-relative 4-byte displacement to the @TType data. */
+ if (have_tt_data)
+ {
+#ifdef HAVE_AS_LEB128
+ char ttype_after_disp_label[32];
+ ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
+ funcdef_number);
+ dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
+ "@TType base offset");
+ ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
+#else
+ /* Ug. Alignment queers things. */
+ unsigned int before_disp, after_disp, last_disp, disp;
+
+ before_disp = 1 + 1;
+ after_disp = (1 + size_of_uleb128 (call_site_len)
+ + call_site_len
+ + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
+ + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
+ * tt_format_size));
+
+ disp = after_disp;
+ do
+ {
+ unsigned int disp_size, pad;
+
+ last_disp = disp;
+ disp_size = size_of_uleb128 (disp);
+ pad = before_disp + disp_size + after_disp;
+ if (pad % tt_format_size)
+ pad = tt_format_size - (pad % tt_format_size);
+ else
+ pad = 0;
+ disp = after_disp + pad;
+ }
+ while (disp != last_disp);
+
+ dw2_asm_output_data_uleb128 (disp, "@TType base offset");
+#endif
+ }
+
+ /* Indicate the format of the call-site offsets. */
+#ifdef HAVE_AS_LEB128
+ cs_format = DW_EH_PE_uleb128;
+#else
+ cs_format = DW_EH_PE_udata4;
+#endif
+ dw2_asm_output_data (1, cs_format, "call-site format (%s)",
+ eh_data_format_name (cs_format));
+
+#ifdef HAVE_AS_LEB128
+ ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
+ funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
+ funcdef_number);
+ dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
+ "Call-site table length");
+ ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_output_call_site_table ();
+ else
+ dw2_output_call_site_table ();
+ ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
+#else
+ dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_output_call_site_table ();
+ else
+ dw2_output_call_site_table ();
+#endif
+
+ /* ??? Decode and interpret the data for flag_debug_asm. */
+ n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
+ for (i = 0; i < n; ++i)
+ dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
+ (i ? NULL : "Action record table"));
+
+ if (have_tt_data)
+ assemble_align (tt_format_size * BITS_PER_UNIT);
+
+ i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
+ while (i-- > 0)
+ {
+ tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
+ rtx value;
+
+ if (type == NULL_TREE)
+ type = integer_zero_node;
+ else
+ type = lookup_type_for_runtime (type);
+ value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
+ if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
+ assemble_integer (value, tt_format_size,
+ tt_format_size * BITS_PER_UNIT, 1);
+ else
+ dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
+ }
+
+#ifdef HAVE_AS_LEB128
+ if (have_tt_data)
+ ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
+#endif
+
+ /* ??? Decode and interpret the data for flag_debug_asm. */
+ n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
+ for (i = 0; i < n; ++i)
+ dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
+ (i ? NULL : "Exception specification table"));
+
+ function_section (current_function_decl);
+
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_funcdef_number += 1;
+}
OpenPOWER on IntegriCloud