Diffstat (limited to 'contrib/gcc/tree-inline.c')
-rw-r--r-- contrib/gcc/tree-inline.c | 3494
1 file changed, 2170 insertions(+), 1324 deletions(-)
diff --git a/contrib/gcc/tree-inline.c b/contrib/gcc/tree-inline.c
index ccc49e7..1c0b79b 100644
--- a/contrib/gcc/tree-inline.c
+++ b/contrib/gcc/tree-inline.c
@@ -1,5 +1,5 @@
-/* Control and data flow functions for trees.
- Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+/* Tree inlining.
+ Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
This file is part of GCC.
@@ -16,8 +16,8 @@ GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
@@ -32,23 +32,59 @@ Boston, MA 02111-1307, USA. */
#include "params.h"
#include "input.h"
#include "insn-config.h"
-#include "integrate.h"
#include "varray.h"
#include "hashtab.h"
-#include "splay-tree.h"
#include "langhooks.h"
+#include "basic-block.h"
+#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
+#include "tree-mudflap.h"
+#include "tree-flow.h"
+#include "function.h"
+#include "ggc.h"
+#include "tree-flow.h"
#include "diagnostic.h"
-
-/* This should be eventually be generalized to other languages, but
- this would require a shared function-as-trees infrastructure. */
-#ifndef INLINER_FOR_JAVA
-#include "c-common.h"
-#else /* INLINER_FOR_JAVA */
-#include "parse.h"
-#include "java-tree.h"
-#endif /* INLINER_FOR_JAVA */
+#include "except.h"
+#include "debug.h"
+#include "pointer-set.h"
+#include "ipa-prop.h"
+
+/* I'm not really happy about this, but we need to handle gimple and
+   non-gimple trees.  */
+#include "tree-gimple.h"
+
+/* Inlining, Cloning, Versioning, Parallelization
+
+ Inlining: a function body is duplicated, but the PARM_DECLs are
+ remapped into VAR_DECLs, and non-void RETURN_EXPRs become
+ MODIFY_EXPRs that store to a dedicated returned-value variable.
+ The duplicated eh_region info of the copy will later be appended
+ to the info for the caller; the eh_region info in copied throwing
+ statements and RESX_EXPRs is adjusted accordingly.
+
+ Cloning: (only in C++) We have one body for a con/de/structor, and
+ multiple function decls, each with a unique parameter list.
+ Duplicate the body, using the given splay tree; some parameters
+ will become constants (like 0 or 1).
+
+   Versioning: a function body is duplicated, but the result is a new
+   function rather than being inserted into the blocks of an existing
+   function as with inlining.  Some parameters will become constants.
+
+ Parallelization: a region of a function is duplicated resulting in
+ a new function. Variables may be replaced with complex expressions
+ to enable shared variable semantics.
+
+   All of these will simultaneously look up any callgraph edges.  If
+   we're going to inline the duplicated function body, and the given
+   function has some cloned callgraph nodes (one for each place this
+   function will be inlined), those callgraph edges will be duplicated.
+ If we're cloning the body, those callgraph edges will be
+ updated to point into the new body. (Note that the original
+ callgraph node and edge list will not be altered.)
+
+ See the CALL_EXPR handling case in copy_body_r (). */
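As a rough source-level sketch of the inlining case described in the comment above (hypothetical names; the real transformation operates on GIMPLE trees, not C source):

    static int sq (int x) { return x * x; }

    int caller_before (int a)
    {
      return sq (a + 1);
    }

    /* After inlining, approximately: the PARM_DECL x has been remapped
       to a VAR_DECL, and the non-void RETURN_EXPR has become a
       MODIFY_EXPR storing to a dedicated returned-value variable.  */
    int caller_after (int a)
    {
      int x_1 = a + 1;        /* parameter initialized from the argument */
      int retval_2;           /* returned-value variable */
      retval_2 = x_1 * x_1;   /* RETURN_EXPR -> MODIFY_EXPR */
      return retval_2;
    }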
/* 0 if we should not perform inlining.
1 if we should expand functions calls inline at the tree level.
@@ -69,136 +105,96 @@ int flag_inline_trees = 0;
o Provide heuristics to clamp inlining of recursive template
calls? */
-/* Data required for function inlining. */
-
-typedef struct inline_data
-{
- /* A stack of the functions we are inlining. For example, if we are
- compiling `f', which calls `g', which calls `h', and we are
- inlining the body of `h', the stack will contain, `h', followed
- by `g', followed by `f'. The first few elements of the stack may
- contain other functions that we know we should not recurse into,
- even though they are not directly being inlined. */
- varray_type fns;
- /* The index of the first element of FNS that really represents an
- inlined function. */
- unsigned first_inlined_fn;
- /* The label to jump to when a return statement is encountered. If
- this value is NULL, then return statements will simply be
- remapped as return statements, rather than as jumps. */
- tree ret_label;
- /* The map from local declarations in the inlined function to
- equivalents in the function into which it is being inlined. */
- splay_tree decl_map;
- /* Nonzero if we are currently within the cleanup for a
- TARGET_EXPR. */
- int in_target_cleanup_p;
- /* A list of the functions current function has inlined. */
- varray_type inlined_fns;
- /* We use the same mechanism to build clones that we do to perform
- inlining. However, there are a few places where we need to
- distinguish between those two situations. This flag is true if
- we are cloning, rather than inlining. */
- bool cloning_p;
- /* Hash table used to prevent walk_tree from visiting the same node
- umpteen million times. */
- htab_t tree_pruner;
- /* Decl of function we are inlining into. */
- tree decl;
- tree current_decl;
-} inline_data;
-
/* Prototypes. */
-static tree declare_return_variable (inline_data *, tree, tree *);
-static tree copy_body_r (tree *, int *, void *);
-static tree copy_body (inline_data *);
-static tree expand_call_inline (tree *, int *, void *);
-static void expand_calls_inline (tree *, inline_data *);
+static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
+static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
-static tree remap_decl (tree, inline_data *);
-static tree remap_type (tree, inline_data *);
-#ifndef INLINER_FOR_JAVA
-static tree initialize_inlined_parameters (inline_data *, tree, tree);
-static void remap_block (tree, tree, inline_data *);
-static void copy_scope_stmt (tree *, int *, inline_data *);
-#else /* INLINER_FOR_JAVA */
-static tree initialize_inlined_parameters (inline_data *, tree, tree, tree);
-static void remap_block (tree *, tree, inline_data *);
-static tree add_stmt_to_compound (tree, tree, tree);
-#endif /* INLINER_FOR_JAVA */
+static void remap_block (tree *, copy_body_data *);
+static tree remap_decls (tree, copy_body_data *);
+static void copy_bind_expr (tree *, int *, copy_body_data *);
+static tree mark_local_for_remap_r (tree *, int *, void *);
+static void unsave_expr_1 (tree);
+static tree unsave_r (tree *, int *, void *);
+static void declare_inline_vars (tree, tree);
+static void remap_save_expr (tree *, void *, int *);
+static void add_lexical_block (tree current_block, tree new_block);
+static tree copy_decl_to_var (tree, copy_body_data *);
+static tree copy_result_decl_to_var (tree, copy_body_data *);
+static tree copy_decl_no_change (tree, copy_body_data *);
+static tree copy_decl_maybe_to_var (tree, copy_body_data *);
+
+/* Insert a tree->tree mapping for ID. Despite the name suggests
+ that the trees should be variables, it is used for more than that. */
+
+void
+insert_decl_map (copy_body_data *id, tree key, tree value)
+{
+ splay_tree_insert (id->decl_map, (splay_tree_key) key,
+ (splay_tree_value) value);
+
+ /* Always insert an identity map as well. If we see this same new
+ node again, we won't want to duplicate it a second time. */
+ if (key != value)
+ splay_tree_insert (id->decl_map, (splay_tree_key) value,
+ (splay_tree_value) value);
+}
/* Remap DECL during the copying of the BLOCK tree for the function. */
-static tree
-remap_decl (tree decl, inline_data *id)
+tree
+remap_decl (tree decl, copy_body_data *id)
{
splay_tree_node n;
tree fn;
/* We only remap local variables in the current function. */
- fn = VARRAY_TOP_TREE (id->fns);
- if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
- return NULL_TREE;
+ fn = id->src_fn;
/* See if we have remapped this declaration. */
+
n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
/* If we didn't already have an equivalent for this declaration,
create one now. */
if (!n)
{
- tree t;
-
/* Make a copy of the variable or label. */
- t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
+ tree t = id->copy_decl (decl, id);
+
+ /* Remember it, so that if we encounter this local entity again
+ we can reuse this copy. Do this early because remap_type may
+ need this decl for TYPE_STUB_DECL. */
+ insert_decl_map (id, decl, t);
+
+ if (!DECL_P (t))
+ return t;
/* Remap types, if necessary. */
TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
if (TREE_CODE (t) == TYPE_DECL)
DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
- else if (TREE_CODE (t) == PARM_DECL)
- DECL_ARG_TYPE_AS_WRITTEN (t)
- = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
/* Remap sizes as necessary. */
walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
-#ifndef INLINER_FOR_JAVA
- if (! DECL_NAME (t) && TREE_TYPE (t)
- && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
+ /* If fields, do likewise for offset and qualifier. */
+ if (TREE_CODE (t) == FIELD_DECL)
{
- /* For a VAR_DECL of anonymous type, we must also copy the
- member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
- tree members = NULL;
- tree src;
-
- for (src = DECL_ANON_UNION_ELEMS (t); src;
- src = TREE_CHAIN (src))
- {
- tree member = remap_decl (TREE_VALUE (src), id);
-
- if (TREE_PURPOSE (src))
- abort ();
- members = tree_cons (NULL, member, members);
- }
- DECL_ANON_UNION_ELEMS (t) = nreverse (members);
+ walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
+ if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
+ walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
}
-#endif /* not INLINER_FOR_JAVA */
- /* Remember it, so that if we encounter this local entity
- again we can reuse this copy. */
- n = splay_tree_insert (id->decl_map,
- (splay_tree_key) decl,
- (splay_tree_value) t);
+ return t;
}
- return (tree) n->value;
+ return unshare_expr ((tree) n->value);
}
static tree
-remap_type (tree type, inline_data *id)
+remap_type_1 (tree type, copy_body_data *id)
{
splay_tree_node node;
tree new, t;
@@ -212,17 +208,35 @@ remap_type (tree type, inline_data *id)
return (tree) node->value;
/* The type only needs remapping if it's variably modified. */
- if (! variably_modified_type_p (type))
+ if (! variably_modified_type_p (type, id->src_fn))
{
- splay_tree_insert (id->decl_map, (splay_tree_key) type,
- (splay_tree_value) type);
+ insert_decl_map (id, type, type);
return type;
}
-
- /* We do need a copy. build and register it now. */
- new = copy_node (type);
- splay_tree_insert (id->decl_map, (splay_tree_key) type,
- (splay_tree_value) new);
+
+  /* We do need a copy.  Build and register it now.  If this is a pointer or
+ reference type, remap the designated type and make a new pointer or
+ reference type. */
+ if (TREE_CODE (type) == POINTER_TYPE)
+ {
+ new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
+ TYPE_MODE (type),
+ TYPE_REF_CAN_ALIAS_ALL (type));
+ insert_decl_map (id, type, new);
+ return new;
+ }
+ else if (TREE_CODE (type) == REFERENCE_TYPE)
+ {
+ new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
+ TYPE_MODE (type),
+ TYPE_REF_CAN_ALIAS_ALL (type));
+ insert_decl_map (id, type, new);
+ return new;
+ }
+ else
+ new = copy_node (type);
+
+ insert_decl_map (id, type, new);
/* This is a new type, not a copy of an old type. Need to reassociate
variants. We can handle everything except the main variant lazily. */
@@ -240,6 +254,9 @@ remap_type (tree type, inline_data *id)
TYPE_NEXT_VARIANT (new) = NULL;
}
+ if (TYPE_STUB_DECL (type))
+ TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
+
/* Lazily create pointer and reference types. */
TYPE_POINTER_TO (new) = NULL;
TYPE_REFERENCE_TO (new) = NULL;
@@ -250,28 +267,15 @@ remap_type (tree type, inline_data *id)
case REAL_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
- case CHAR_TYPE:
t = TYPE_MIN_VALUE (new);
if (t && TREE_CODE (t) != INTEGER_CST)
walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
+
t = TYPE_MAX_VALUE (new);
if (t && TREE_CODE (t) != INTEGER_CST)
walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
return new;
-
- case POINTER_TYPE:
- TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
- if (TYPE_MODE (new) == ptr_mode)
- TYPE_POINTER_TO (t) = new;
- return new;
-
- case REFERENCE_TYPE:
- TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
- if (TYPE_MODE (new) == ptr_mode)
- TYPE_REFERENCE_TO (t) = new;
- return new;
- case METHOD_TYPE:
case FUNCTION_TYPE:
TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
@@ -285,15 +289,24 @@ remap_type (tree type, inline_data *id)
case RECORD_TYPE:
case UNION_TYPE:
case QUAL_UNION_TYPE:
- walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
+ {
+ tree f, nf = NULL;
+
+ for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
+ {
+ t = remap_decl (f, id);
+ DECL_CONTEXT (t) = new;
+ TREE_CHAIN (t) = nf;
+ nf = t;
+ }
+ TYPE_FIELDS (new) = nreverse (nf);
+ }
break;
- case FILE_TYPE:
- case SET_TYPE:
case OFFSET_TYPE:
default:
/* Shouldn't have been thought variable sized. */
- abort ();
+ gcc_unreachable ();
}
walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
@@ -302,117 +315,78 @@ remap_type (tree type, inline_data *id)
return new;
}
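A hedged example of the one case in which remap_type_1 must build a new type: a variably modified type whose size mentions a decl that is itself being remapped (hypothetical code, not part of this change):

    void callee (int n)
    {
      int vla[n];   /* the type `int[n]' refers to the PARM_DECL n */
      vla[0] = 0;
    }

    /* If callee is inlined, n is remapped to a new VAR_DECL, say n_1,
       so the array type must be rebuilt as `int[n_1]'.  An ordinary
       type such as `int' is not variably modified and is simply mapped
       to itself by the insert_decl_map (id, type, type) shortcut.  */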
-#ifndef INLINER_FOR_JAVA
-/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
- remapped versions of the variables therein. And hook the new block
- into the block-tree. If non-NULL, the DECLS are declarations to
- add to use instead of the BLOCK_VARS in the old block. */
-#else /* INLINER_FOR_JAVA */
-/* Copy the BLOCK to contain remapped versions of the variables
- therein. And hook the new block into the block-tree. */
-#endif /* INLINER_FOR_JAVA */
-
-static void
-#ifndef INLINER_FOR_JAVA
-remap_block (tree scope_stmt, tree decls, inline_data *id)
-#else /* INLINER_FOR_JAVA */
-remap_block (tree *block, tree decls, inline_data *id)
-#endif /* INLINER_FOR_JAVA */
+tree
+remap_type (tree type, copy_body_data *id)
{
-#ifndef INLINER_FOR_JAVA
- /* We cannot do this in the cleanup for a TARGET_EXPR since we do
- not know whether or not expand_expr will actually write out the
- code we put there. If it does not, then we'll have more BLOCKs
- than block-notes, and things will go awry. At some point, we
- should make the back-end handle BLOCK notes in a tidier way,
- without requiring a strict correspondence to the block-tree; then
- this check can go. */
- if (id->in_target_cleanup_p)
+ splay_tree_node node;
+
+ if (type == NULL)
+ return type;
+
+ /* See if we have remapped this type. */
+ node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+ if (node)
+ return (tree) node->value;
+
+ /* The type only needs remapping if it's variably modified. */
+ if (! variably_modified_type_p (type, id->src_fn))
{
- SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
- return;
+ insert_decl_map (id, type, type);
+ return type;
}
- /* If this is the beginning of a scope, remap the associated BLOCK. */
- if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
+ return remap_type_1 (type, id);
+}
+
+static tree
+remap_decls (tree decls, copy_body_data *id)
+{
+ tree old_var;
+ tree new_decls = NULL_TREE;
+
+ /* Remap its variables. */
+ for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
{
- tree old_block;
- tree new_block;
- tree old_var;
- tree fn;
-
- /* Make the new block. */
- old_block = SCOPE_STMT_BLOCK (scope_stmt);
- new_block = make_node (BLOCK);
- TREE_USED (new_block) = TREE_USED (old_block);
- BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
- SCOPE_STMT_BLOCK (scope_stmt) = new_block;
-
- /* Remap its variables. */
- for (old_var = decls ? decls : BLOCK_VARS (old_block);
- old_var;
- old_var = TREE_CHAIN (old_var))
+ tree new_var;
+
+      /* We cannot remap local static declarations: duplicating them
+	 would break the one-decl rule.  Instead, link the original decl
+	 into the destination function's unexpanded_var_list.  */
+ if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
+ && !DECL_EXTERNAL (old_var))
{
- tree new_var;
-
- /* Remap the variable. */
- new_var = remap_decl (old_var, id);
- /* If we didn't remap this variable, so we can't mess with
- its TREE_CHAIN. If we remapped this variable to
- something other than a declaration (say, if we mapped it
- to a constant), then we must similarly omit any mention
- of it here. */
- if (!new_var || !DECL_P (new_var))
- ;
- else
- {
- TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
- BLOCK_VARS (new_block) = new_var;
- }
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
+ cfun->unexpanded_var_list);
+ continue;
}
- /* We put the BLOCK_VARS in reverse order; fix that now. */
- BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
- fn = VARRAY_TREE (id->fns, 0);
- if (id->cloning_p)
- /* We're building a clone; DECL_INITIAL is still
- error_mark_node, and current_binding_level is the parm
- binding level. */
- (*lang_hooks.decls.insert_block) (new_block);
+
+ /* Remap the variable. */
+ new_var = remap_decl (old_var, id);
+
+      /* If we didn't remap this variable, we can't mess with its
+	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
+	 already declared somewhere else, so don't declare it here.  */
+ if (!new_var || new_var == id->retvar)
+ ;
else
{
- /* Attach this new block after the DECL_INITIAL block for the
- function into which this block is being inlined. In
- rest_of_compilation we will straighten out the BLOCK tree. */
- tree *first_block;
- if (DECL_INITIAL (fn))
- first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
- else
- first_block = &DECL_INITIAL (fn);
- BLOCK_CHAIN (new_block) = *first_block;
- *first_block = new_block;
+ gcc_assert (DECL_P (new_var));
+ TREE_CHAIN (new_var) = new_decls;
+ new_decls = new_var;
}
- /* Remember the remapped block. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) old_block,
- (splay_tree_value) new_block);
}
- /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
- remapped block. */
- else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
- {
- splay_tree_node n;
-
- /* Find this block in the table of remapped things. */
- n = splay_tree_lookup (id->decl_map,
- (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
- if (! n)
- abort ();
- SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
- }
-#else /* INLINER_FOR_JAVA */
+
+ return nreverse (new_decls);
+}
+
+/* Copy the BLOCK to contain remapped versions of the variables
+ therein. And hook the new block into the block-tree. */
+
+static void
+remap_block (tree *block, copy_body_data *id)
+{
tree old_block;
tree new_block;
- tree old_var;
tree fn;
/* Make the new block. */
@@ -420,225 +394,172 @@ remap_block (tree *block, tree decls, inline_data *id)
new_block = make_node (BLOCK);
TREE_USED (new_block) = TREE_USED (old_block);
BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
- BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (old_block);
- TREE_SIDE_EFFECTS (new_block) = TREE_SIDE_EFFECTS (old_block);
- TREE_TYPE (new_block) = TREE_TYPE (old_block);
+ BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
*block = new_block;
/* Remap its variables. */
- for (old_var = decls ? decls : BLOCK_VARS (old_block);
- old_var;
- old_var = TREE_CHAIN (old_var))
- {
- tree new_var;
+ BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
- /* All local class initialization flags go in the outermost
- scope. */
- if (LOCAL_CLASS_INITIALIZATION_FLAG_P (old_var))
- {
- /* We may already have one. */
- if (! splay_tree_lookup (id->decl_map, (splay_tree_key) old_var))
- {
- tree outermost_block;
- new_var = remap_decl (old_var, id);
- DECL_ABSTRACT_ORIGIN (new_var) = NULL;
- outermost_block = DECL_SAVED_TREE (current_function_decl);
- TREE_CHAIN (new_var) = BLOCK_VARS (outermost_block);
- BLOCK_VARS (outermost_block) = new_var;
- }
- continue;
- }
+ fn = id->dst_fn;
+
+ if (id->transform_lang_insert_block)
+ lang_hooks.decls.insert_block (new_block);
- /* Remap the variable. */
- new_var = remap_decl (old_var, id);
- /* If we didn't remap this variable, so we can't mess with
- its TREE_CHAIN. If we remapped this variable to
- something other than a declaration (say, if we mapped it
- to a constant), then we must similarly omit any mention
- of it here. */
- if (!new_var || !DECL_P (new_var))
- ;
- else
- {
- TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
- BLOCK_VARS (new_block) = new_var;
- }
- }
- /* We put the BLOCK_VARS in reverse order; fix that now. */
- BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
- fn = VARRAY_TREE (id->fns, 0);
/* Remember the remapped block. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) old_block,
- (splay_tree_value) new_block);
-#endif /* INLINER_FOR_JAVA */
+ insert_decl_map (id, old_block, new_block);
}
-#ifndef INLINER_FOR_JAVA
-/* Copy the SCOPE_STMT pointed to by TP. */
+/* Copy the whole block tree and root it in id->block. */
+static tree
+remap_blocks (tree block, copy_body_data *id)
+{
+ tree t;
+ tree new = block;
+
+ if (!block)
+ return NULL;
+
+ remap_block (&new, id);
+ gcc_assert (new != block);
+ for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
+ add_lexical_block (new, remap_blocks (t, id));
+ return new;
+}
static void
-copy_scope_stmt (tree *tp, int *walk_subtrees, inline_data *id)
+copy_statement_list (tree *tp)
{
- tree block;
+ tree_stmt_iterator oi, ni;
+ tree new;
+
+ new = alloc_stmt_list ();
+ ni = tsi_start (new);
+ oi = tsi_start (*tp);
+ *tp = new;
+
+ for (; !tsi_end_p (oi); tsi_next (&oi))
+ tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
+}
- /* Remember whether or not this statement was nullified. When
- making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
- doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
- deal with copying BLOCKs if they do not wish to do so. */
- block = SCOPE_STMT_BLOCK (*tp);
+static void
+copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
+{
+ tree block = BIND_EXPR_BLOCK (*tp);
/* Copy (and replace) the statement. */
copy_tree_r (tp, walk_subtrees, NULL);
- /* Restore the SCOPE_STMT_BLOCK. */
- SCOPE_STMT_BLOCK (*tp) = block;
+ if (block)
+ {
+ remap_block (&block, id);
+ BIND_EXPR_BLOCK (*tp) = block;
+ }
- /* Remap the associated block. */
- remap_block (*tp, NULL_TREE, id);
+ if (BIND_EXPR_VARS (*tp))
+ /* This will remap a lot of the same decls again, but this should be
+ harmless. */
+ BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
-#endif /* not INLINER_FOR_JAVA */
-/* Called from copy_body via walk_tree. DATA is really an
- `inline_data *'. */
-static tree
+/* Called from copy_body via walk_tree.  DATA is really a
+   `copy_body_data *'.  */
+
+tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
- inline_data* id;
- tree fn;
-
- /* Set up. */
- id = (inline_data *) data;
- fn = VARRAY_TOP_TREE (id->fns);
-
-#if 0
- /* All automatic variables should have a DECL_CONTEXT indicating
- what function they come from. */
- if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
- && DECL_NAMESPACE_SCOPE_P (*tp))
- if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
- abort ();
-#endif
+ copy_body_data *id = (copy_body_data *) data;
+ tree fn = id->src_fn;
+ tree new_block;
-#ifdef INLINER_FOR_JAVA
- if (TREE_CODE (*tp) == BLOCK)
- remap_block (tp, NULL_TREE, id);
-#endif
+ /* Begin by recognizing trees that we'll completely rewrite for the
+ inlining context. Our output for these trees is completely
+     different from our input (e.g. RETURN_EXPR is deleted, and morphs
+ into an edge). Further down, we'll handle trees that get
+ duplicated and/or tweaked. */
- /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
- GOTO_STMT with the RET_LABEL as its target. */
-#ifndef INLINER_FOR_JAVA
- if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
-#else /* INLINER_FOR_JAVA */
- if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
-#endif /* INLINER_FOR_JAVA */
+ /* When requested, RETURN_EXPRs should be transformed to just the
+ contained MODIFY_EXPR. The branch semantics of the return will
+ be handled elsewhere by manipulating the CFG rather than a statement. */
+ if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
{
- tree return_stmt = *tp;
- tree goto_stmt;
-
- /* Build the GOTO_STMT. */
-#ifndef INLINER_FOR_JAVA
- goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
- TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
- GOTO_FAKE_P (goto_stmt) = 1;
-#else /* INLINER_FOR_JAVA */
- tree assignment = TREE_OPERAND (return_stmt, 0);
- goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
- TREE_SIDE_EFFECTS (goto_stmt) = 1;
-#endif /* INLINER_FOR_JAVA */
+ tree assignment = TREE_OPERAND (*tp, 0);
/* If we're returning something, just turn that into an
- assignment into the equivalent of the original
- RESULT_DECL. */
-#ifndef INLINER_FOR_JAVA
- if (RETURN_STMT_EXPR (return_stmt))
+	 assignment to the equivalent of the original RESULT_DECL.
+ If the "assignment" is just the result decl, the result
+ decl has already been set (e.g. a recent "foo (&result_decl,
+ ...)"); just toss the entire RETURN_EXPR. */
+ if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
{
- *tp = build_stmt (EXPR_STMT,
- RETURN_STMT_EXPR (return_stmt));
- STMT_IS_FULL_EXPR_P (*tp) = 1;
- /* And then jump to the end of the function. */
- TREE_CHAIN (*tp) = goto_stmt;
+ /* Replace the RETURN_EXPR with (a copy of) the
+ MODIFY_EXPR hanging underneath. */
+ *tp = copy_node (assignment);
}
-#else /* INLINER_FOR_JAVA */
- if (assignment)
+ else /* Else the RETURN_EXPR returns no value. */
{
- copy_body_r (&assignment, walk_subtrees, data);
- *tp = build (COMPOUND_EXPR, void_type_node, assignment, goto_stmt);
- TREE_SIDE_EFFECTS (*tp) = 1;
+ *tp = NULL;
+ return (tree) (void *)1;
}
-#endif /* INLINER_FOR_JAVA */
- /* If we're not returning anything just do the jump. */
- else
- *tp = goto_stmt;
}
+
/* Local variables and labels need to be replaced by equivalent
variables. We don't want to copy static variables; there's only
one of those, no matter how many times we inline the containing
- function.
- We do not also want to copy the label which we put into
- GOTO_STMT which replaced RETURN_STMT. */
- else if (*tp != id->ret_label
- && (*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
+ function. Similarly for globals from an outer function. */
+ else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
{
tree new_decl;
/* Remap the declaration. */
new_decl = remap_decl (*tp, id);
- if (! new_decl)
- abort ();
+ gcc_assert (new_decl);
/* Replace this variable with the copy. */
STRIP_TYPE_NOPS (new_decl);
*tp = new_decl;
+ *walk_subtrees = 0;
}
-#if 0
- else if (nonstatic_local_decl_p (*tp)
- && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
- abort ();
-#endif
+ else if (TREE_CODE (*tp) == STATEMENT_LIST)
+ copy_statement_list (tp);
else if (TREE_CODE (*tp) == SAVE_EXPR)
- remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
- walk_subtrees);
- else if (TREE_CODE (*tp) == UNSAVE_EXPR)
- /* UNSAVE_EXPRs should not be generated until expansion time. */
- abort ();
-#ifndef INLINER_FOR_JAVA
- /* For a SCOPE_STMT, we must copy the associated block so that we
- can write out debugging information for the inlined variables. */
- else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
- copy_scope_stmt (tp, walk_subtrees, id);
-#else /* INLINER_FOR_JAVA */
- else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
- {
- /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
- will refer to it, so save a copy ready for remapping. We
- save it in the decl_map, although it isn't a decl. */
- tree new_block = copy_node (*tp);
- splay_tree_insert (id->decl_map,
- (splay_tree_key) *tp,
- (splay_tree_value) new_block);
- *tp = new_block;
- }
- else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
- {
- splay_tree_node n
- = splay_tree_lookup (id->decl_map,
- (splay_tree_key) TREE_OPERAND (*tp, 0));
- /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
- if (! n)
- abort ();
- *tp = copy_node (*tp);
- TREE_OPERAND (*tp, 0) = (tree) n->value;
- }
-#endif /* INLINER_FOR_JAVA */
+ remap_save_expr (tp, id->decl_map, walk_subtrees);
+ else if (TREE_CODE (*tp) == LABEL_DECL
+ && (! DECL_CONTEXT (*tp)
+ || decl_function_context (*tp) == id->src_fn))
+ /* These may need to be remapped for EH handling. */
+ *tp = remap_decl (*tp, id);
+ else if (TREE_CODE (*tp) == BIND_EXPR)
+ copy_bind_expr (tp, walk_subtrees, id);
/* Types may need remapping as well. */
else if (TYPE_P (*tp))
*tp = remap_type (*tp, id);
+ /* If this is a constant, we have to copy the node iff the type will be
+ remapped. copy_tree_r will not copy a constant. */
+ else if (CONSTANT_CLASS_P (*tp))
+ {
+ tree new_type = remap_type (TREE_TYPE (*tp), id);
+
+ if (new_type == TREE_TYPE (*tp))
+ *walk_subtrees = 0;
+
+ else if (TREE_CODE (*tp) == INTEGER_CST)
+ *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
+ TREE_INT_CST_HIGH (*tp));
+ else
+ {
+ *tp = copy_node (*tp);
+ TREE_TYPE (*tp) = new_type;
+ }
+ }
+
/* Otherwise, just copy the node. Note that copy_tree_r already
knows not to copy VAR_DECLs, etc., so this is safe. */
else
{
+ /* Here we handle trees that are not completely rewritten.
+ First we detect some inlining-induced bogosities for
+ discarding. */
if (TREE_CODE (*tp) == MODIFY_EXPR
&& TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
- && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
+ && (lang_hooks.tree_inlining.auto_var_in_fn_p
(TREE_OPERAND (*tp, 0), fn)))
{
/* Some assignments VAR = VAR; don't generate any rtl code
@@ -654,36 +575,77 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
STRIP_TYPE_NOPS (value);
if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
{
- *tp = value;
+ *tp = build_empty_stmt ();
return copy_body_r (tp, walk_subtrees, data);
}
}
}
- else if (TREE_CODE (*tp) == ADDR_EXPR
- && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
- (TREE_OPERAND (*tp, 0), fn)))
+ else if (TREE_CODE (*tp) == INDIRECT_REF)
{
- /* Get rid of &* from inline substitutions. It can occur when
- someone takes the address of a parm or return slot passed by
- invisible reference. */
- tree decl = TREE_OPERAND (*tp, 0), value;
+ /* Get rid of *& from inline substitutions that can happen when a
+ pointer argument is an ADDR_EXPR. */
+ tree decl = TREE_OPERAND (*tp, 0);
splay_tree_node n;
n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
if (n)
{
- value = (tree) n->value;
- if (TREE_CODE (value) == INDIRECT_REF)
- {
- *tp = convert (TREE_TYPE (*tp), TREE_OPERAND (value, 0));
- return copy_body_r (tp, walk_subtrees, data);
+ tree new;
+ tree old;
+ /* If we happen to get an ADDR_EXPR in n->value, strip
+ it manually here as we'll eventually get ADDR_EXPRs
+ which lie about their types pointed to. In this case
+ build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
+ but we absolutely rely on that. As fold_indirect_ref
+ does other useful transformations, try that first, though. */
+ tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
+ new = unshare_expr ((tree)n->value);
+ old = *tp;
+ *tp = fold_indirect_ref_1 (type, new);
+ if (! *tp)
+ {
+ if (TREE_CODE (new) == ADDR_EXPR)
+ *tp = TREE_OPERAND (new, 0);
+ else
+ {
+ *tp = build1 (INDIRECT_REF, type, new);
+ TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+ }
}
+ *walk_subtrees = 0;
+ return NULL;
}
}
+ /* Here is the "usual case". Copy this tree node, and then
+ tweak some special cases. */
copy_tree_r (tp, walk_subtrees, NULL);
+
+      /* If EXPR has a block defined, map it to the newly constructed
+	 block.  When inlining, we want EXPRs without a block to appear
+	 in the block of the function call.  */
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
+ {
+ new_block = id->block;
+ if (TREE_BLOCK (*tp))
+ {
+ splay_tree_node n;
+ n = splay_tree_lookup (id->decl_map,
+ (splay_tree_key) TREE_BLOCK (*tp));
+ gcc_assert (n);
+ new_block = (tree) n->value;
+ }
+ TREE_BLOCK (*tp) = new_block;
+ }
+
+ if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
+ TREE_OPERAND (*tp, 0) =
+ build_int_cst
+ (NULL_TREE,
+ id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
- TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
+ if (TREE_CODE (*tp) != OMP_CLAUSE)
+ TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
/* The copied TARGET_EXPR has never been expanded, even if the
original node was expanded already. */
@@ -692,295 +654,650 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
TREE_OPERAND (*tp, 3) = NULL_TREE;
}
+
+ /* Variable substitution need not be simple. In particular, the
+ INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
+ and friends are up-to-date. */
+ else if (TREE_CODE (*tp) == ADDR_EXPR)
+ {
+ walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
+ /* Handle the case where we substituted an INDIRECT_REF
+ into the operand of the ADDR_EXPR. */
+ if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
+ *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
+ else
+ recompute_tree_invariant_for_addr_expr (*tp);
+ *walk_subtrees = 0;
+ }
}
/* Keep iterating. */
return NULL_TREE;
}
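A sketch of the *& cleanup handled by the INDIRECT_REF case above, seen at the source level (hypothetical names):

    static void set_to_one (int *p) { *p = 1; }

    void caller (void)
    {
      int x;
      set_to_one (&x);   /* the PARM_DECL p maps to the ADDR_EXPR &x */
    }

    /* In the inlined body, `*p' would become `*&x'; the INDIRECT_REF
       case folds that back to plain `x' rather than leaving an
       INDIRECT_REF of an ADDR_EXPR in the copied tree.  */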
+/* Copy a basic block, scaling its profile accordingly.  Edges will be
+   taken care of later.  */
+
+static basic_block
+copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
+{
+ block_stmt_iterator bsi, copy_bsi;
+ basic_block copy_basic_block;
+
+ /* create_basic_block() will append every new block to
+ basic_block_info automatically. */
+ copy_basic_block = create_basic_block (NULL, (void *) 0,
+ (basic_block) bb->prev_bb->aux);
+ copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
+ copy_basic_block->frequency = (bb->frequency
+ * frequency_scale / REG_BR_PROB_BASE);
+ copy_bsi = bsi_start (copy_basic_block);
+
+ for (bsi = bsi_start (bb);
+ !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ tree orig_stmt = stmt;
+
+ walk_tree (&stmt, copy_body_r, id, NULL);
+
+      /* The RETURN_EXPR might have been removed; this is signalled by
+	 the stmt pointer becoming NULL.  */
+ if (stmt)
+ {
+ tree call, decl;
+
+ /* With return slot optimization we can end up with
+ non-gimple (foo *)&this->m, fix that here. */
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
+ && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)))
+ gimplify_stmt (&stmt);
+
+ bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
+ call = get_call_expr_in (stmt);
+ /* We're duplicating a CALL_EXPR. Find any corresponding
+ callgraph edges and update or duplicate them. */
+ if (call && (decl = get_callee_fndecl (call)))
+ {
+ struct cgraph_node *node;
+ struct cgraph_edge *edge;
+
+ switch (id->transform_call_graph_edges)
+ {
+ case CB_CGE_DUPLICATE:
+ edge = cgraph_edge (id->src_node, orig_stmt);
+ if (edge)
+ cgraph_clone_edge (edge, id->dst_node, stmt,
+ REG_BR_PROB_BASE, 1, true);
+ break;
+
+ case CB_CGE_MOVE_CLONES:
+ for (node = id->dst_node->next_clone;
+ node;
+ node = node->next_clone)
+ {
+ edge = cgraph_edge (node, orig_stmt);
+ gcc_assert (edge);
+ cgraph_set_call_stmt (edge, stmt);
+ }
+ /* FALLTHRU */
+
+ case CB_CGE_MOVE:
+ edge = cgraph_edge (id->dst_node, orig_stmt);
+ if (edge)
+ cgraph_set_call_stmt (edge, stmt);
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ }
+ /* If you think we can abort here, you are wrong.
+ There is no region 0 in tree land. */
+ gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
+ != 0);
+
+ if (tree_could_throw_p (stmt))
+ {
+ int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
+ /* Add an entry for the copied tree in the EH hashtable.
+ When cloning or versioning, use the hashtable in
+ cfun, and just copy the EH number. When inlining, use the
+ hashtable in the caller, and adjust the region number. */
+ if (region > 0)
+ add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
+
+ /* If this tree doesn't have a region associated with it,
+ and there is a "current region,"
+ then associate this tree with the current region
+ and add edges associated with this region. */
+ if ((lookup_stmt_eh_region_fn (id->src_cfun,
+ orig_stmt) <= 0
+ && id->eh_region > 0)
+ && tree_could_throw_p (stmt))
+ add_stmt_to_eh_region (stmt, id->eh_region);
+ }
+ }
+ }
+ return copy_basic_block;
+}
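A worked example of the profile scaling used by copy_bb, assuming REG_BR_PROB_BASE is 10000 (its usual value):

    /* If the callee's entry block has count 200 and this call site
       contributes count 50, copy_cfg_body computes

         count_scale = 10000 * 50 / 200 = 2500

       so a callee block with count 80 is copied with count

         80 * 2500 / 10000 = 20

       i.e. every copied count is scaled by 50/200 = 1/4; frequencies
       are scaled the same way via frequency_scale.  */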
+
+/* Copy edges from BB into its copy constructed earlier, scaling the
+   profile accordingly.  The aux pointers are assumed to point to the
+   copies of each BB.  */
+static void
+copy_edges_for_bb (basic_block bb, int count_scale)
+{
+ basic_block new_bb = (basic_block) bb->aux;
+ edge_iterator ei;
+ edge old_edge;
+ block_stmt_iterator bsi;
+ int flags;
+
+ /* Use the indices from the original blocks to create edges for the
+ new ones. */
+ FOR_EACH_EDGE (old_edge, ei, bb->succs)
+ if (!(old_edge->flags & EDGE_EH))
+ {
+ edge new;
+
+ flags = old_edge->flags;
+
+	/* Return edges do get a FALLTHRU flag when they get inlined.  */
+ if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
+ && old_edge->dest->aux != EXIT_BLOCK_PTR)
+ flags |= EDGE_FALLTHRU;
+ new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
+ new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
+ new->probability = old_edge->probability;
+ }
+
+ if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
+ return;
+
+ for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
+ {
+ tree copy_stmt;
+
+ copy_stmt = bsi_stmt (bsi);
+ update_stmt (copy_stmt);
+ /* Do this before the possible split_block. */
+ bsi_next (&bsi);
+
+ /* If this tree could throw an exception, there are two
+ cases where we need to add abnormal edge(s): the
+ tree wasn't in a region and there is a "current
+ region" in the caller; or the original tree had
+ EH edges. In both cases split the block after the tree,
+ and add abnormal edge(s) as needed; we need both
+ those from the callee and the caller.
+ We check whether the copy can throw, because the const
+ propagation can change an INDIRECT_REF which throws
+ into a COMPONENT_REF which doesn't. If the copy
+ can throw, the original could also throw. */
+
+ if (tree_can_throw_internal (copy_stmt))
+ {
+ if (!bsi_end_p (bsi))
+ /* Note that bb's predecessor edges aren't necessarily
+ right at this point; split_block doesn't care. */
+ {
+ edge e = split_block (new_bb, copy_stmt);
+ new_bb = e->dest;
+ bsi = bsi_start (new_bb);
+ }
+
+ make_eh_edges (copy_stmt);
+ }
+ }
+}
+
+/* Wrapper for remap_decl so it can be used as a callback. */
+static tree
+remap_decl_1 (tree decl, void *data)
+{
+ return remap_decl (decl, (copy_body_data *) data);
+}
+
+/* Make a copy of the body of FN so that it can be inserted inline in
+ another function. Walks FN via CFG, returns new fndecl. */
+
+static tree
+copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
+ basic_block entry_block_map, basic_block exit_block_map)
+{
+ tree callee_fndecl = id->src_fn;
+ /* Original cfun for the callee, doesn't change. */
+ struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+ /* Copy, built by this function. */
+ struct function *new_cfun;
+ /* Place to copy from; when a copy of the function was saved off earlier,
+ use that instead of the main copy. */
+ struct function *cfun_to_copy =
+ (struct function *) ggc_alloc_cleared (sizeof (struct function));
+ basic_block bb;
+ tree new_fndecl = NULL;
+ int count_scale, frequency_scale;
+
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+ count_scale = (REG_BR_PROB_BASE * count
+ / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+ else
+ count_scale = 1;
+
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
+ frequency_scale = (REG_BR_PROB_BASE * frequency
+ /
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
+ else
+ frequency_scale = count_scale;
+
+ /* Register specific tree functions. */
+ tree_register_cfg_hooks ();
+
+ /* Must have a CFG here at this point. */
+ gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
+ (DECL_STRUCT_FUNCTION (callee_fndecl)));
+
+ *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
+
+ id->src_cfun = cfun_to_copy;
+
+ /* If requested, create new basic_block_info and label_to_block_maps.
+ Otherwise, insert our new blocks and labels into the existing cfg. */
+ if (id->transform_new_cfg)
+ {
+ new_cfun =
+ (struct function *) ggc_alloc_cleared (sizeof (struct function));
+ *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
+ new_cfun->cfg = NULL;
+ new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
+ new_cfun->ib_boundaries_block = NULL;
+ DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
+ push_cfun (new_cfun);
+ init_empty_tree_cfg ();
+
+ ENTRY_BLOCK_PTR->count =
+ (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+ REG_BR_PROB_BASE);
+ ENTRY_BLOCK_PTR->frequency =
+ (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+ frequency_scale / REG_BR_PROB_BASE);
+ EXIT_BLOCK_PTR->count =
+ (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+ REG_BR_PROB_BASE);
+ EXIT_BLOCK_PTR->frequency =
+ (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+ frequency_scale / REG_BR_PROB_BASE);
+
+ entry_block_map = ENTRY_BLOCK_PTR;
+ exit_block_map = EXIT_BLOCK_PTR;
+ }
+
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
+ EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
+
+ /* Duplicate any exception-handling regions. */
+ if (cfun->eh)
+ {
+ if (id->transform_new_cfg)
+ init_eh_for_function ();
+ id->eh_region_offset
+ = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
+ 0, id->eh_region);
+ }
+ /* Use aux pointers to map the original blocks to copy. */
+ FOR_EACH_BB_FN (bb, cfun_to_copy)
+ bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
+ /* Now that we've duplicated the blocks, duplicate their edges. */
+ FOR_ALL_BB_FN (bb, cfun_to_copy)
+ copy_edges_for_bb (bb, count_scale);
+ FOR_ALL_BB_FN (bb, cfun_to_copy)
+ bb->aux = NULL;
+
+ if (id->transform_new_cfg)
+ pop_cfun ();
+
+ return new_fndecl;
+}
+
/* Make a copy of the body of FN so that it can be inserted inline in
another function. */
static tree
-copy_body (inline_data *id)
+copy_generic_body (copy_body_data *id)
{
tree body;
+ tree fndecl = id->src_fn;
- body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
+ body = DECL_SAVED_TREE (fndecl);
walk_tree (&body, copy_body_r, id, NULL);
return body;
}
+static tree
+copy_body (copy_body_data *id, gcov_type count, int frequency,
+ basic_block entry_block_map, basic_block exit_block_map)
+{
+ tree fndecl = id->src_fn;
+ tree body;
+
+ /* If this body has a CFG, walk CFG and copy. */
+ gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
+ body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
+
+ return body;
+}
+
+/* Return true if VALUE is an ADDR_EXPR of an automatic variable
+ defined in function FN, or of a data member thereof. */
+
+static bool
+self_inlining_addr_expr (tree value, tree fn)
+{
+ tree var;
+
+ if (TREE_CODE (value) != ADDR_EXPR)
+ return false;
+
+ var = get_base_address (TREE_OPERAND (value, 0));
+
+ return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
+}
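A hypothetical example of the hazard self_inlining_addr_expr guards against during recursive inlining:

    static void walk (int *outer)
    {
      int cur;
      if (outer && *outer > 0)
        {
          cur = *outer - 1;
          walk (&cur);        /* the argument is the ADDR_EXPR &cur */
        }
    }

    /* When walk is inlined into itself, `&cur' is the address of an
       automatic variable of the very function being inlined; `cur' is
       also in the decl map (it gets a fresh copy for the inlined body),
       so substituting the address directly for `outer' risks confusing
       the caller's `cur' with the copy's.  Hence this guard, instead of
       merely testing fn == current_function_decl.  */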
+
+static void
+setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
+ basic_block bb, tree *vars)
+{
+ tree init_stmt;
+ tree var;
+ tree var_sub;
+
+ /* If the parameter is never assigned to, we may not need to
+ create a new variable here at all. Instead, we may be able
+ to just use the argument value. */
+ if (TREE_READONLY (p)
+ && !TREE_ADDRESSABLE (p)
+ && value && !TREE_SIDE_EFFECTS (value))
+ {
+      /* We may produce non-gimple trees by adding NOPs or introduce
+	 invalid sharing when the operand is not really constant.
+	 It is not a big deal to prohibit constant propagation here, as
+	 we will constant propagate in the DOM1 pass anyway.  */
+ if (is_gimple_min_invariant (value)
+ && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
+ /* We have to be very careful about ADDR_EXPR. Make sure
+ the base variable isn't a local variable of the inlined
+ function, e.g., when doing recursive inlining, direct or
+ mutually-recursive or whatever, which is why we don't
+ just test whether fn == current_function_decl. */
+ && ! self_inlining_addr_expr (value, fn))
+ {
+ insert_decl_map (id, p, value);
+ return;
+ }
+ }
+
+ /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
+ here since the type of this decl must be visible to the calling
+ function. */
+ var = copy_decl_to_var (p, id);
+
+ /* See if the frontend wants to pass this by invisible reference. If
+ so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
+ replace uses of the PARM_DECL with dereferences. */
+ if (TREE_TYPE (var) != TREE_TYPE (p)
+ && POINTER_TYPE_P (TREE_TYPE (var))
+ && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
+ {
+ insert_decl_map (id, var, var);
+ var_sub = build_fold_indirect_ref (var);
+ }
+ else
+ var_sub = var;
+
+ /* Register the VAR_DECL as the equivalent for the PARM_DECL;
+ that way, when the PARM_DECL is encountered, it will be
+ automatically replaced by the VAR_DECL. */
+ insert_decl_map (id, p, var_sub);
+
+ /* Declare this new variable. */
+ TREE_CHAIN (var) = *vars;
+ *vars = var;
+
+ /* Make gimplifier happy about this variable. */
+ DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
+
+ /* Even if P was TREE_READONLY, the new VAR should not be.
+ In the original code, we would have constructed a
+ temporary, and then the function body would have never
+ changed the value of P. However, now, we will be
+ constructing VAR directly. The constructor body may
+ change its value multiple times as it is being
+ constructed. Therefore, it must not be TREE_READONLY;
+ the back-end assumes that TREE_READONLY variable is
+ assigned to only once. */
+ if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
+ TREE_READONLY (var) = 0;
+
+ /* Initialize this VAR_DECL from the equivalent argument. Convert
+ the argument to the proper type in case it was promoted. */
+ if (value)
+ {
+ tree rhs = fold_convert (TREE_TYPE (var), value);
+ block_stmt_iterator bsi = bsi_last (bb);
+
+ if (rhs == error_mark_node)
+ return;
+
+ STRIP_USELESS_TYPE_CONVERSION (rhs);
+
+ /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
+ keep our trees in gimple form. */
+ init_stmt = build2 (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
+
+ /* If we did not create a gimple value and we did not create a gimple
+ cast of a gimple value, then we will need to gimplify INIT_STMTS
+ at the end. Note that is_gimple_cast only checks the outer
+ tree code, not its operand. Thus the explicit check that its
+ operand is a gimple value. */
+ if (!is_gimple_val (rhs)
+ && (!is_gimple_cast (rhs)
+ || !is_gimple_val (TREE_OPERAND (rhs, 0))))
+ gimplify_stmt (&init_stmt);
+
+ /* If VAR represents a zero-sized variable, it's possible that the
+	 assignment statement may result in no gimple statements.  */
+ if (init_stmt)
+ bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+ }
+}
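A minimal sketch of the no-copy shortcut at the top of setup_one_parameter (hypothetical code):

    static int scaled (const int k, int v) { return k * v; }

    int use (int v)
    {
      /* `k' is TREE_READONLY, not addressable, and the argument 10 is
         a gimple minimum invariant, so insert_decl_map maps k -> 10
         directly; no `k_1 = 10' initialization statement is emitted
         into BB.  */
      return scaled (10, v);
    }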
+
/* Generate code to initialize the parameters of the function at the
top of the stack in ID from the ARGS (presented as a TREE_LIST). */
-static tree
-#ifndef INLINER_FOR_JAVA
-initialize_inlined_parameters (inline_data *id, tree args, tree fn)
-#else /* INLINER_FOR_JAVA */
-initialize_inlined_parameters (inline_data *id, tree args, tree fn, tree block)
-#endif /* INLINER_FOR_JAVA */
+static void
+initialize_inlined_parameters (copy_body_data *id, tree args, tree static_chain,
+ tree fn, basic_block bb)
{
- tree init_stmts;
tree parms;
tree a;
tree p;
-#ifdef INLINER_FOR_JAVA
tree vars = NULL_TREE;
-#endif /* INLINER_FOR_JAVA */
int argnum = 0;
/* Figure out what the parameters are. */
- parms =
-DECL_ARGUMENTS (fn);
-
- /* Start with no initializations whatsoever. */
- init_stmts = NULL_TREE;
+ parms = DECL_ARGUMENTS (fn);
/* Loop through the parameter declarations, replacing each with an
equivalent VAR_DECL, appropriately initialized. */
for (p = parms, a = args; p;
a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
{
-#ifndef INLINER_FOR_JAVA
- tree init_stmt;
- tree cleanup;
-#endif /* not INLINER_FOR_JAVA */
- tree var;
tree value;
- tree var_sub;
++argnum;
/* Find the initializer. */
- value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
+ value = lang_hooks.tree_inlining.convert_parm_for_inlining
(p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
- /* If the parameter is never assigned to, we may not need to
- create a new variable here at all. Instead, we may be able
- to just use the argument value. */
- if (TREE_READONLY (p)
- && !TREE_ADDRESSABLE (p)
- && value && !TREE_SIDE_EFFECTS (value))
- {
- /* Simplify the value, if possible. */
- value = fold (DECL_P (value) ? decl_constant_value (value) : value);
-
- /* We can't risk substituting complex expressions. They
- might contain variables that will be assigned to later.
- Theoretically, we could check the expression to see if
- all of the variables that determine its value are
- read-only, but we don't bother. */
- if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
- {
- /* If this is a declaration, wrap it a NOP_EXPR so that
- we don't try to put the VALUE on the list of
- BLOCK_VARS. */
- if (DECL_P (value))
- value = build1 (NOP_EXPR, TREE_TYPE (value), value);
-
- /* If this is a constant, make sure it has the right type. */
- else if (TREE_TYPE (value) != TREE_TYPE (p))
- value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));
-
- splay_tree_insert (id->decl_map,
- (splay_tree_key) p,
- (splay_tree_value) value);
- continue;
- }
- }
-
- /* Make an equivalent VAR_DECL. */
- var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
-
- /* See if the frontend wants to pass this by invisible reference. If
- so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
- replace uses of the PARM_DECL with dereferences. */
- if (TREE_TYPE (var) != TREE_TYPE (p)
- && POINTER_TYPE_P (TREE_TYPE (var))
- && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
- var_sub = build1 (INDIRECT_REF, TREE_TYPE (p), var);
- else
- var_sub = var;
-
- /* Register the VAR_DECL as the equivalent for the PARM_DECL;
- that way, when the PARM_DECL is encountered, it will be
- automatically replaced by the VAR_DECL. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) p,
- (splay_tree_value) var_sub);
-
- /* Declare this new variable. */
-#ifndef INLINER_FOR_JAVA
- init_stmt = build_stmt (DECL_STMT, var);
- TREE_CHAIN (init_stmt) = init_stmts;
- init_stmts = init_stmt;
-#else /* INLINER_FOR_JAVA */
- TREE_CHAIN (var) = vars;
- vars = var;
-#endif /* INLINER_FOR_JAVA */
-
- /* Initialize this VAR_DECL from the equivalent argument. If
- the argument is an object, created via a constructor or copy,
- this will not result in an extra copy: the TARGET_EXPR
- representing the argument will be bound to VAR, and the
- object will be constructed in VAR. */
- if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
-#ifndef INLINER_FOR_JAVA
- DECL_INITIAL (var) = value;
- else
- {
- /* Even if P was TREE_READONLY, the new VAR should not be.
- In the original code, we would have constructed a
- temporary, and then the function body would have never
- changed the value of P. However, now, we will be
- constructing VAR directly. The constructor body may
- change its value multiple times as it is being
- constructed. Therefore, it must not be TREE_READONLY;
- the back-end assumes that TREE_READONLY variable is
- assigned to only once. */
- TREE_READONLY (var) = 0;
-
- /* Build a run-time initialization. */
- init_stmt = build_stmt (EXPR_STMT,
- build (INIT_EXPR, TREE_TYPE (p),
- var, value));
- /* Add this initialization to the list. Note that we want the
- declaration *after* the initialization because we are going
- to reverse all the initialization statements below. */
- TREE_CHAIN (init_stmt) = init_stmts;
- init_stmts = init_stmt;
- }
-
- /* See if we need to clean up the declaration. */
- cleanup = (*lang_hooks.maybe_build_cleanup) (var);
- if (cleanup)
- {
- tree cleanup_stmt;
- /* Build the cleanup statement. */
- cleanup_stmt = build_stmt (CLEANUP_STMT, var, cleanup);
- /* Add it to the *front* of the list; the list will be
- reversed below. */
- TREE_CHAIN (cleanup_stmt) = init_stmts;
- init_stmts = cleanup_stmt;
- }
-#else /* INLINER_FOR_JAVA */
- {
- tree assignment = build (MODIFY_EXPR, TREE_TYPE (p), var, value);
- init_stmts = add_stmt_to_compound (init_stmts, TREE_TYPE (p),
- assignment);
- }
- else
- {
- /* Java objects don't ever need constructing when being
- passed as arguments because only call by reference is
- supported. */
- abort ();
- }
-#endif /* INLINER_FOR_JAVA */
+ setup_one_parameter (id, p, value, fn, bb, &vars);
}
-#ifndef INLINER_FOR_JAVA
- /* Evaluate trailing arguments. */
- for (; a; a = TREE_CHAIN (a))
+ /* Initialize the static chain. */
+ p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
+ gcc_assert (fn != current_function_decl);
+ if (p)
{
- tree init_stmt;
- tree value = TREE_VALUE (a);
+ /* No static chain? Seems like a bug in tree-nested.c. */
+ gcc_assert (static_chain);
- if (! value || ! TREE_SIDE_EFFECTS (value))
- continue;
-
- init_stmt = build_stmt (EXPR_STMT, value);
- TREE_CHAIN (init_stmt) = init_stmts;
- init_stmts = init_stmt;
+ setup_one_parameter (id, p, static_chain, fn, bb, &vars);
}
- /* The initialization statements have been built up in reverse
- order. Straighten them out now. */
- return nreverse (init_stmts);
-#else /* INLINER_FOR_JAVA */
- BLOCK_VARS (block) = nreverse (vars);
- return init_stmts;
-#endif /* INLINER_FOR_JAVA */
+ declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
function we are calling. An appropriate DECL_STMT is returned.
- The USE_STMT is filled in to contain a use of the declaration to
- indicate the return value of the function. */
+   The USE_STMT is filled in to contain a use of the declaration to
+ indicate the return value of the function.
+
+ RETURN_SLOT_ADDR, if non-null, was a fake parameter that
+ took the address of the result. MODIFY_DEST, if non-null, was the LHS of
+ the MODIFY_EXPR to which this call is the RHS.
+
+ The return value is a (possibly null) value that is the result of the
+ function as seen by the callee. *USE_P is a (possibly null) value that
+ holds the result as seen by the caller. */
-#ifndef INLINER_FOR_JAVA
-static tree
-declare_return_variable (struct inline_data *id, tree return_slot_addr,
- tree *use_stmt)
-#else /* INLINER_FOR_JAVA */
static tree
-declare_return_variable (struct inline_data *id, tree return_slot_addr,
- tree *var)
-#endif /* INLINER_FOR_JAVA */
+declare_return_variable (copy_body_data *id, tree return_slot_addr,
+ tree modify_dest, tree *use_p)
{
- tree fn = VARRAY_TOP_TREE (id->fns);
- tree result = DECL_RESULT (fn);
-#ifndef INLINER_FOR_JAVA
- tree var;
-#endif /* not INLINER_FOR_JAVA */
- int need_return_decl = 1;
+ tree callee = id->src_fn;
+ tree caller = id->dst_fn;
+ tree result = DECL_RESULT (callee);
+ tree callee_type = TREE_TYPE (result);
+ tree caller_type = TREE_TYPE (TREE_TYPE (callee));
+ tree var, use;
/* We don't need to do anything for functions that don't return
anything. */
- if (!result || VOID_TYPE_P (TREE_TYPE (result)))
+ if (!result || VOID_TYPE_P (callee_type))
{
-#ifndef INLINER_FOR_JAVA
- *use_stmt = NULL_TREE;
-#else /* INLINER_FOR_JAVA */
- *var = NULL_TREE;
-#endif /* INLINER_FOR_JAVA */
+ *use_p = NULL_TREE;
return NULL_TREE;
}
-#ifndef INLINER_FOR_JAVA
- var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
- (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
- &need_return_decl, return_slot_addr));
+ /* If there was a return slot, then the return value is the
+ dereferenced address of that object. */
+ if (return_slot_addr)
+ {
+ /* The front end shouldn't have used both return_slot_addr and
+ a modify expression. */
+ gcc_assert (!modify_dest);
+ if (DECL_BY_REFERENCE (result))
+ var = return_slot_addr;
+ else
+ var = build_fold_indirect_ref (return_slot_addr);
+ if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ && !DECL_COMPLEX_GIMPLE_REG_P (result)
+ && DECL_P (var))
+ DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
+ use = NULL;
+ goto done;
+ }
+
+ /* All types requiring non-trivial constructors should have been handled. */
+ gcc_assert (!TREE_ADDRESSABLE (callee_type));
+
+ /* Attempt to avoid creating a new temporary variable. */
+ if (modify_dest)
+ {
+ bool use_it = false;
+
+ /* We can't use MODIFY_DEST if there's type promotion involved. */
+ if (!lang_hooks.types_compatible_p (caller_type, callee_type))
+ use_it = false;
+
+ /* ??? If we're assigning to a variable sized type, then we must
+ reuse the destination variable, because we've no good way to
+ create variable sized temporaries at this point. */
+ else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
+ use_it = true;
+
+ /* If the callee cannot possibly modify MODIFY_DEST, then we can
+ reuse it as the result of the call directly. Don't do this if
+ it would promote MODIFY_DEST to addressable. */
+ else if (TREE_ADDRESSABLE (result))
+ use_it = false;
+ else
+ {
+ tree base_m = get_base_address (modify_dest);
+
+ /* If the base isn't a decl, then it's a pointer, and we don't
+ know where that's going to go. */
+ if (!DECL_P (base_m))
+ use_it = false;
+ else if (is_global_var (base_m))
+ use_it = false;
+ else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ && !DECL_COMPLEX_GIMPLE_REG_P (result)
+ && DECL_COMPLEX_GIMPLE_REG_P (base_m))
+ use_it = false;
+ else if (!TREE_ADDRESSABLE (base_m))
+ use_it = true;
+ }
+
+ if (use_it)
+ {
+ var = modify_dest;
+ use = NULL;
+ goto done;
+ }
+ }
+
+ gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
+
+ var = copy_result_decl_to_var (result, id);
+
+ DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
+ DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
+ = tree_cons (NULL_TREE, var,
+ DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
+
+ /* Do not have the rest of GCC warn about this variable as it should
+ not be visible to the user. */
+ TREE_NO_WARNING (var) = 1;
+
+ declare_inline_vars (id->block, var);
+
+ /* Build the use expr. If the return type of the function was
+ promoted, convert it back to the expected type. */
+ use = var;
+ if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
+ use = fold_convert (caller_type, var);
+
+ STRIP_USELESS_TYPE_CONVERSION (use);
+
+ if (DECL_BY_REFERENCE (result))
+ var = build_fold_addr_expr (var);
+ done:
/* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
way, when the RESULT_DECL is encountered, it will be
automatically replaced by the VAR_DECL. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) result,
- (splay_tree_value) var);
+ insert_decl_map (id, result, var);
- /* Build the USE_STMT. If the return type of the function was
- promoted, convert it back to the expected type. */
- if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
- *use_stmt = build_stmt (EXPR_STMT, var);
- else
- *use_stmt = build_stmt (EXPR_STMT,
- build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- var));
- TREE_ADDRESSABLE (*use_stmt) = 1;
-
- /* Build the declaration statement if FN does not return an
- aggregate. */
- if (need_return_decl)
- return build_stmt (DECL_STMT, var);
-#else /* INLINER_FOR_JAVA */
- *var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
- (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
- &need_return_decl, return_slot_addr));
-
- splay_tree_insert (id->decl_map,
- (splay_tree_key) result,
- (splay_tree_value) *var);
- DECL_IGNORED_P (*var) = 1;
- if (need_return_decl)
- return *var;
-#endif /* INLINER_FOR_JAVA */
- /* If FN does return an aggregate, there's no need to declare the
- return variable; we're using a variable in our caller's frame. */
- else
- return NULL_TREE;
+ /* Remember this so we can ignore it in remap_decls. */
+ id->retvar = var;
+
+ *use_p = use;
+ return var;
}
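
To make the return-slot logic above concrete at the source level, here is a
minimal, self-contained C probe. It is an illustration only: the names are
hypothetical, and whether the two printed addresses coincide depends on the
target ABI and optimization level, since the inliner reuses the destination
only when the checks above allow it.

    #include <stdio.h>

    struct big { int a[64]; };     /* returned in memory on most ABIs */

    static inline struct big
    make_big (void)
    {
      struct big b = { { 0 } };
      printf ("callee result lives at %p\n", (void *) &b);
      return b;
    }

    int
    main (void)
    {
      struct big d = make_big ();
      printf ("caller's d lives at   %p\n", (void *) &d);
      return d.a[0];
    }

When the RETURN_SLOT_ADDR or MODIFY_DEST path is taken, the callee's
RESULT_DECL is remapped onto the caller's object and both lines print the
same address.
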
/* Returns nonzero if a function can be inlined as a tree. */
@@ -1013,7 +1330,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it uses "
+ = G_("function %q+F can never be inlined because it uses "
"alloca (override using the always_inline attribute)");
return node;
}
@@ -1021,16 +1338,15 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
if (! t)
break;
-
/* We cannot inline functions that call setjmp. */
if (setjmp_call_p (t))
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
+ = G_("function %q+F can never be inlined because it uses setjmp");
return node;
}
- if (DECL_BUILT_IN (t))
+ if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (t))
{
/* We cannot inline functions that take a variable number of
@@ -1039,49 +1355,45 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
case BUILT_IN_STDARG_START:
case BUILT_IN_NEXT_ARG:
case BUILT_IN_VA_END:
- {
- inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it "
- "uses variable argument lists");
- return node;
- }
+ inline_forbidden_reason
+ = G_("function %q+F can never be inlined because it "
+ "uses variable argument lists");
+ return node;
+
case BUILT_IN_LONGJMP:
- {
- /* We can't inline functions that call __builtin_longjmp at
- all. The non-local goto machinery really requires the
- destination be in a different function. If we allow the
- function calling __builtin_longjmp to be inlined into the
- function calling __builtin_setjmp, Things will Go Awry. */
- /* ??? Need front end help to identify "regular" non-local
- goto. */
- if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
- {
- inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because "
- "it uses setjmp-longjmp exception handling");
- return node;
- }
- }
+ /* We can't inline functions that call __builtin_longjmp at
+ all. The non-local goto machinery really requires the
+ destination be in a different function. If we allow the
+ function calling __builtin_longjmp to be inlined into the
+ function calling __builtin_setjmp, Things will Go Awry. */
+ inline_forbidden_reason
+ = G_("function %q+F can never be inlined because "
+ "it uses setjmp-longjmp exception handling");
+ return node;
+
+ case BUILT_IN_NONLOCAL_GOTO:
+ /* Similarly. */
+ inline_forbidden_reason
+ = G_("function %q+F can never be inlined because "
+ "it uses non-local goto");
+ return node;
+
+ case BUILT_IN_RETURN:
+ case BUILT_IN_APPLY_ARGS:
+	  /* If a __builtin_apply_args caller would be inlined,
+	     it would be saving arguments of the function it has
+	     been inlined into.  Similarly, __builtin_return would
+	     return from the function the callee has been inlined into. */
+ inline_forbidden_reason
+ = G_("function %q+F can never be inlined because "
+ "it uses __builtin_return or __builtin_apply_args");
+ return node;
default:
break;
}
break;
-#ifndef INLINER_FOR_JAVA
- case DECL_STMT:
- /* We cannot inline functions that contain other functions. */
- if (TREE_CODE (TREE_OPERAND (node, 0)) == FUNCTION_DECL
- && DECL_INITIAL (TREE_OPERAND (node, 0)))
- {
- inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
- "because it contains a nested function");
- return node;
- }
- break;
-
- case GOTO_STMT:
case GOTO_EXPR:
t = TREE_OPERAND (node, 0);
@@ -1092,21 +1404,24 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
if (TREE_CODE (t) != LABEL_DECL)
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
+ = G_("function %q+F can never be inlined "
"because it contains a computed goto");
return node;
}
+ break;
- /* We cannot inline a nested function that jumps to a nonlocal
- label. */
- if (TREE_CODE (t) == LABEL_DECL && DECL_CONTEXT (t) != fn)
+ case LABEL_EXPR:
+ t = TREE_OPERAND (node, 0);
+ if (DECL_NONLOCAL (t))
{
+ /* We cannot inline a function that receives a non-local goto
+ because we cannot remap the destination label used in the
+ function that is performing the non-local goto. */
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
- "because it contains a nonlocal goto");
+ = G_("function %q+F can never be inlined "
+ "because it receives a non-local goto");
return node;
}
-
break;
case RECORD_TYPE:
@@ -1120,16 +1435,19 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
UNION_TYPE nodes, then it goes into infinite recursion on a
structure containing a pointer to its own type. If it doesn't,
then the type node for S doesn't get adjusted properly when
- F is inlined, and we abort in find_function_data. */
+ F is inlined.
+
+ ??? This is likely no longer true, but it's too late in the 4.0
+ cycle to try to find out. This should be checked for 4.1. */
for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
- if (variably_modified_type_p (TREE_TYPE (t)))
+ if (variably_modified_type_p (TREE_TYPE (t), NULL))
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
+ = G_("function %q+F can never be inlined "
"because it uses variable sized variables");
return node;
}
-#endif
+
default:
break;
}
@@ -1142,8 +1460,20 @@ static tree
inline_forbidden_p (tree fndecl)
{
location_t saved_loc = input_location;
- tree ret = walk_tree_without_duplicates
- (&DECL_SAVED_TREE (fndecl), inline_forbidden_p_1, fndecl);
+ block_stmt_iterator bsi;
+ basic_block bb;
+ tree ret = NULL_TREE;
+
+ FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
+ inline_forbidden_p_1, fndecl);
+ if (ret)
+ goto egress;
+ }
+
+egress:
input_location = saved_loc;
return ret;
}
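
For reference, here are self-contained GNU C functions that trip the checks
walked by inline_forbidden_p_1 above; they are illustrative only, and with
always_inline each would provoke the corresponding sorry () diagnostic.

    #include <setjmp.h>
    #include <stdarg.h>

    static jmp_buf env;

    static inline int
    f_alloca (unsigned n)            /* "uses alloca" */
    {
      char *buf = __builtin_alloca (n);
      buf[0] = 1;
      return buf[0];
    }

    static inline int
    f_setjmp (void)                  /* "uses setjmp" */
    {
      return setjmp (env);
    }

    static inline int
    f_varargs (int n, ...)           /* "uses variable argument lists" */
    {
      va_list ap;
      int v;
      va_start (ap, n);
      v = va_arg (ap, int);
      va_end (ap);
      return v;
    }

    static inline int
    f_computed_goto (int i)          /* "contains a computed goto" */
    {
      static void *tbl[2] = { &&a, &&b };
      goto *tbl[i & 1];
     a: return 0;
     b: return 1;
    }
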
@@ -1166,7 +1496,7 @@ inlinable_function_p (tree fn)
in C++ it may result in template instantiation.)
If the function is not inlinable for language-specific reasons,
it is left up to the langhook to explain why. */
- inlinable = !(*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn);
+ inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
/* If we don't have the function body available, we can't inline it.
However, this should not be recorded since we also get here for
@@ -1192,12 +1522,6 @@ inlinable_function_p (tree fn)
else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
inlinable = false;
-#ifdef INLINER_FOR_JAVA
- /* Synchronized methods can't be inlined. This is a bug. */
- else if (METHOD_SYNCHRONIZED (fn))
- inlinable = false;
-#endif /* INLINER_FOR_JAVA */
-
else if (inline_forbidden_p (fn))
{
/* See if we should warn about uninlinable functions. Previously,
@@ -1213,11 +1537,10 @@ inlinable_function_p (tree fn)
&& DECL_DECLARED_INLINE_P (fn)
&& !DECL_IN_SYSTEM_HEADER (fn));
- if (lookup_attribute ("always_inline",
- DECL_ATTRIBUTES (fn)))
- sorry (inline_forbidden_reason, fn, fn);
+ if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
+ sorry (inline_forbidden_reason, fn);
else if (do_warning)
- warning (inline_forbidden_reason, fn, fn);
+ warning (OPT_Winline, inline_forbidden_reason, fn);
inlinable = false;
}
@@ -1228,91 +1551,409 @@ inlinable_function_p (tree fn)
return inlinable;
}
-/* If *TP is a CALL_EXPR, replace it with its inline expansion. */
+/* Estimate the cost of a memory move.  Use the machine-dependent
+   word size and take a possible memcpy call into account.  */
+
+int
+estimate_move_cost (tree type)
+{
+ HOST_WIDE_INT size;
+
+ size = int_size_in_bytes (type);
+
+ if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
+ /* Cost of a memcpy call, 3 arguments and the call. */
+ return 4;
+ else
+ return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
+}
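
As a worked example of the formula just above, here is a standalone sketch
with MOVE_MAX_PIECES and MOVE_RATIO frozen to assumed values of 8 and 4; both
are target-dependent macros, so the real thresholds vary.

    #include <stdio.h>

    static int
    move_cost (long size)            /* mirrors estimate_move_cost */
    {
      const long move_max_pieces = 8, move_ratio = 4;   /* assumed */
      if (size < 0 || size > move_max_pieces * move_ratio)
        return 4;                    /* memcpy: 3 arguments plus the call */
      return (size + move_max_pieces - 1) / move_max_pieces;
    }

    int
    main (void)
    {
      printf ("%d %d %d\n",
              move_cost (4),         /* 1: one word-sized piece */
              move_cost (24),        /* 3: three pieces */
              move_cost (100));      /* 4: too large, falls back to memcpy */
      return 0;
    }
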
+
+/* Used by estimate_num_insns.  Estimate the number of instructions
+   contributed by a given statement.  */
static tree
-expand_call_inline (tree *tp, int *walk_subtrees, void *data)
+estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
- inline_data *id;
- tree t;
- tree expr;
- tree stmt;
-#ifndef INLINER_FOR_JAVA
- tree chain;
- tree scope_stmt;
- tree use_stmt;
-#else /* INLINER_FOR_JAVA */
- tree retvar;
-#endif /* INLINER_FOR_JAVA */
- tree fn;
- tree arg_inits;
- tree *inlined_body;
- splay_tree st;
- tree args;
- tree return_slot_addr;
- const char *reason;
+ int *count = (int *) data;
+ tree x = *tp;
- /* See what we've got. */
- id = (inline_data *) data;
- t = *tp;
+ if (IS_TYPE_OR_DECL_P (x))
+ {
+ *walk_subtrees = 0;
+ return NULL;
+ }
+  /* Assume that constants and references count as nothing.  These should
+     be dominated by the operations among them that we count later, and
+     they are a common target of CSE and similar optimizations.  */
+ else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
+ return NULL;
- /* Recurse, but letting recursive invocations know that we are
- inside the body of a TARGET_EXPR. */
- if (TREE_CODE (*tp) == TARGET_EXPR)
+ switch (TREE_CODE (x))
{
-#ifndef INLINER_FOR_JAVA
- int i, len = first_rtl_op (TARGET_EXPR);
+ /* Containers have no cost. */
+ case TREE_LIST:
+ case TREE_VEC:
+ case BLOCK:
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case OBJ_TYPE_REF:
+ case EXC_PTR_EXPR: /* ??? */
+ case FILTER_EXPR: /* ??? */
+ case COMPOUND_EXPR:
+ case BIND_EXPR:
+ case WITH_CLEANUP_EXPR:
+ case NOP_EXPR:
+ case VIEW_CONVERT_EXPR:
+ case SAVE_EXPR:
+ case ADDR_EXPR:
+ case COMPLEX_EXPR:
+ case RANGE_EXPR:
+ case CASE_LABEL_EXPR:
+ case SSA_NAME:
+ case CATCH_EXPR:
+ case EH_FILTER_EXPR:
+ case STATEMENT_LIST:
+ case ERROR_MARK:
+ case NON_LVALUE_EXPR:
+ case FDESC_EXPR:
+ case VA_ARG_EXPR:
+ case TRY_CATCH_EXPR:
+ case TRY_FINALLY_EXPR:
+ case LABEL_EXPR:
+ case GOTO_EXPR:
+ case RETURN_EXPR:
+ case EXIT_EXPR:
+ case LOOP_EXPR:
+ case PHI_NODE:
+ case WITH_SIZE_EXPR:
+ case OMP_CLAUSE:
+ case OMP_RETURN:
+ case OMP_CONTINUE:
+ break;
- /* We're walking our own subtrees. */
+  /* We don't account for constants for now.  Assume that their cost is
+     amortized by the operations that use them.  We may reconsider this
+     decision once we are able to optimize the tree before estimating its
+     size and to break out static initializers.  */
+ case IDENTIFIER_NODE:
+ case INTEGER_CST:
+ case REAL_CST:
+ case COMPLEX_CST:
+ case VECTOR_CST:
+ case STRING_CST:
*walk_subtrees = 0;
+ return NULL;
+
+ /* Try to estimate the cost of assignments. We have three cases to
+ deal with:
+ 1) Simple assignments to registers;
+ 2) Stores to things that must live in memory. This includes
+ "normal" stores to scalars, but also assignments of large
+ structures, or constructors of big arrays;
+ 3) TARGET_EXPRs.
+
+ Let us look at the first two cases, assuming we have "a = b + C":
+ <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
+ If "a" is a GIMPLE register, the assignment to it is free on almost
+ any target, because "a" usually ends up in a real register. Hence
+ the only cost of this expression comes from the PLUS_EXPR, and we
+ can ignore the MODIFY_EXPR.
+ If "a" is not a GIMPLE register, the assignment to "a" will most
+ likely be a real store, so the cost of the MODIFY_EXPR is the cost
+ of moving something into "a", which we compute using the function
+ estimate_move_cost.
+
+ The third case deals with TARGET_EXPRs, for which the semantics are
+ that a temporary is assigned, unless the TARGET_EXPR itself is being
+ assigned to something else. In the latter case we do not need the
+ temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
+ MODIFY_EXPR is free. */
+ case INIT_EXPR:
+ case MODIFY_EXPR:
+      /* Is the right-hand side a TARGET_EXPR?  */
+ if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
+ break;
+ /* ... fall through ... */
- /* Actually walk over them. This loop is the body of
- walk_trees, omitting the case where the TARGET_EXPR
- itself is handled. */
- for (i = 0; i < len; ++i)
- {
- if (i == 2)
- ++id->in_target_cleanup_p;
- walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
- id->tree_pruner);
- if (i == 2)
- --id->in_target_cleanup_p;
- }
+ case TARGET_EXPR:
+ x = TREE_OPERAND (x, 0);
+      /* Is this an assignment to a register?  */
+ if (is_gimple_reg (x))
+ break;
+ /* Otherwise it's a store, so fall through to compute the move cost. */
- return NULL_TREE;
-#else /* INLINER_FOR_JAVA */
- abort ();
-#endif /* INLINER_FOR_JAVA */
+ case CONSTRUCTOR:
+ *count += estimate_move_cost (TREE_TYPE (x));
+ break;
+
+    /* Assign a cost of 1 to the usual operations.
+       ??? We may consider mapping RTL costs to this.  */
+ case COND_EXPR:
+ case VEC_COND_EXPR:
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+
+ case FIX_TRUNC_EXPR:
+ case FIX_CEIL_EXPR:
+ case FIX_FLOOR_EXPR:
+ case FIX_ROUND_EXPR:
+
+ case NEGATE_EXPR:
+ case FLOAT_EXPR:
+ case MIN_EXPR:
+ case MAX_EXPR:
+ case ABS_EXPR:
+
+ case LSHIFT_EXPR:
+ case RSHIFT_EXPR:
+ case LROTATE_EXPR:
+ case RROTATE_EXPR:
+ case VEC_LSHIFT_EXPR:
+ case VEC_RSHIFT_EXPR:
+
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ case BIT_AND_EXPR:
+ case BIT_NOT_EXPR:
+
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ case TRUTH_NOT_EXPR:
+
+ case LT_EXPR:
+ case LE_EXPR:
+ case GT_EXPR:
+ case GE_EXPR:
+ case EQ_EXPR:
+ case NE_EXPR:
+ case ORDERED_EXPR:
+ case UNORDERED_EXPR:
+
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ case LTGT_EXPR:
+
+ case CONVERT_EXPR:
+
+ case CONJ_EXPR:
+
+ case PREDECREMENT_EXPR:
+ case PREINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+
+ case SWITCH_EXPR:
+
+ case ASM_EXPR:
+
+ case REALIGN_LOAD_EXPR:
+
+ case REDUC_MAX_EXPR:
+ case REDUC_MIN_EXPR:
+ case REDUC_PLUS_EXPR:
+ case WIDEN_SUM_EXPR:
+ case DOT_PROD_EXPR:
+
+ case WIDEN_MULT_EXPR:
+
+ case RESX_EXPR:
+ *count += 1;
+ break;
+
+    /* A few special cases of expensive operations.  This is useful
+       for avoiding the inlining of functions containing too many of these.  */
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ case TRUNC_MOD_EXPR:
+ case CEIL_MOD_EXPR:
+ case FLOOR_MOD_EXPR:
+ case ROUND_MOD_EXPR:
+ case RDIV_EXPR:
+ *count += 10;
+ break;
+ case CALL_EXPR:
+ {
+ tree decl = get_callee_fndecl (x);
+ tree arg;
+
+ if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (decl))
+ {
+ case BUILT_IN_CONSTANT_P:
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ case BUILT_IN_EXPECT:
+ return NULL_TREE;
+ default:
+ break;
+ }
+
+	/* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
+	   which uses the function declaration to figure out the arguments.  */
+ if (!decl)
+ {
+ for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
+ *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
+ }
+ else
+ {
+ for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
+ *count += estimate_move_cost (TREE_TYPE (arg));
+ }
+
+ *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
+ break;
+ }
+
+ case OMP_PARALLEL:
+ case OMP_FOR:
+ case OMP_SECTIONS:
+ case OMP_SINGLE:
+ case OMP_SECTION:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ case OMP_ATOMIC:
+ /* OpenMP directives are generally very expensive. */
+ *count += 40;
+ break;
+
+ default:
+ gcc_unreachable ();
}
- else if (TREE_CODE (t) == EXPR_WITH_FILE_LOCATION)
- {
- /* We're walking the subtree directly. */
- *walk_subtrees = 0;
- /* Update the source position. */
- push_srcloc (EXPR_WFL_FILENAME (t), EXPR_WFL_LINENO (t));
- walk_tree (&EXPR_WFL_NODE (t), expand_call_inline, data,
- id->tree_pruner);
- /* Restore the original source position. */
- pop_srcloc ();
+ return NULL;
+}
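
As a worked example of these weights: for a statement a = b + c / d where a
is a GIMPLE register, the MODIFY_EXPR is free, the PLUS_EXPR contributes 1
and the TRUNC_DIV_EXPR contributes 10, so the statement is estimated at 11
"instructions"; if a instead lived in memory, estimate_move_cost of its type
would be added on top.
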
- return NULL_TREE;
+/* Estimate number of instructions that will be created by expanding EXPR. */
+
+int
+estimate_num_insns (tree expr)
+{
+ int num = 0;
+ struct pointer_set_t *visited_nodes;
+ basic_block bb;
+ block_stmt_iterator bsi;
+ struct function *my_function;
+
+ /* If we're given an entire function, walk the CFG. */
+ if (TREE_CODE (expr) == FUNCTION_DECL)
+ {
+ my_function = DECL_STRUCT_FUNCTION (expr);
+ gcc_assert (my_function && my_function->cfg);
+ visited_nodes = pointer_set_create ();
+ FOR_EACH_BB_FN (bb, my_function)
+ {
+ for (bsi = bsi_start (bb);
+ !bsi_end_p (bsi);
+ bsi_next (&bsi))
+ {
+ walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
+ &num, visited_nodes);
+ }
+ }
+ pointer_set_destroy (visited_nodes);
}
+ else
+ walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
- if (TYPE_P (t))
- /* Because types were not copied in copy_body, CALL_EXPRs beneath
- them should not be expanded. This can happen if the type is a
- dynamic array type, for example. */
- *walk_subtrees = 0;
+ return num;
+}
+
+typedef struct function *function_p;
+
+DEF_VEC_P(function_p);
+DEF_VEC_ALLOC_P(function_p,heap);
+
+/* Allocated on the heap rather than with GC, so the garbage collector
+   does not scan this stack; entries must stay reachable elsewhere.  */
+static VEC(function_p,heap) *cfun_stack;
+
+void
+push_cfun (struct function *new_cfun)
+{
+ VEC_safe_push (function_p, heap, cfun_stack, cfun);
+ cfun = new_cfun;
+}
+
+void
+pop_cfun (void)
+{
+ cfun = VEC_pop (function_p, cfun_stack);
+}
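
The intended discipline is strictly stack-like.  A hedged usage sketch, where
other_fn stands in for any FUNCTION_DECL with a struct function attached:

    /* other_fn is a placeholder, not a real symbol in this file.  */
    push_cfun (DECL_STRUCT_FUNCTION (other_fn));
    /* ... operate on other_fn through the global `cfun' ...  */
    pop_cfun ();                     /* restores the previous cfun */
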
+
+/* Install the new lexical TREE_BLOCK NEW_BLOCK underneath CURRENT_BLOCK.  */
+static void
+add_lexical_block (tree current_block, tree new_block)
+{
+ tree *blk_p;
+
+ /* Walk to the last sub-block. */
+ for (blk_p = &BLOCK_SUBBLOCKS (current_block);
+ *blk_p;
+ blk_p = &TREE_CHAIN (*blk_p))
+ ;
+ *blk_p = new_block;
+ BLOCK_SUPERCONTEXT (new_block) = current_block;
+}
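
For readers unfamiliar with the pointer-to-pointer append used above, here is
the same idiom in a self-contained form (a generic sketch, not GCC code):

    #include <stdio.h>

    struct node { int v; struct node *next; };

    static void
    append (struct node **head, struct node *n)
    {
      struct node **p;
      /* Walk to the terminating NULL slot, then plant the new node,
         just as add_lexical_block walks BLOCK_SUBBLOCKS.  */
      for (p = head; *p; p = &(*p)->next)
        ;
      *p = n;
    }

    int
    main (void)
    {
      struct node a = { 1, NULL }, b = { 2, NULL };
      struct node *head = NULL;
      append (&head, &a);
      append (&head, &b);
      printf ("%d %d\n", head->v, head->next->v);   /* prints: 1 2 */
      return 0;
    }
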
+
+/* If *TP is a CALL_EXPR, replace it with its inline expansion. */
+
+static bool
+expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
+{
+ copy_body_data *id;
+ tree t;
+ tree use_retvar;
+ tree fn;
+ splay_tree st;
+ tree args;
+ tree return_slot_addr;
+ tree modify_dest;
+ location_t saved_location;
+ struct cgraph_edge *cg_edge;
+ const char *reason;
+ basic_block return_block;
+ edge e;
+ block_stmt_iterator bsi, stmt_bsi;
+ bool successfully_inlined = FALSE;
+ bool purge_dead_abnormal_edges;
+ tree t_step;
+ tree var;
+
+ /* See what we've got. */
+ id = (copy_body_data *) data;
+ t = *tp;
+
+ /* Set input_location here so we get the right instantiation context
+ if we call instantiate_decl from inlinable_function_p. */
+ saved_location = input_location;
+ if (EXPR_HAS_LOCATION (t))
+ input_location = EXPR_LOCATION (t);
/* From here on, we're only interested in CALL_EXPRs. */
if (TREE_CODE (t) != CALL_EXPR)
- return NULL_TREE;
+ goto egress;
/* First, see if we can figure out what function is being called.
If we cannot, then there is no hope of inlining the function. */
fn = get_callee_fndecl (t);
if (!fn)
- return NULL_TREE;
+ goto egress;
/* Turn forward declarations into real ones. */
fn = cgraph_node (fn)->decl;
@@ -1329,57 +1970,102 @@ expand_call_inline (tree *tp, int *walk_subtrees, void *data)
&& DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
fn = DECL_ABSTRACT_ORIGIN (fn);
+  /* Objective C and Fortran still call tree_rest_of_compilation directly.
+     Kill this check once that is fixed.  */
+ if (!id->dst_node->analyzed)
+ goto egress;
+
+ cg_edge = cgraph_edge (id->dst_node, stmt);
+
+  /* Constant propagation on the arguments done during previous inlining
+     may create a new direct call.  Produce an edge for it.  */
+ if (!cg_edge)
+ {
+ struct cgraph_node *dest = cgraph_node (fn);
+
+      /* We have a missing edge in the callgraph.  This can happen when
+         previous inlining turned an indirect call into a direct call by
+         constant propagating the arguments.  In all other cases we hit
+         a bug (incorrect node sharing is the most common reason for
+         missing edges).  */
+ gcc_assert (dest->needed || !flag_unit_at_a_time);
+ cgraph_create_edge (id->dst_node, dest, stmt,
+ bb->count, bb->loop_depth)->inline_failed
+ = N_("originally indirect function call not considered for inlining");
+ goto egress;
+ }
+
/* Don't try to inline functions that are not well-suited to
inlining. */
- if (!cgraph_inline_p (id->current_decl, fn, &reason))
+ if (!cgraph_inline_p (cg_edge, &reason))
{
- if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
+ if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
+ /* Avoid warnings during early inline pass. */
+ && (!flag_unit_at_a_time || cgraph_global_info_ready))
{
- sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
+ sorry ("inlining failed in call to %q+F: %s", fn, reason);
sorry ("called from here");
}
else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
&& !DECL_IN_SYSTEM_HEADER (fn)
&& strlen (reason)
- && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
+ && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
+ /* Avoid warnings during early inline pass. */
+ && (!flag_unit_at_a_time || cgraph_global_info_ready))
{
- warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
- warning ("called from here");
+ warning (OPT_Winline, "inlining failed in call to %q+F: %s",
+ fn, reason);
+ warning (OPT_Winline, "called from here");
}
- return NULL_TREE;
+ goto egress;
+ }
+ fn = cg_edge->callee->decl;
+
+#ifdef ENABLE_CHECKING
+ if (cg_edge->callee->decl != id->dst_node->decl)
+ verify_cgraph_node (cg_edge->callee);
+#endif
+
+ /* We will be inlining this callee. */
+ id->eh_region = lookup_stmt_eh_region (stmt);
+
+ /* Split the block holding the CALL_EXPR. */
+ e = split_block (bb, stmt);
+ bb = e->src;
+ return_block = e->dest;
+ remove_edge (e);
+
+ /* split_block splits after the statement; work around this by
+ moving the call into the second block manually. Not pretty,
+ but seems easier than doing the CFG manipulation by hand
+ when the CALL_EXPR is in the last statement of BB. */
+ stmt_bsi = bsi_last (bb);
+ bsi_remove (&stmt_bsi, false);
+
+ /* If the CALL_EXPR was in the last statement of BB, it may have
+ been the source of abnormal edges. In this case, schedule
+ the removal of dead abnormal edges. */
+ bsi = bsi_start (return_block);
+ if (bsi_end_p (bsi))
+ {
+ bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+ purge_dead_abnormal_edges = true;
+ }
+ else
+ {
+ bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+ purge_dead_abnormal_edges = false;
}
- if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
- return NULL_TREE;
-
- /* Set the current filename and line number to the function we are
- inlining so that when we create new _STMT nodes here they get
- line numbers corresponding to the function we are calling. We
- wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
- because individual statements don't record the filename. */
- push_srcloc (DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn));
-
-#ifndef INLINER_FOR_JAVA
- /* Build a statement-expression containing code to initialize the
- arguments, the actual inline expansion of the body, and a label
- for the return statements within the function to jump to. The
- type of the statement expression is the return type of the
- function call. */
- expr = build1 (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), make_node (COMPOUND_STMT));
- /* There is no scope associated with the statement-expression. */
- STMT_EXPR_NO_SCOPE (expr) = 1;
- if (lookup_attribute ("warn_unused_result",
- TYPE_ATTRIBUTES (TREE_TYPE (fn))))
- STMT_EXPR_WARN_UNUSED_RESULT (expr) = 1;
- stmt = STMT_EXPR_STMT (expr);
-#else /* INLINER_FOR_JAVA */
+ stmt_bsi = bsi_start (return_block);
+
/* Build a block containing code to initialize the arguments, the
actual inline expansion of the body, and a label for the return
statements within the function to jump to. The type of the
statement expression is the return type of the function call. */
- stmt = NULL;
- expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)), stmt);
-#endif /* INLINER_FOR_JAVA */
+ id->block = make_node (BLOCK);
+ BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
+ BLOCK_SOURCE_LOCATION (id->block) = input_location;
+ add_lexical_block (TREE_BLOCK (stmt), id->block);
/* Local declarations will be replaced by their equivalents in this
map. */
@@ -1389,227 +2075,138 @@ expand_call_inline (tree *tp, int *walk_subtrees, void *data)
/* Initialize the parameters. */
args = TREE_OPERAND (t, 1);
- return_slot_addr = NULL_TREE;
- if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
- {
- return_slot_addr = TREE_VALUE (args);
- args = TREE_CHAIN (args);
- }
-
-#ifndef INLINER_FOR_JAVA
- arg_inits = initialize_inlined_parameters (id, args, fn);
- /* Expand any inlined calls in the initializers. Do this before we
- push FN on the stack of functions we are inlining; we want to
- inline calls to FN that appear in the initializers for the
- parameters. */
- expand_calls_inline (&arg_inits, id);
- /* And add them to the tree. */
- COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), arg_inits);
-#else /* INLINER_FOR_JAVA */
- arg_inits = initialize_inlined_parameters (id, args, fn, expr);
- if (arg_inits)
- {
- /* Expand any inlined calls in the initializers. Do this before we
- push FN on the stack of functions we are inlining; we want to
- inline calls to FN that appear in the initializers for the
- parameters. */
- expand_calls_inline (&arg_inits, id);
-
- /* And add them to the tree. */
- BLOCK_EXPR_BODY (expr) = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
- TREE_TYPE (arg_inits),
- arg_inits);
- }
-#endif /* INLINER_FOR_JAVA */
- /* Record the function we are about to inline so that we can avoid
- recursing into it. */
- VARRAY_PUSH_TREE (id->fns, fn);
+ /* Record the function we are about to inline. */
+ id->src_fn = fn;
+ id->src_node = cg_edge->callee;
- /* Record the function we are about to inline if optimize_function
- has not been called on it yet and we don't have it in the list. */
- if (! DECL_INLINED_FNS (fn))
- {
- int i;
+ initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
- for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
- if (VARRAY_TREE (id->inlined_fns, i) == fn)
- break;
- if (i < 0)
- VARRAY_PUSH_TREE (id->inlined_fns, fn);
- }
+ if (DECL_INITIAL (fn))
+ add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
/* Return statements in the function body will be replaced by jumps
to the RET_LABEL. */
- id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
- DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
- if (! DECL_INITIAL (fn)
- || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
- abort ();
-
-#ifndef INLINER_FOR_JAVA
- /* Create a block to put the parameters in. We have to do this
- after the parameters have been remapped because remapping
- parameters is different from remapping ordinary variables. */
- scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
- SCOPE_BEGIN_P (scope_stmt) = 1;
- SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
- remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
- TREE_CHAIN (scope_stmt) = COMPOUND_BODY (stmt);
- COMPOUND_BODY (stmt) = scope_stmt;
-
- /* Tell the debugging backends that this block represents the
- outermost scope of the inlined function. */
- if (SCOPE_STMT_BLOCK (scope_stmt))
- BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
+ gcc_assert (DECL_INITIAL (fn));
+ gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
- /* Declare the return variable for the function. */
- COMPOUND_BODY (stmt)
- = chainon (COMPOUND_BODY (stmt),
- declare_return_variable (id, return_slot_addr, &use_stmt));
-#else /* INLINER_FOR_JAVA */
- {
- /* Declare the return variable for the function. */
- tree decl = declare_return_variable (id, return_slot_addr, &retvar);
- if (retvar)
- {
- tree *next = &BLOCK_VARS (expr);
- while (*next)
- next = &TREE_CHAIN (*next);
- *next = decl;
- }
- }
-#endif /* INLINER_FOR_JAVA */
-
- /* After we've initialized the parameters, we insert the body of the
- function itself. */
-#ifndef INLINER_FOR_JAVA
- inlined_body = &COMPOUND_BODY (stmt);
- while (*inlined_body)
- inlined_body = &TREE_CHAIN (*inlined_body);
- *inlined_body = copy_body (id);
-#else /* INLINER_FOR_JAVA */
- {
- tree new_body;
- java_inlining_map_static_initializers (fn, id->decl_map);
- new_body = copy_body (id);
- TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
- BLOCK_EXPR_BODY (expr)
- = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
- TREE_TYPE (new_body), new_body);
- inlined_body = &BLOCK_EXPR_BODY (expr);
- }
-#endif /* INLINER_FOR_JAVA */
-
- /* After the body of the function comes the RET_LABEL. This must come
- before we evaluate the returned value below, because that evaluation
- may cause RTL to be generated. */
-#ifndef INLINER_FOR_JAVA
- COMPOUND_BODY (stmt)
- = chainon (COMPOUND_BODY (stmt),
- build_stmt (LABEL_STMT, id->ret_label));
-#else /* INLINER_FOR_JAVA */
- {
- tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
- BLOCK_EXPR_BODY (expr)
- = add_stmt_to_compound (BLOCK_EXPR_BODY (expr), void_type_node, label);
- TREE_SIDE_EFFECTS (label) = TREE_SIDE_EFFECTS (t);
- }
-#endif /* INLINER_FOR_JAVA */
-
- /* Finally, mention the returned value so that the value of the
- statement-expression is the returned value of the function. */
-#ifndef INLINER_FOR_JAVA
- COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), use_stmt);
-
- /* Close the block for the parameters. */
- scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
- SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
- remap_block (scope_stmt, NULL_TREE, id);
- COMPOUND_BODY (stmt)
- = chainon (COMPOUND_BODY (stmt), scope_stmt);
-#else /* INLINER_FOR_JAVA */
- if (retvar)
+ /* Find the lhs to which the result of this call is assigned. */
+ return_slot_addr = NULL;
+ if (TREE_CODE (stmt) == MODIFY_EXPR)
{
- /* Mention the retvar. If the return type of the function was
- promoted, convert it back to the expected type. */
- if (TREE_TYPE (TREE_TYPE (fn)) != TREE_TYPE (retvar))
- retvar = build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)), retvar);
- BLOCK_EXPR_BODY (expr)
- = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
- TREE_TYPE (retvar), retvar);
+ modify_dest = TREE_OPERAND (stmt, 0);
+
+      /* The function we are inlining might not return a value, in which
+	 case a warning that the function does not return a value belongs
+	 elsewhere.  The concern here is that the optimizers will then see
+	 that the variable to which the value is assigned was not
+	 initialized; we do not want to issue a warning about that
+	 uninitialized variable.  */
+ if (DECL_P (modify_dest))
+ TREE_NO_WARNING (modify_dest) = 1;
+ if (CALL_EXPR_RETURN_SLOT_OPT (t))
+ {
+ return_slot_addr = build_fold_addr_expr (modify_dest);
+ STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
+ modify_dest = NULL;
+ }
}
+ else
+ modify_dest = NULL;
- java_inlining_merge_static_initializers (fn, id->decl_map);
-#endif /* INLINER_FOR_JAVA */
+ /* Declare the return variable for the function. */
+ declare_return_variable (id, return_slot_addr,
+ modify_dest, &use_retvar);
+
+ /* This is it. Duplicate the callee body. Assume callee is
+ pre-gimplified. Note that we must not alter the caller
+ function in any way before this point, as this CALL_EXPR may be
+ a self-referential call; if we're calling ourselves, we need to
+ duplicate our body before altering anything. */
+ copy_body (id, bb->count, bb->frequency, bb, return_block);
+
+ /* Add local vars in this inlined callee to caller. */
+ t_step = id->src_cfun->unexpanded_var_list;
+ for (; t_step; t_step = TREE_CHAIN (t_step))
+ {
+ var = TREE_VALUE (t_step);
+ if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
+ cfun->unexpanded_var_list);
+ else
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
+ cfun->unexpanded_var_list);
+ }
/* Clean up. */
splay_tree_delete (id->decl_map);
id->decl_map = st;
- /* Although, from the semantic viewpoint, the new expression has
- side-effects only if the old one did, it is not possible, from
- the technical viewpoint, to evaluate the body of a function
- multiple times without serious havoc. */
- TREE_SIDE_EFFECTS (expr) = 1;
-
- /* Replace the call by the inlined body. Wrap it in an
- EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
- pointing to the right place. */
-#ifndef INLINER_FOR_JAVA
- chain = TREE_CHAIN (*tp);
-#endif /* INLINER_FOR_JAVA */
- *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
- /*col=*/0);
- EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
-#ifndef INLINER_FOR_JAVA
- TREE_CHAIN (*tp) = chain;
-#endif /* not INLINER_FOR_JAVA */
- pop_srcloc ();
+ /* If the inlined function returns a result that we care about,
+ clobber the CALL_EXPR with a reference to the return variable. */
+ if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
+ {
+ *tp = use_retvar;
+ maybe_clean_or_replace_eh_stmt (stmt, stmt);
+ }
+ else
+    /* We're modifying a BSI owned by gimple_expand_calls_inline ();
+       bsi_remove () will leave the iterator in a sane state.  */
+ bsi_remove (&stmt_bsi, true);
+
+ if (purge_dead_abnormal_edges)
+ tree_purge_dead_abnormal_call_edges (return_block);
/* If the value of the new expression is ignored, that's OK. We
don't warn about this for CALL_EXPRs, so we shouldn't warn about
the equivalent inlined version either. */
TREE_USED (*tp) = 1;
- /* Update callgraph if needed. */
- if (id->decl)
- {
- cgraph_remove_call (id->decl, fn);
- cgraph_create_edges (id->decl, *inlined_body);
- }
-
- /* Recurse into the body of the just inlined function. */
- {
- tree old_decl = id->current_decl;
- id->current_decl = fn;
- expand_calls_inline (inlined_body, id);
- id->current_decl = old_decl;
- }
- VARRAY_POP (id->fns);
+ /* Output the inlining info for this abstract function, since it has been
+ inlined. If we don't do this now, we can lose the information about the
+ variables in the function when the blocks get blown away as soon as we
+ remove the cgraph node. */
+ (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
- /* Don't walk into subtrees. We've already handled them above. */
- *walk_subtrees = 0;
+ /* Update callgraph if needed. */
+ cgraph_remove_node (cg_edge->callee);
- (*lang_hooks.tree_inlining.end_inlining) (fn);
+ id->block = NULL_TREE;
+ successfully_inlined = TRUE;
- /* Keep iterating. */
- return NULL_TREE;
+ egress:
+ input_location = saved_location;
+ return successfully_inlined;
}
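
In CFG terms, the net effect for x = f (y) sitting in block BB is: BB is
split at the call, the call statement is moved to the head of RETURN_BLOCK,
the callee's copied blocks are wired in between BB and RETURN_BLOCK, and the
CALL_EXPR is finally overwritten with USE_RETVAR (or the whole statement is
removed when the result is unused).
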
-/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
- expansions as appropriate. */
-static void
-expand_calls_inline (tree *tp, inline_data *id)
+/* Expand call statements reachable in basic block BB.
+   We can only have CALL_EXPRs as the "toplevel" tree code or nested
+   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We cannot,
+   unfortunately, use that function here because we need a pointer
+   to the CALL_EXPR, not the tree itself.  */
+
+static bool
+gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
- /* Search through *TP, replacing all calls to inline functions by
- appropriate equivalents. Use walk_tree in no-duplicates mode
- to avoid exponential time complexity. (We can't just use
- walk_tree_without_duplicates, because of the special TARGET_EXPR
- handling in expand_calls. The hash table is set up in
- optimize_function. */
- walk_tree (tp, expand_call_inline, id, id->tree_pruner);
+ block_stmt_iterator bsi;
+
+ /* Register specific tree functions. */
+ tree_register_cfg_hooks ();
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree *expr_p = bsi_stmt_ptr (bsi);
+ tree stmt = *expr_p;
+
+ if (TREE_CODE (*expr_p) == MODIFY_EXPR)
+ expr_p = &TREE_OPERAND (*expr_p, 1);
+ if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
+ expr_p = &TREE_OPERAND (*expr_p, 0);
+ if (TREE_CODE (*expr_p) == CALL_EXPR)
+ if (expand_call_inline (bb, stmt, expr_p, id))
+ return true;
+ }
+ return false;
}
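
Source-level forms whose GIMPLE puts the CALL_EXPR in the positions scanned
above; this is illustrative C that can be inspected with -fdump-tree-gimple:

    extern int foo (int);

    void
    calls (int x, int *out)
    {
      foo (x);                       /* toplevel CALL_EXPR */
      *out = foo (x);                /* CALL_EXPR as the RHS of a MODIFY_EXPR */
    }

The WITH_SIZE_EXPR wrapper arises only for variable-sized results and has no
portable C spelling, so it is omitted here.
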
/* Expand calls to inline functions in the body of FN. */
@@ -1617,9 +2214,9 @@ expand_calls_inline (tree *tp, inline_data *id)
void
optimize_inline_calls (tree fn)
{
- inline_data id;
+ copy_body_data id;
tree prev_fn;
-
+ basic_block bb;
/* There is no point in performing inlining if errors have already
occurred -- and we might crash if we try to inline invalid
code. */
@@ -1629,435 +2226,684 @@ optimize_inline_calls (tree fn)
/* Clear out ID. */
memset (&id, 0, sizeof (id));
- id.decl = fn;
- id.current_decl = fn;
- /* Don't allow recursion into FN. */
- VARRAY_TREE_INIT (id.fns, 32, "fns");
- VARRAY_PUSH_TREE (id.fns, fn);
+ id.src_node = id.dst_node = cgraph_node (fn);
+ id.dst_fn = fn;
/* Or any functions that aren't finished yet. */
prev_fn = NULL_TREE;
if (current_function_decl)
{
- VARRAY_PUSH_TREE (id.fns, current_function_decl);
+ id.dst_fn = current_function_decl;
prev_fn = current_function_decl;
}
- prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
- (&id.fns, prev_fn));
-
- /* Create the list of functions this call will inline. */
- VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
-
- /* Keep track of the low-water mark, i.e., the point where the first
- real inlining is represented in ID.FNS. */
- id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
-
- /* Replace all calls to inline functions with the bodies of those
- functions. */
- id.tree_pruner = htab_create (37, htab_hash_pointer,
- htab_eq_pointer, NULL);
- expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
-
- /* Clean up. */
- htab_delete (id.tree_pruner);
- if (DECL_LANG_SPECIFIC (fn))
+ id.copy_decl = copy_decl_maybe_to_var;
+ id.transform_call_graph_edges = CB_CGE_DUPLICATE;
+ id.transform_new_cfg = false;
+ id.transform_return_to_modify = true;
+ id.transform_lang_insert_block = false;
+
+ push_gimplify_context ();
+
+ /* Reach the trees by walking over the CFG, and note the
+ enclosing basic-blocks in the call edges. */
+ /* We walk the blocks going forward, because inlined function bodies
+ will split id->current_basic_block, and the new blocks will
+ follow it; we'll trudge through them, processing their CALL_EXPRs
+ along the way. */
+ FOR_EACH_BB (bb)
+ gimple_expand_calls_inline (bb, &id);
+
+ pop_gimplify_context (NULL);
+ /* Renumber the (code) basic_blocks consecutively. */
+ compact_blocks ();
+ /* Renumber the lexical scoping (non-code) blocks consecutively. */
+ number_blocks (fn);
+
+#ifdef ENABLE_CHECKING
{
- tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
+ struct cgraph_edge *e;
+
+ verify_cgraph_node (id.dst_node);
- if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
- memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
- VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
- DECL_INLINED_FNS (fn) = ifn;
+ /* Double check that we inlined everything we are supposed to inline. */
+ for (e = id.dst_node->callees; e; e = e->next_callee)
+ gcc_assert (e->inline_failed);
}
+#endif
+ /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
+ as inlining loops might increase the maximum. */
+ if (ENTRY_BLOCK_PTR->count)
+ counts_to_freqs ();
+ fold_cond_expr_cond ();
}
-/* FN is a function that has a complete body, and CLONE is a function
- whose body is to be set to a copy of FN, mapping argument
- declarations according to the ARG_MAP splay_tree. */
+/* FN is a function that has a complete body, and CLONE is a function whose
+ body is to be set to a copy of FN, mapping argument declarations according
+ to the ARG_MAP splay_tree. */
void
clone_body (tree clone, tree fn, void *arg_map)
{
- inline_data id;
+ copy_body_data id;
- /* Clone the body, as if we were making an inline call. But, remap
- the parameters in the callee to the parameters of caller. If
- there's an in-charge parameter, map it to an appropriate
- constant. */
+ /* Clone the body, as if we were making an inline call. But, remap the
+ parameters in the callee to the parameters of caller. */
memset (&id, 0, sizeof (id));
- VARRAY_TREE_INIT (id.fns, 2, "fns");
- VARRAY_PUSH_TREE (id.fns, clone);
- VARRAY_PUSH_TREE (id.fns, fn);
+ id.src_fn = fn;
+ id.dst_fn = clone;
+ id.src_cfun = DECL_STRUCT_FUNCTION (fn);
id.decl_map = (splay_tree)arg_map;
- /* Cloning is treated slightly differently from inlining. Set
- CLONING_P so that it's clear which operation we're performing. */
- id.cloning_p = true;
+ id.copy_decl = copy_decl_no_change;
+ id.transform_call_graph_edges = CB_CGE_DUPLICATE;
+ id.transform_new_cfg = true;
+ id.transform_return_to_modify = false;
+ id.transform_lang_insert_block = true;
+
+ /* We're not inside any EH region. */
+ id.eh_region = -1;
/* Actually copy the body. */
- TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
+ append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
}
-/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
- FUNC is called with the DATA and the address of each sub-tree. If
- FUNC returns a non-NULL value, the traversal is aborted, and the
- value returned by FUNC is returned. If HTAB is non-NULL it is used
- to record the nodes visited, and to avoid visiting a node more than
- once. */
+/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
tree
-walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
+copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
- htab_t htab = (htab_t) htab_;
- enum tree_code code;
- int walk_subtrees;
- tree result;
-
-#define WALK_SUBTREE(NODE) \
- do \
- { \
- result = walk_tree (&(NODE), func, data, htab); \
- if (result) \
- return result; \
- } \
- while (0)
-
-#define WALK_SUBTREE_TAIL(NODE) \
- do \
- { \
- tp = & (NODE); \
- goto tail_recurse; \
- } \
- while (0)
-
- tail_recurse:
- /* Skip empty subtrees. */
- if (!*tp)
- return NULL_TREE;
-
- if (htab)
+ enum tree_code code = TREE_CODE (*tp);
+
+ /* We make copies of most nodes. */
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ || code == TREE_LIST
+ || code == TREE_VEC
+ || code == TYPE_DECL
+ || code == OMP_CLAUSE)
{
- void **slot;
-
- /* Don't walk the same tree twice, if the user has requested
- that we avoid doing so. */
- slot = htab_find_slot (htab, *tp, INSERT);
- if (*slot)
- return NULL_TREE;
- *slot = *tp;
- }
+ /* Because the chain gets clobbered when we make a copy, we save it
+ here. */
+ tree chain = TREE_CHAIN (*tp);
+ tree new;
- /* Call the function. */
- walk_subtrees = 1;
- result = (*func) (tp, &walk_subtrees, data);
+ /* Copy the node. */
+ new = copy_node (*tp);
- /* If we found something, return it. */
- if (result)
- return result;
+ /* Propagate mudflap marked-ness. */
+ if (flag_mudflap && mf_marked_p (*tp))
+ mf_mark (new);
- code = TREE_CODE (*tp);
+ *tp = new;
-#ifndef INLINER_FOR_JAVA
- /* Even if we didn't, FUNC may have decided that there was nothing
- interesting below this point in the tree. */
- if (!walk_subtrees)
+ /* Now, restore the chain, if appropriate. That will cause
+ walk_tree to walk into the chain as well. */
+ if (code == PARM_DECL
+ || code == TREE_LIST
+ || code == OMP_CLAUSE)
+ TREE_CHAIN (*tp) = chain;
+
+ /* For now, we don't update BLOCKs when we make copies. So, we
+ have to nullify all BIND_EXPRs. */
+ if (TREE_CODE (*tp) == BIND_EXPR)
+ BIND_EXPR_BLOCK (*tp) = NULL_TREE;
+ }
+ else if (code == CONSTRUCTOR)
{
- if (STATEMENT_CODE_P (code) || code == TREE_LIST
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
- /* But we still need to check our siblings. */
- WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
- else
- return NULL_TREE;
+ /* CONSTRUCTOR nodes need special handling because
+ we need to duplicate the vector of elements. */
+ tree new;
+
+ new = copy_node (*tp);
+
+ /* Propagate mudflap marked-ness. */
+ if (flag_mudflap && mf_marked_p (*tp))
+ mf_mark (new);
+
+ CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
+ CONSTRUCTOR_ELTS (*tp));
+ *tp = new;
}
+ else if (TREE_CODE_CLASS (code) == tcc_type)
+ *walk_subtrees = 0;
+ else if (TREE_CODE_CLASS (code) == tcc_declaration)
+ *walk_subtrees = 0;
+ else if (TREE_CODE_CLASS (code) == tcc_constant)
+ *walk_subtrees = 0;
+ else
+ gcc_assert (code != STATEMENT_LIST);
+ return NULL_TREE;
+}
- /* Handle common cases up front. */
- if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
-#else /* INLINER_FOR_JAVA */
- if (code != EXIT_BLOCK_EXPR
- && code != SAVE_EXPR
- && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
-#endif /* INLINER_FOR_JAVA */
- {
- int i, len;
-
-#ifndef INLINER_FOR_JAVA
- /* Set lineno here so we get the right instantiation context
- if we call instantiate_decl from inlinable_function_p. */
- if (STATEMENT_CODE_P (code) && !STMT_LINENO_FOR_FN_P (*tp))
- input_line = STMT_LINENO (*tp);
-#endif /* not INLINER_FOR_JAVA */
-
- /* Walk over all the sub-trees of this operand. */
- len = first_rtl_op (code);
- /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
- But, we only want to walk once. */
- if (code == TARGET_EXPR
- && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
- --len;
- /* Go through the subtrees. We need to do this in forward order so
- that the scope of a FOR_EXPR is handled properly. */
- for (i = 0; i < len; ++i)
- WALK_SUBTREE (TREE_OPERAND (*tp, i));
-
-#ifndef INLINER_FOR_JAVA
- /* For statements, we also walk the chain so that we cover the
- entire statement tree. */
- if (STATEMENT_CODE_P (code))
- {
- if (code == DECL_STMT
- && DECL_STMT_DECL (*tp)
- && DECL_P (DECL_STMT_DECL (*tp)))
- {
- /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
- into declarations that are just mentioned, rather than
- declared; they don't really belong to this part of the tree.
- And, we can see cycles: the initializer for a declaration can
- refer to the declaration itself. */
- WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
- WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
- WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp)));
- WALK_SUBTREE (TREE_TYPE (*tp));
- }
+/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
+   information indicating to what new SAVE_EXPR this one should be mapped,
+   use that one.  Otherwise, create a new node and enter it in ST.  */
- /* This can be tail-recursion optimized if we write it this way. */
- WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
- }
+static void
+remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
+{
+ splay_tree st = (splay_tree) st_;
+ splay_tree_node n;
+ tree t;
-#endif /* not INLINER_FOR_JAVA */
- /* We didn't find what we were looking for. */
- return NULL_TREE;
+ /* See if we already encountered this SAVE_EXPR. */
+ n = splay_tree_lookup (st, (splay_tree_key) *tp);
+
+ /* If we didn't already remap this SAVE_EXPR, do so now. */
+ if (!n)
+ {
+ t = copy_node (*tp);
+
+ /* Remember this SAVE_EXPR. */
+ splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
+ /* Make sure we don't remap an already-remapped SAVE_EXPR. */
+ splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
}
- else if (TREE_CODE_CLASS (code) == 'd')
+ else
{
- WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
+ /* We've already walked into this SAVE_EXPR; don't do it again. */
+ *walk_subtrees = 0;
+ t = (tree) n->value;
}
- else if (TREE_CODE_CLASS (code) == 't')
+
+ /* Replace this SAVE_EXPR with the copy. */
+ *tp = t;
+}
+
+/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
+   copies the label's declaration and enters it in the splay_tree in DATA
+   (which is really a `copy_body_data *').  */
+
+static tree
+mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data)
+{
+ copy_body_data *id = (copy_body_data *) data;
+
+ /* Don't walk into types. */
+ if (TYPE_P (*tp))
+ *walk_subtrees = 0;
+
+ else if (TREE_CODE (*tp) == LABEL_EXPR)
{
- WALK_SUBTREE (TYPE_SIZE (*tp));
- WALK_SUBTREE (TYPE_SIZE_UNIT (*tp));
- /* Also examine various special fields, below. */
+ tree decl = TREE_OPERAND (*tp, 0);
+
+ /* Copy the decl and remember the copy. */
+ insert_decl_map (id, decl, id->copy_decl (decl, id));
}
- result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
- data, htab);
- if (result || ! walk_subtrees)
- return result;
+ return NULL_TREE;
+}
+
+/* Perform any modifications to EXPR required when it is unsaved. Does
+ not recurse into EXPR's subtrees. */
- /* Not one of the easy cases. We must explicitly go through the
- children. */
- switch (code)
+static void
+unsave_expr_1 (tree expr)
+{
+ switch (TREE_CODE (expr))
{
- case ERROR_MARK:
- case IDENTIFIER_NODE:
- case INTEGER_CST:
- case REAL_CST:
- case VECTOR_CST:
- case STRING_CST:
- case REAL_TYPE:
- case COMPLEX_TYPE:
- case VECTOR_TYPE:
- case VOID_TYPE:
- case BOOLEAN_TYPE:
- case UNION_TYPE:
- case ENUMERAL_TYPE:
- case BLOCK:
- case RECORD_TYPE:
- case CHAR_TYPE:
- case PLACEHOLDER_EXPR:
- /* None of these have subtrees other than those already walked
- above. */
- break;
+ case TARGET_EXPR:
+ /* Don't mess with a TARGET_EXPR that hasn't been expanded.
+ It's OK for this to happen if it was part of a subtree that
+ isn't immediately expanded, such as operand 2 of another
+ TARGET_EXPR. */
+ if (TREE_OPERAND (expr, 1))
+ break;
- case POINTER_TYPE:
- case REFERENCE_TYPE:
- WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
+ TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
+ TREE_OPERAND (expr, 3) = NULL_TREE;
break;
- case TREE_LIST:
- WALK_SUBTREE (TREE_VALUE (*tp));
- WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
+ default:
break;
+ }
+}
- case TREE_VEC:
- {
- int len = TREE_VEC_LENGTH (*tp);
+/* Called via walk_tree when an expression is unsaved.  Using the
+   splay_tree in the copy_body_data given by DATA, remaps all local
+   declarations to appropriate replacements.  */
- if (len == 0)
- break;
+static tree
+unsave_r (tree *tp, int *walk_subtrees, void *data)
+{
+ copy_body_data *id = (copy_body_data *) data;
+ splay_tree st = id->decl_map;
+ splay_tree_node n;
- /* Walk all elements but the first. */
- while (--len)
- WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
+ /* Only a local declaration (variable or label). */
+ if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
+ || TREE_CODE (*tp) == LABEL_DECL)
+ {
+ /* Lookup the declaration. */
+ n = splay_tree_lookup (st, (splay_tree_key) *tp);
- /* Now walk the first one as a tail call. */
- WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
- }
+ /* If it's there, remap it. */
+ if (n)
+ *tp = (tree) n->value;
+ }
- case COMPLEX_CST:
- WALK_SUBTREE (TREE_REALPART (*tp));
- WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
+ else if (TREE_CODE (*tp) == STATEMENT_LIST)
+ copy_statement_list (tp);
+ else if (TREE_CODE (*tp) == BIND_EXPR)
+ copy_bind_expr (tp, walk_subtrees, id);
+ else if (TREE_CODE (*tp) == SAVE_EXPR)
+ remap_save_expr (tp, st, walk_subtrees);
+ else
+ {
+ copy_tree_r (tp, walk_subtrees, NULL);
- case CONSTRUCTOR:
- WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
+ /* Do whatever unsaving is required. */
+ unsave_expr_1 (*tp);
+ }
- case METHOD_TYPE:
- WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
- /* Fall through. */
+ /* Keep iterating. */
+ return NULL_TREE;
+}
- case FUNCTION_TYPE:
- WALK_SUBTREE (TREE_TYPE (*tp));
- {
- tree arg = TYPE_ARG_TYPES (*tp);
+/* Copies everything in EXPR and replaces variables, labels
+ and SAVE_EXPRs local to EXPR. */
- /* We never want to walk into default arguments. */
- for (; arg; arg = TREE_CHAIN (arg))
- WALK_SUBTREE (TREE_VALUE (arg));
- }
- break;
+tree
+unsave_expr_now (tree expr)
+{
+ copy_body_data id;
- case ARRAY_TYPE:
- WALK_SUBTREE (TREE_TYPE (*tp));
- WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));
+ /* There's nothing to do for NULL_TREE. */
+ if (expr == 0)
+ return expr;
- case INTEGER_TYPE:
- WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
- WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));
+ /* Set up ID. */
+ memset (&id, 0, sizeof (id));
+ id.src_fn = current_function_decl;
+ id.dst_fn = current_function_decl;
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
- case OFFSET_TYPE:
- WALK_SUBTREE (TREE_TYPE (*tp));
- WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));
+ id.copy_decl = copy_decl_no_change;
+ id.transform_call_graph_edges = CB_CGE_DUPLICATE;
+ id.transform_new_cfg = false;
+ id.transform_return_to_modify = false;
+ id.transform_lang_insert_block = false;
-#ifdef INLINER_FOR_JAVA
- case EXIT_BLOCK_EXPR:
- WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
+ /* Walk the tree once to find local labels. */
+ walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
- case SAVE_EXPR:
- WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
-#endif /* INLINER_FOR_JAVA */
+ /* Walk the tree again, copying, remapping, and unsaving. */
+ walk_tree (&expr, unsave_r, &id, NULL);
- default:
- abort ();
+ /* Clean up. */
+ splay_tree_delete (id.decl_map);
+
+ return expr;
+}
+
+/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
+
+static tree
+debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
+{
+ if (*tp == data)
+ return (tree) data;
+ else
+ return NULL;
+}
+
+bool
+debug_find_tree (tree top, tree search)
+{
+ return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
+}
+
+
+/* Declare the variables created by the inliner.  Add all the variables
+   in VARS to BLOCK.  */
+
+static void
+declare_inline_vars (tree block, tree vars)
+{
+ tree t;
+ for (t = vars; t; t = TREE_CHAIN (t))
+ {
+ DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
+ gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
+ cfun->unexpanded_var_list =
+ tree_cons (NULL_TREE, t,
+ cfun->unexpanded_var_list);
}
- /* We didn't find what we were looking for. */
- return NULL_TREE;
+ if (block)
+ BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
+}
+
-#undef WALK_SUBTREE
-#undef WALK_SUBTREE_TAIL
+/* Finish up the common parts of copying DECL into COPY. DECL originally
+ lived in ID->src_fn; COPY will live in ID->dst_fn. Used by all of the
+ copy_decl_* variants. */
+
+static tree
+copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
+{
+ /* Don't generate debug information for the copy if we wouldn't have
+ generated it for the original either. */
+ DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
+ DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
+
+ /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
+ declaration inspired this copy. */
+ DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
+
+ /* The new variable/label has no RTL yet. */
+ if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
+ && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
+ SET_DECL_RTL (copy, NULL_RTX);
+
+ /* These args would always appear unused, if not for this. */
+ TREE_USED (copy) = 1;
+
+ /* Set the context for the new declaration. */
+ if (!DECL_CONTEXT (decl))
+ /* Globals stay global. */
+ ;
+ else if (DECL_CONTEXT (decl) != id->src_fn)
+ /* Things that weren't in the scope of the function we're inlining
+ from aren't in the scope we're inlining to, either. */
+ ;
+ else if (TREE_STATIC (decl))
+ /* Function-scoped static variables should stay in the original
+ function. */
+ ;
+ else
+ /* Ordinary automatic local variables are now in the scope of the
+ new function. */
+ DECL_CONTEXT (copy) = id->dst_fn;
+
+ return copy;
}
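+
+/* For example (hypothetical decls): when inlining a callee into a
+   caller, an automatic local of the callee is given DECL_CONTEXT
+   equal to the caller, whereas a function-scoped static of the
+   callee keeps its original context, so only one instance of it
+   ever exists. */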
-/* Like walk_tree, but does not walk duplicate nodes more than
- once. */
+static tree
+copy_decl_to_var (tree decl, copy_body_data *id)
+{
+ tree copy, type;
-tree
-walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
+ gcc_assert (TREE_CODE (decl) == PARM_DECL
+ || TREE_CODE (decl) == RESULT_DECL);
+
+ type = TREE_TYPE (decl);
+
+ copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
+ TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
+ TREE_READONLY (copy) = TREE_READONLY (decl);
+ TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+ DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+
+ return copy_decl_for_dup_finish (id, decl, copy);
+}
+
+/* Like copy_decl_to_var, but create a return slot object instead of a
+ pointer variable for return by invisible reference. */
+
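+/* (Illustration: for a function returning a large struct by
+   invisible reference, DECL_RESULT has pointer-to-struct type; the
+   variable built here gets the struct type itself, so that it can
+   serve as the return slot.) */
+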
+static tree
+copy_result_decl_to_var (tree decl, copy_body_data *id)
{
- tree result;
- htab_t htab;
+ tree copy, type;
+
+ gcc_assert (TREE_CODE (decl) == PARM_DECL
+ || TREE_CODE (decl) == RESULT_DECL);
+
+ type = TREE_TYPE (decl);
+ if (DECL_BY_REFERENCE (decl))
+ type = TREE_TYPE (type);
+
+ copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
+ TREE_READONLY (copy) = TREE_READONLY (decl);
+ TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+ if (!DECL_BY_REFERENCE (decl))
+ {
+ TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
+ DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+ }
- htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
- result = walk_tree (tp, func, data, htab);
- htab_delete (htab);
- return result;
+ return copy_decl_for_dup_finish (id, decl, copy);
}
-/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
-tree
-copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+static tree
+copy_decl_no_change (tree decl, copy_body_data *id)
{
- enum tree_code code = TREE_CODE (*tp);
+ tree copy;
- /* We make copies of most nodes. */
- if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
- || TREE_CODE_CLASS (code) == 'c'
- || code == TREE_LIST
- || code == TREE_VEC
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
+ copy = copy_node (decl);
+
+ /* The COPY is not abstract; it will be generated in DST_FN. */
+ DECL_ABSTRACT (copy) = 0;
+ lang_hooks.dup_lang_specific_decl (copy);
+
+ /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
+ been taken; it's for internal bookkeeping in expand_goto_internal. */
+ if (TREE_CODE (copy) == LABEL_DECL)
{
- /* Because the chain gets clobbered when we make a copy, we save it
- here. */
- tree chain = TREE_CHAIN (*tp);
+ TREE_ADDRESSABLE (copy) = 0;
+ LABEL_DECL_UID (copy) = -1;
+ }
- /* Copy the node. */
- *tp = copy_node (*tp);
+ return copy_decl_for_dup_finish (id, decl, copy);
+}
- /* Now, restore the chain, if appropriate. That will cause
- walk_tree to walk into the chain as well. */
- if (code == PARM_DECL || code == TREE_LIST
-#ifndef INLINER_FOR_JAVA
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
- || STATEMENT_CODE_P (code))
- TREE_CHAIN (*tp) = chain;
+static tree
+copy_decl_maybe_to_var (tree decl, copy_body_data *id)
+{
+ if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
+ return copy_decl_to_var (decl, id);
+ else
+ return copy_decl_no_change (decl, id);
+}
- /* For now, we don't update BLOCKs when we make copies. So, we
- have to nullify all scope-statements. */
- if (TREE_CODE (*tp) == SCOPE_STMT)
- SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
-#else /* INLINER_FOR_JAVA */
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
- TREE_CHAIN (*tp) = chain;
-#endif /* INLINER_FOR_JAVA */
+/* Return a copy of the function's argument list, with each PARM_DECL
+ remapped for the new function. */
+static tree
+copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
+{
+ tree *arg_copy, *parg;
+
+ arg_copy = &orig_parm;
+ for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
+ {
+ tree new = remap_decl (*parg, id);
+ lang_hooks.dup_lang_specific_decl (new);
+ TREE_CHAIN (new) = TREE_CHAIN (*parg);
+ *parg = new;
}
- else if (TREE_CODE_CLASS (code) == 't')
- *walk_subtrees = 0;
+ return orig_parm;
+}
- return NULL_TREE;
+/* Return a copy of the function's static chain. */
+static tree
+copy_static_chain (tree static_chain, copy_body_data * id)
+{
+ tree *chain_copy, *pvar;
+
+ chain_copy = &static_chain;
+ for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
+ {
+ tree new = remap_decl (*pvar, id);
+ lang_hooks.dup_lang_specific_decl (new);
+ TREE_CHAIN (new) = TREE_CHAIN (*pvar);
+ *pvar = new;
+ }
+ return static_chain;
}
-/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
- information indicating to what new SAVE_EXPR this one should be
- mapped, use that one. Otherwise, create a new node and enter it in
- ST. FN is the function into which the copy will be placed. */
+/* Return true if the function is allowed to be versioned.
+ This is a guard for the versioning functionality. */
+bool
+tree_versionable_function_p (tree fndecl)
+{
+ if (fndecl == NULL_TREE)
+ return false;
+ /* ??? There are cases where a function is
+ uninlinable but can be versioned. */
+ if (!tree_inlinable_function_p (fndecl))
+ return false;
+
+ return true;
+}
+/* Create a copy of a function's tree.
+ OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
+ of the original function and the new copied function
+ respectively. TREE_MAP, if non-NULL, maps DECL trees
+ in the original body to the trees that are to replace
+ them in the copy. If UPDATE_CLONES is set, the call_stmt
+ fields of edges of clones of the function will be updated. */
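+
+/* Illustrative sketch (not part of this change; PARM and CST are
+   hypothetical) of how a caller might ask for PARM to be replaced
+   by CST in the new version:
+
+	varray_type tree_map;
+	struct ipa_replace_map *map = xmalloc (sizeof (*map));
+
+	map->old_tree = parm;
+	map->new_tree = cst;
+	map->replace_p = true;
+	VARRAY_GENERIC_PTR_INIT (tree_map, 1, "tree_map");
+	VARRAY_PUSH_GENERIC_PTR (tree_map, map);
+	tree_function_versioning (old_decl, new_decl, tree_map, false);
+
+   Entries with replace_p set are entered into the decl map before
+   the body is copied; see the tree_map loop below. */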
void
-remap_save_expr (tree *tp, void *st_, tree fn, int *walk_subtrees)
+tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
+ bool update_clones)
{
- splay_tree st = (splay_tree) st_;
- splay_tree_node n;
+ struct cgraph_node *old_version_node;
+ struct cgraph_node *new_version_node;
+ copy_body_data id;
+ tree p, new_fndecl;
+ unsigned i;
+ struct ipa_replace_map *replace_info;
+ basic_block old_entry_block;
+ tree t_step;
+
+ gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
+ && TREE_CODE (new_decl) == FUNCTION_DECL);
+ DECL_POSSIBLY_INLINED (old_decl) = 1;
+
+ old_version_node = cgraph_node (old_decl);
+ new_version_node = cgraph_node (new_decl);
+
+ allocate_struct_function (new_decl);
+ /* Cfun points to the newly allocated function struct at this point. */
+ cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
+
+ DECL_ARTIFICIAL (new_decl) = 1;
+ DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
+
+ /* Generate a new name for the new version. */
+ if (!update_clones)
+ DECL_NAME (new_decl) = create_tmp_var_name (NULL);
+ /* Create a new SYMBOL_REF rtx for the new name. */
+ if (DECL_RTL (old_decl) != NULL)
+ {
+ SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
+ XEXP (DECL_RTL (new_decl), 0) =
+ gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
+ IDENTIFIER_POINTER (DECL_NAME (new_decl)));
+ }
- /* See if we already encountered this SAVE_EXPR. */
- n = splay_tree_lookup (st, (splay_tree_key) *tp);
+ /* Prepare the data structures for the tree copy. */
+ memset (&id, 0, sizeof (id));
+
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+ id.src_fn = old_decl;
+ id.dst_fn = new_decl;
+ id.src_node = old_version_node;
+ id.dst_node = new_version_node;
+ id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
+
+ id.copy_decl = copy_decl_no_change;
+ id.transform_call_graph_edges
+ = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
+ id.transform_new_cfg = true;
+ id.transform_return_to_modify = false;
+ id.transform_lang_insert_block = false;
+
+ current_function_decl = new_decl;
+
+ /* Copy the function's static chain. */
+ p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
+ if (p)
+ DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
+ copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
+ &id);
+ /* Copy the function's arguments. */
+ if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
+ DECL_ARGUMENTS (new_decl) =
+ copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
+
+ /* If there's a tree_map, prepare for substitution. */
+ if (tree_map)
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
+ {
+ replace_info = VARRAY_GENERIC_PTR (tree_map, i);
+ if (replace_info->replace_p)
+ insert_decl_map (&id, replace_info->old_tree,
+ replace_info->new_tree);
+ }
+
+ DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
+
+ /* Renumber the lexical scoping (non-code) blocks consecutively. */
+ number_blocks (id.dst_fn);
+
+ if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
+ /* Add local vars. */
+ for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
+ t_step; t_step = TREE_CHAIN (t_step))
+ {
+ tree var = TREE_VALUE (t_step);
+ if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
+ cfun->unexpanded_var_list);
+ else
+ cfun->unexpanded_var_list =
+ tree_cons (NULL_TREE, remap_decl (var, &id),
+ cfun->unexpanded_var_list);
+ }
+
+ /* Copy the function's body. */
+ old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+ (DECL_STRUCT_FUNCTION (old_decl));
+ new_fndecl = copy_body (&id,
+ old_entry_block->count,
+ old_entry_block->frequency, NULL, NULL);
+
+ DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
- /* If we didn't already remap this SAVE_EXPR, do so now. */
- if (!n)
- {
- tree t = copy_node (*tp);
+ DECL_STRUCT_FUNCTION (new_decl)->cfg =
+ DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
+ DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
+ DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
+ DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
+ DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
+ DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
- /* The SAVE_EXPR is now part of the function into which we
- are inlining this body. */
- SAVE_EXPR_CONTEXT (t) = fn;
- /* And we haven't evaluated it yet. */
- SAVE_EXPR_RTL (t) = NULL_RTX;
- /* Remember this SAVE_EXPR. */
- n = splay_tree_insert (st,
- (splay_tree_key) *tp,
- (splay_tree_value) t);
- /* Make sure we don't remap an already-remapped SAVE_EXPR. */
- splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
+ if (DECL_RESULT (old_decl) != NULL_TREE)
+ {
+ tree *res_decl = &DECL_RESULT (old_decl);
+ DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
+ lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
}
- else
- /* We've already walked into this SAVE_EXPR, so we needn't do it
- again. */
- *walk_subtrees = 0;
+
+ current_function_decl = NULL;
+ /* Renumber the lexical scoping (non-code) blocks consecutively. */
+ number_blocks (new_decl);
- /* Replace this SAVE_EXPR with the copy. */
- *tp = (tree) n->value;
+ /* Clean up. */
+ splay_tree_delete (id.decl_map);
+ fold_cond_expr_cond ();
+ return;
}
-#ifdef INLINER_FOR_JAVA
-/* Add STMT to EXISTING if possible, otherwise create a new
- COMPOUND_EXPR and add STMT to it. */
+/* Duplicate a type, fields and all. */
-static tree
-add_stmt_to_compound (tree existing, tree type, tree stmt)
+tree
+build_duplicate_type (tree type)
{
- if (!stmt)
- return existing;
- else if (existing)
- return build (COMPOUND_EXPR, type, existing, stmt);
- else
- return stmt;
-}
+ struct copy_body_data id;
-#endif /* INLINER_FOR_JAVA */
+ memset (&id, 0, sizeof (id));
+ id.src_fn = current_function_decl;
+ id.dst_fn = current_function_decl;
+ id.src_cfun = cfun;
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+
+ type = remap_type_1 (type, &id);
+
+ splay_tree_delete (id.decl_map);
+
+ return type;
+}
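+
+/* For illustration only: a pass that must adjust a type without
+   perturbing other users of it can operate on a private copy, e.g.
+
+	tree copy = build_duplicate_type (orig_type);
+	TYPE_NAME (copy) = NULL_TREE;
+
+   ORIG_TYPE and its FIELD_DECLs are left untouched, since
+   remap_type_1 duplicates the fields as well. */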