Diffstat (limited to 'contrib/gcc/expr.c')

 contrib/gcc/expr.c | 7619 +-
 1 file changed, 3128 insertions(+), 4491 deletions(-)
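A change that runs mechanically through this diff is the replacement of the old PROTO macro by PARAMS in every forward declaration. As a hedged sketch of what that macro family does (modeled on GCC's ansidecl.h; the exact feature-test guard shown here is an assumption):

  /* Keep the prototype under an ANSI compiler; degrade to an empty
     K&R parameter list otherwise.  */
  #ifdef __STDC__
  #define PARAMS(args) args
  #else
  #define PARAMS(args) ()
  #endif

Thus a declaration like "static rtx get_subtarget PARAMS ((rtx));" from the new code expands to a full prototype under ANSI C and to "static rtx get_subtarget ();" under a pre-ANSI compiler; the doubled parentheses are what make the empty-list fallback work.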
diff --git a/contrib/gcc/expr.c b/contrib/gcc/expr.c index b8bcc86..2b8b085 100644 --- a/contrib/gcc/expr.c +++ b/contrib/gcc/expr.c @@ -1,24 +1,23 @@ /* Convert tree expression to rtl instructions, for GNU compiler. Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, - 2000, 2001 Free Software Foundation, Inc. + 2000, 2001, 2002 Free Software Foundation, Inc. -This file is part of GNU CC. +This file is part of GCC. -GNU CC is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 2, or (at your option) -any later version. +GCC is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free +Software Foundation; either version 2, or (at your option) any later +version. -GNU CC is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. +GCC is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +for more details. You should have received a copy of the GNU General Public License -along with GNU CC; see the file COPYING. If not, write to -the Free Software Foundation, 59 Temple Place - Suite 330, -Boston, MA 02111-1307, USA. */ - +along with GCC; see the file COPYING. If not, write to the Free +Software Foundation, 59 Temple Place - Suite 330, Boston, MA +02111-1307, USA. */ #include "config.h" #include "system.h" @@ -31,18 +30,21 @@ Boston, MA 02111-1307, USA. */ #include "hard-reg-set.h" #include "except.h" #include "function.h" -#include "insn-flags.h" -#include "insn-codes.h" #include "insn-config.h" -/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ +#include "insn-attr.h" +/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ #include "expr.h" +#include "optabs.h" +#include "libfuncs.h" #include "recog.h" +#include "reload.h" #include "output.h" #include "typeclass.h" -#include "defaults.h" #include "toplev.h" - -#define CEIL(x,y) (((x) + (y) - 1) / (y)) +#include "ggc.h" +#include "langhooks.h" +#include "intl.h" +#include "tm_p.h" /* Decide whether a function's arguments should be processed from first to last or from last to first. @@ -53,7 +55,7 @@ Boston, MA 02111-1307, USA. */ #ifdef PUSH_ROUNDING #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD) -#define PUSH_ARGS_REVERSED /* If it's last to first */ +#define PUSH_ARGS_REVERSED /* If it's last to first. */ #endif #endif @@ -79,47 +81,8 @@ Boston, MA 02111-1307, USA. */ the same indirect address eventually. */ int cse_not_expected; -/* Nonzero to generate code for all the subroutines within an - expression before generating the upper levels of the expression. - Nowadays this is never zero. */ -int do_preexpand_calls = 1; - -/* Number of units that we should eventually pop off the stack. - These are the arguments to function calls that have already returned. */ -int pending_stack_adjust; - -/* Under some ABIs, it is the caller's responsibility to pop arguments - pushed for function calls. A naive implementation would simply pop - the arguments immediately after each call. 
However, if several - function calls are made in a row, it is typically cheaper to pop - all the arguments after all of the calls are complete since a - single pop instruction can be used. Therefore, GCC attempts to - defer popping the arguments until absolutely necessary. (For - example, at the end of a conditional, the arguments must be popped, - since code outside the conditional won't know whether or not the - arguments need to be popped.) - - When INHIBIT_DEFER_POP is non-zero, however, the compiler does not - attempt to defer pops. Instead, the stack is popped immediately - after each call. Rather then setting this variable directly, use - NO_DEFER_POP and OK_DEFER_POP. */ -int inhibit_defer_pop; - -/* Nonzero means __builtin_saveregs has already been done in this function. - The value is the pseudoreg containing the value __builtin_saveregs - returned. */ -static rtx saveregs_value; - -/* Similarly for __builtin_apply_args. */ -static rtx apply_args_value; - -/* Don't check memory usage, since code is being emitted to check a memory - usage. Used when current_function_check_memory_usage is true, to avoid - infinite recursion. */ -static int in_check_memory_usage; - -/* Postincrements that still need to be expanded. */ -static rtx pending_chain; +/* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */ +static tree placeholder_list = 0; /* This structure is used by move_by_pieces to describe the move to be performed. */ @@ -129,80 +92,71 @@ struct move_by_pieces rtx to_addr; int autinc_to; int explicit_inc_to; - int to_struct; rtx from; rtx from_addr; int autinc_from; int explicit_inc_from; - int from_struct; - int len; - int offset; + unsigned HOST_WIDE_INT len; + HOST_WIDE_INT offset; int reverse; }; -/* This structure is used by clear_by_pieces to describe the clear to +/* This structure is used by store_by_pieces to describe the clear to be performed. 
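Both by-pieces descriptors here carry autinc_* and explicit_inc_* fields that record whether an address already auto-modifies itself. As a hedged illustration of the test those flags summarize (the helper name autinc_p is invented; the four rtx codes are the ones this file actually checks):

  static int
  autinc_p (addr)
       rtx addr;
  {
    enum rtx_code code = GET_CODE (addr);
    return (code == PRE_INC || code == PRE_DEC
            || code == POST_INC || code == POST_DEC);
  }

An address such as (mem:SI (post_inc:SI (reg:SI 8))) therefore counts as auto-incrementing, and the by-pieces driver avoids emitting explicit add instructions to step it.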
*/ -struct clear_by_pieces +struct store_by_pieces { rtx to; rtx to_addr; int autinc_to; int explicit_inc_to; - int to_struct; - int len; - int offset; + unsigned HOST_WIDE_INT len; + HOST_WIDE_INT offset; + rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); + PTR constfundata; int reverse; }; extern struct obstack permanent_obstack; -extern rtx arg_pointer_save_area; - -static rtx get_push_address PROTO ((int)); - -static rtx enqueue_insn PROTO((rtx, rtx)); -static void init_queue PROTO((void)); -static int move_by_pieces_ninsns PROTO((unsigned int, int)); -static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode, - struct move_by_pieces *)); -static void clear_by_pieces PROTO((rtx, int, int)); -static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode, - struct clear_by_pieces *)); -static int is_zeros_p PROTO((tree)); -static int mostly_zeros_p PROTO((tree)); -static void store_constructor_field PROTO((rtx, int, int, enum machine_mode, - tree, tree, int)); -static void store_constructor PROTO((tree, rtx, int)); -static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree, - enum machine_mode, int, int, - int, int)); -static enum memory_use_mode - get_memory_usage_from_modifier PROTO((enum expand_modifier)); -static tree save_noncopied_parts PROTO((tree, tree)); -static tree init_noncopied_parts PROTO((tree, tree)); -static int safe_from_p PROTO((rtx, tree, int)); -static int fixed_type_p PROTO((tree)); -static rtx var_rtx PROTO((tree)); -static int get_pointer_alignment PROTO((tree, unsigned)); -static tree string_constant PROTO((tree, tree *)); -static tree c_strlen PROTO((tree)); -static rtx get_memory_rtx PROTO((tree)); -static rtx expand_builtin PROTO((tree, rtx, rtx, - enum machine_mode, int)); -static int apply_args_size PROTO((void)); -static int apply_result_size PROTO((void)); -static rtx result_vector PROTO((int, rtx)); -static rtx expand_builtin_setjmp PROTO((tree, rtx)); -static rtx expand_builtin_apply_args PROTO((void)); -static rtx expand_builtin_apply PROTO((rtx, rtx, rtx)); -static void expand_builtin_return PROTO((rtx)); -static rtx expand_increment PROTO((tree, int, int)); -static void preexpand_calls PROTO((tree)); -static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx)); -static void do_jump_by_parts_equality PROTO((tree, rtx, rtx)); -static void do_jump_for_compare PROTO((rtx, rtx, rtx)); -static rtx compare PROTO((tree, enum rtx_code, enum rtx_code)); -static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int)); + +static rtx enqueue_insn PARAMS ((rtx, rtx)); +static unsigned HOST_WIDE_INT move_by_pieces_ninsns + PARAMS ((unsigned HOST_WIDE_INT, + unsigned int)); +static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode, + struct move_by_pieces *)); +static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT, + enum machine_mode)); +static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT, + unsigned int)); +static void store_by_pieces_1 PARAMS ((struct store_by_pieces *, + unsigned int)); +static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...), + enum machine_mode, + struct store_by_pieces *)); +static rtx get_subtarget PARAMS ((rtx)); +static int is_zeros_p PARAMS ((tree)); +static int mostly_zeros_p PARAMS ((tree)); +static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT, + HOST_WIDE_INT, enum machine_mode, + tree, tree, int, int)); +static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT)); +static rtx store_field PARAMS ((rtx, 
HOST_WIDE_INT, + HOST_WIDE_INT, enum machine_mode, + tree, enum machine_mode, int, tree, + int)); +static rtx var_rtx PARAMS ((tree)); +static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree)); +static rtx expand_increment PARAMS ((tree, int, int)); +static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx)); +static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx)); +static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, + rtx, rtx)); +static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int)); +#ifdef PUSH_ROUNDING +static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree)); +#endif +static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx)); /* Record for each mode whether we can move a register directly to or from an object of that mode in memory. If we can't, we won't try @@ -218,16 +172,16 @@ static char direct_store[NUM_MACHINE_MODES]; #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti) #define MOVE_RATIO 2 #else -/* If we are optimizing for space (-Os), cut down the default move ratio */ +/* If we are optimizing for space (-Os), cut down the default move ratio. */ #define MOVE_RATIO (optimize_size ? 3 : 15) #endif #endif /* This macro is used to determine whether move_by_pieces should be called - to perform a structure copy. */ + to perform a structure copy. */ #ifndef MOVE_BY_PIECES_P -#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \ - (SIZE, ALIGN) < MOVE_RATIO) +#define MOVE_BY_PIECES_P(SIZE, ALIGN) \ + (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO) #endif /* This array records the insn_code of insns to perform block moves. */ @@ -239,15 +193,7 @@ enum insn_code clrstr_optab[NUM_MACHINE_MODES]; /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */ #ifndef SLOW_UNALIGNED_ACCESS -#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT -#endif - -/* Register mappings for target machines without register windows. */ -#ifndef INCOMING_REGNO -#define INCOMING_REGNO(OUT) (OUT) -#endif -#ifndef OUTGOING_REGNO -#define OUTGOING_REGNO(IN) (IN) +#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT #endif /* This is run once per compilation to set up which modes can be used @@ -260,15 +206,9 @@ init_expr_once () enum machine_mode mode; int num_clobbers; rtx mem, mem1; - char *free_point; start_sequence (); - /* Since we are on the permanent obstack, we must be sure we save this - spot AFTER we call start_sequence, since it will reuse the rtl it - makes. */ - free_point = (char *) oballoc (0); - /* Try indexing by frame ptr and try by stack ptr. It is known that on the Convex the stack ptr isn't a valid index. With luck, one or the other is valid on any machine. */ @@ -324,7 +264,6 @@ init_expr_once () } end_sequence (); - obfree (free_point); } /* This is run at the start of compiling a function. */ @@ -332,50 +271,44 @@ init_expr_once () void init_expr () { - init_queue (); + cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status)); + pending_chain = 0; pending_stack_adjust = 0; + stack_pointer_delta = 0; inhibit_defer_pop = 0; saveregs_value = 0; apply_args_value = 0; forced_labels = 0; } -/* Save all variables describing the current status into the structure *P. - This is used before starting a nested function. 
*/ +void +mark_expr_status (p) + struct expr_status *p; +{ + if (p == NULL) + return; + + ggc_mark_rtx (p->x_saveregs_value); + ggc_mark_rtx (p->x_apply_args_value); + ggc_mark_rtx (p->x_forced_labels); +} void -save_expr_status (p) - struct function *p; +free_expr_status (f) + struct function *f; { - p->pending_chain = pending_chain; - p->pending_stack_adjust = pending_stack_adjust; - p->inhibit_defer_pop = inhibit_defer_pop; - p->saveregs_value = saveregs_value; - p->apply_args_value = apply_args_value; - p->forced_labels = forced_labels; - - pending_chain = NULL_RTX; - pending_stack_adjust = 0; - inhibit_defer_pop = 0; - saveregs_value = 0; - apply_args_value = 0; - forced_labels = 0; + free (f->expr); + f->expr = NULL; } -/* Restore all variables describing the current status from the structure *P. - This is used after a nested function. */ +/* Small sanity check that the queue is empty at the end of a function. */ void -restore_expr_status (p) - struct function *p; +finish_expr_for_function () { - pending_chain = p->pending_chain; - pending_stack_adjust = p->pending_stack_adjust; - inhibit_defer_pop = p->inhibit_defer_pop; - saveregs_value = p->saveregs_value; - apply_args_value = p->apply_args_value; - forced_labels = p->forced_labels; + if (pending_chain) + abort (); } /* Manage the queue of increment instructions to be output @@ -392,9 +325,8 @@ static rtx enqueue_insn (var, body) rtx var, body; { - pending_chain = gen_rtx_QUEUED (GET_MODE (var), - var, NULL_RTX, NULL_RTX, body, - pending_chain); + pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX, + body, pending_chain); return pending_chain; } @@ -415,10 +347,10 @@ enqueue_insn (var, body) rtx protect_from_queue (x, modify) - register rtx x; + rtx x; int modify; { - register RTX_CODE code = GET_CODE (x); + RTX_CODE code = GET_CODE (x); #if 0 /* A QUEUED can hang around after the queue is forced out. */ /* Shortcut for most common case. */ @@ -436,22 +368,23 @@ protect_from_queue (x, modify) if (code == MEM && GET_MODE (x) != BLKmode && GET_CODE (XEXP (x, 0)) == QUEUED && !modify) { - register rtx y = XEXP (x, 0); - register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y)); - - RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x); - MEM_COPY_ATTRIBUTES (new, x); - MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x); + rtx y = XEXP (x, 0); + rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y)); if (QUEUED_INSN (y)) { - register rtx temp = gen_reg_rtx (GET_MODE (new)); + rtx temp = gen_reg_rtx (GET_MODE (x)); + emit_insn_before (gen_move_insn (temp, new), QUEUED_INSN (y)); return temp; } - return new; + + /* Copy the address into a pseudo, so that the returned value + remains correct across calls to emit_queue. */ + return replace_equiv_address (new, copy_to_reg (XEXP (new, 0))); } + /* Otherwise, recursively protect the subexpressions of all the kinds of rtx's that can contain a QUEUED. */ if (code == MEM) @@ -476,9 +409,11 @@ protect_from_queue (x, modify) } return x; } - /* If the increment has not happened, use the variable itself. */ + /* If the increment has not happened, use the variable itself. Copy it + into a new pseudo so that the value remains correct across calls to + emit_queue. */ if (QUEUED_INSN (x) == 0) - return QUEUED_VAR (x); + return copy_to_reg (QUEUED_VAR (x)); /* If the increment has happened and a pre-increment copy exists, use that copy. 
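   For reference, a QUEUED rtx as built by enqueue_insn above packs
   five operands (accessor macros from rtl.h; the order is inferred
   from the gen_rtx_QUEUED call):

     QUEUED_VAR (q)    the variable being incremented
     QUEUED_INSN (q)   the increment insn, once it has been emitted
     QUEUED_COPY (q)   a copy of the variable's pre-increment value
     QUEUED_BODY (q)   the increment body to emit
     QUEUED_NEXT (q)   the next entry on pending_chain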
*/ if (QUEUED_COPY (x) != 0) @@ -500,7 +435,7 @@ int queued_subexp_p (x) rtx x; { - register enum rtx_code code = GET_CODE (x); + enum rtx_code code = GET_CODE (x); switch (code) { case QUEUED: @@ -522,7 +457,7 @@ queued_subexp_p (x) void emit_queue () { - register rtx p; + rtx p; while ((p = pending_chain)) { rtx body = QUEUED_BODY (p); @@ -537,13 +472,6 @@ emit_queue () pending_chain = QUEUED_NEXT (p); } } - -static void -init_queue () -{ - if (pending_chain) - abort (); -} /* Copy data from FROM to TO, where the machine modes are not the same. Both modes may be integer, or both may be floating. @@ -552,7 +480,7 @@ init_queue () void convert_move (to, from, unsignedp) - register rtx to, from; + rtx to, from; int unsignedp; { enum machine_mode to_mode = GET_MODE (to); @@ -591,9 +519,26 @@ convert_move (to, from, unsignedp) return; } + if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) + { + if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode)) + abort (); + + if (VECTOR_MODE_P (to_mode)) + from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); + else + to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0); + + emit_move_insn (to, from); + return; + } + + if (to_real != from_real) + abort (); + if (to_real) { - rtx value; + rtx value, insns; if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)) { @@ -605,7 +550,7 @@ convert_move (to, from, unsignedp) return; } } - + #ifdef HAVE_trunchfqf2 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode) { @@ -767,7 +712,7 @@ convert_move (to, from, unsignedp) case TFmode: libcall = extendsftf2_libfunc; break; - + default: break; } @@ -787,7 +732,7 @@ convert_move (to, from, unsignedp) case TFmode: libcall = extenddftf2_libfunc; break; - + default: break; } @@ -803,7 +748,7 @@ convert_move (to, from, unsignedp) case DFmode: libcall = truncxfdf2_libfunc; break; - + default: break; } @@ -819,12 +764,12 @@ convert_move (to, from, unsignedp) case DFmode: libcall = trunctfdf2_libfunc; break; - + default: break; } break; - + default: break; } @@ -833,9 +778,13 @@ convert_move (to, from, unsignedp) /* This conversion is not implemented yet. */ abort (); - value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode, + start_sequence (); + value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode, 1, from, from_mode); - emit_move_insn (to, value); + insns = get_insns (); + end_sequence (); + emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode, + from)); return; } @@ -906,11 +855,11 @@ convert_move (to, from, unsignedp) { #ifdef HAVE_slt if (HAVE_slt - && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode + && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode && STORE_FLAG_VALUE == -1) { emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX, - lowpart_mode, 0, 0); + lowpart_mode, 0); fill_value = gen_reg_rtx (word_mode); emit_insn (gen_slt (fill_value)); } @@ -961,7 +910,7 @@ convert_move (to, from, unsignedp) return; } - /* Handle pointer conversion */ /* SPEE 900220 */ + /* Handle pointer conversion. */ /* SPEE 900220. */ if (to_mode == PQImode) { if (from_mode != QImode) @@ -1022,12 +971,19 @@ convert_move (to, from, unsignedp) else { #ifdef HAVE_extendpsisi2 - if (HAVE_extendpsisi2) + if (! 
unsignedp && HAVE_extendpsisi2) { emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN); return; } #endif /* HAVE_extendpsisi2 */ +#ifdef HAVE_zero_extendpsisi2 + if (unsignedp && HAVE_zero_extendpsisi2) + { + emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN); + return; + } +#endif /* HAVE_zero_extendpsisi2 */ abort (); } } @@ -1096,6 +1052,9 @@ convert_move (to, from, unsignedp) if ((code = can_extend_p (to_mode, from_mode, unsignedp)) != CODE_FOR_nothing) { + if (flag_force_mem) + from = force_not_mem (from); + emit_unop_insn (code, to, from, equiv_code); return; } @@ -1111,7 +1070,8 @@ convert_move (to, from, unsignedp) if (((can_extend_p (to_mode, intermediate, unsignedp) != CODE_FOR_nothing) || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate) - && TRULY_NOOP_TRUNCATION (to_mode, intermediate))) + && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), + GET_MODE_BITSIZE (intermediate)))) && (can_extend_p (intermediate, from_mode, unsignedp) != CODE_FOR_nothing)) { @@ -1121,13 +1081,13 @@ convert_move (to, from, unsignedp) } /* No suitable intermediate mode. - Generate what we need with shifts. */ + Generate what we need with shifts. */ shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode) - GET_MODE_BITSIZE (from_mode), 0); from = gen_lowpart (to_mode, force_reg (from_mode, from)); tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount, to, unsignedp); - tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount, + tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount, to, unsignedp); if (tmp != to) emit_move_insn (to, tmp); @@ -1135,7 +1095,7 @@ convert_move (to, from, unsignedp) } } - /* Support special truncate insns for certain modes. */ + /* Support special truncate insns for certain modes. */ if (from_mode == DImode && to_mode == SImode) { @@ -1319,7 +1279,7 @@ convert_modes (mode, oldmode, x, unsignedp) rtx x; int unsignedp; { - register rtx temp; + rtx temp; /* If FROM is a SUBREG that indicates that we have already done at least the required extension, strip it. */ @@ -1331,7 +1291,7 @@ convert_modes (mode, oldmode, x, unsignedp) if (GET_MODE (x) != VOIDmode) oldmode = GET_MODE (x); - + if (mode == oldmode) return x; @@ -1392,7 +1352,7 @@ convert_modes (mode, oldmode, x, unsignedp) && (val & ((HOST_WIDE_INT) 1 << (width - 1)))) val |= (HOST_WIDE_INT) (-1) << width; - return GEN_INT (val); + return GEN_INT (trunc_int_for_mode (val, mode)); } return gen_lowpart (mode, x); @@ -1403,43 +1363,63 @@ convert_modes (mode, oldmode, x, unsignedp) return temp; } - /* This macro is used to determine what the largest unit size that - move_by_pieces can use is. */ + move_by_pieces can use is. */ /* MOVE_MAX_PIECES is the number of bytes at a time which we can move efficiently, as opposed to MOVE_MAX which is the maximum - number of bhytes we can move with a single instruction. */ + number of bytes we can move with a single instruction. */ #ifndef MOVE_MAX_PIECES #define MOVE_MAX_PIECES MOVE_MAX #endif -/* Generate several move instructions to copy LEN bytes - from block FROM to block TO. (These are MEM rtx's with BLKmode). - The caller must pass FROM and TO - through protect_from_queue before calling. - ALIGN (in bytes) is maximum alignment we can assume. */ +/* Generate several move instructions to copy LEN bytes from block FROM to + block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM + and TO through protect_from_queue before calling. 
+ + If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is + used to push FROM to the stack. + + ALIGN is maximum alignment we can assume. */ void move_by_pieces (to, from, len, align) rtx to, from; - int len, align; + unsigned HOST_WIDE_INT len; + unsigned int align; { struct move_by_pieces data; - rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0); - int max_size = MOVE_MAX_PIECES + 1; + rtx to_addr, from_addr = XEXP (from, 0); + unsigned int max_size = MOVE_MAX_PIECES + 1; enum machine_mode mode = VOIDmode, tmode; enum insn_code icode; data.offset = 0; - data.to_addr = to_addr; data.from_addr = from_addr; - data.to = to; + if (to) + { + to_addr = XEXP (to, 0); + data.to = to; + data.autinc_to + = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC + || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); + data.reverse + = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); + } + else + { + to_addr = NULL_RTX; + data.to = NULL_RTX; + data.autinc_to = 1; +#ifdef STACK_GROWS_DOWNWARD + data.reverse = 1; +#else + data.reverse = 0; +#endif + } + data.to_addr = to_addr; data.from = from; - data.autinc_to - = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC - || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); data.autinc_from = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC || GET_CODE (from_addr) == POST_INC @@ -1447,21 +1427,16 @@ move_by_pieces (to, from, len, align) data.explicit_inc_from = 0; data.explicit_inc_to = 0; - data.reverse - = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); if (data.reverse) data.offset = len; data.len = len; - data.to_struct = MEM_IN_STRUCT_P (to); - data.from_struct = MEM_IN_STRUCT_P (from); - /* If copying requires more than two move insns, copy addresses to registers (to make displacements shorter) and use post-increment if available. */ if (!(data.autinc_from && data.autinc_to) && move_by_pieces_ninsns (len, align) > 2) { - /* Find the mode of the largest move... */ + /* Find the mode of the largest move... */ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) if (GET_MODE_SIZE (tmode) < max_size) @@ -1497,9 +1472,9 @@ move_by_pieces (to, from, len, align) data.to_addr = copy_addr_to_reg (to_addr); } - if (! SLOW_UNALIGNED_ACCESS - || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT) - align = MOVE_MAX; + if (! SLOW_UNALIGNED_ACCESS (word_mode, align) + || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) + align = MOVE_MAX * BITS_PER_UNIT; /* First move what we can in the largest integer mode, then go to successively smaller modes. */ @@ -1515,9 +1490,7 @@ move_by_pieces (to, from, len, align) break; icode = mov_optab->handlers[(int) mode].insn_code; - if (icode != CODE_FOR_nothing - && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT, - GET_MODE_SIZE (mode))) + if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) move_by_pieces_1 (GEN_FCN (icode), mode, &data); max_size = GET_MODE_SIZE (mode); @@ -1529,19 +1502,19 @@ move_by_pieces (to, from, len, align) } /* Return number of insns required to move L bytes by pieces. - ALIGN (in bytes) is maximum alignment we can assume. */ + ALIGN (in bits) is maximum alignment we can assume. 
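   A worked example of the count, assuming 4-byte words and full
   alignment: moving l == 11 bytes uses SImode twice (11 / 4 == 2,
   remainder 3), HImode once (3 / 2 == 1, remainder 1), and QImode
   once, for 4 insns in total.  MOVE_BY_PIECES_P compares exactly
   this count against MOVE_RATIO before committing to the piecewise
   strategy.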
*/ -static int +static unsigned HOST_WIDE_INT move_by_pieces_ninsns (l, align) - unsigned int l; - int align; + unsigned HOST_WIDE_INT l; + unsigned int align; { - register int n_insns = 0; - int max_size = MOVE_MAX + 1; + unsigned HOST_WIDE_INT n_insns = 0; + unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1; - if (! SLOW_UNALIGNED_ACCESS - || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT) - align = MOVE_MAX; + if (! SLOW_UNALIGNED_ACCESS (word_mode, align) + || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) + align = MOVE_MAX * BITS_PER_UNIT; while (max_size > 1) { @@ -1557,14 +1530,14 @@ move_by_pieces_ninsns (l, align) break; icode = mov_optab->handlers[(int) mode].insn_code; - if (icode != CODE_FOR_nothing - && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT, - GET_MODE_SIZE (mode))) + if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode); max_size = GET_MODE_SIZE (mode); } + if (l) + abort (); return n_insns; } @@ -1574,44 +1547,58 @@ move_by_pieces_ninsns (l, align) static void move_by_pieces_1 (genfun, mode, data) - rtx (*genfun) PROTO ((rtx, ...)); + rtx (*genfun) PARAMS ((rtx, ...)); enum machine_mode mode; struct move_by_pieces *data; { - register int size = GET_MODE_SIZE (mode); - register rtx to1, from1; + unsigned int size = GET_MODE_SIZE (mode); + rtx to1 = NULL_RTX, from1; while (data->len >= size) { - if (data->reverse) data->offset -= size; + if (data->reverse) + data->offset -= size; - to1 = (data->autinc_to - ? gen_rtx_MEM (mode, data->to_addr) - : copy_rtx (change_address (data->to, mode, - plus_constant (data->to_addr, - data->offset)))); - MEM_IN_STRUCT_P (to1) = data->to_struct; + if (data->to) + { + if (data->autinc_to) + to1 = adjust_automodify_address (data->to, mode, data->to_addr, + data->offset); + else + to1 = adjust_address (data->to, mode, data->offset); + } - from1 - = (data->autinc_from - ? gen_rtx_MEM (mode, data->from_addr) - : copy_rtx (change_address (data->from, mode, - plus_constant (data->from_addr, - data->offset)))); - MEM_IN_STRUCT_P (from1) = data->from_struct; + if (data->autinc_from) + from1 = adjust_automodify_address (data->from, mode, data->from_addr, + data->offset); + else + from1 = adjust_address (data->from, mode, data->offset); if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) - emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size))); + emit_insn (gen_add2_insn (data->to_addr, + GEN_INT (-(HOST_WIDE_INT)size))); if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) - emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size))); + emit_insn (gen_add2_insn (data->from_addr, + GEN_INT (-(HOST_WIDE_INT)size))); + + if (data->to) + emit_insn ((*genfun) (to1, from1)); + else + { +#ifdef PUSH_ROUNDING + emit_single_push_insn (mode, from1, NULL); +#else + abort (); +#endif + } - emit_insn ((*genfun) (to1, from1)); if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0) emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size))); - if (! data->reverse) data->offset += size; + if (! data->reverse) + data->offset += size; data->len -= size; } @@ -1624,23 +1611,22 @@ move_by_pieces_1 (genfun, mode, data) Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode. SIZE is an rtx that says how long they are. - ALIGN is the maximum alignment we can assume they have, - measured in bytes. 
+ ALIGN is the maximum alignment we can assume they have. Return the address of the new block, if memcpy is called and returns it, 0 otherwise. */ rtx -emit_block_move (x, y, size, align) +emit_block_move (x, y, size) rtx x, y; rtx size; - int align; { rtx retval = 0; #ifdef TARGET_MEM_FUNCTIONS static tree fn; tree call_expr, arg_list; #endif + unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y)); if (GET_MODE (x) != BLKmode) abort (); @@ -1667,13 +1653,17 @@ emit_block_move (x, y, size, align) including more than one in the machine description unless the more limited one has some advantage. */ - rtx opalign = GEN_INT (align); + rtx opalign = GEN_INT (align / BITS_PER_UNIT); enum machine_mode mode; + /* Since this is a move insn, we don't care about volatility. */ + volatile_ok = 1; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) { enum insn_code code = movstr_optab[(int) mode]; + insn_operand_predicate_fn pred; if (code != CODE_FOR_nothing /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT @@ -1684,27 +1674,27 @@ emit_block_move (x, y, size, align) && ((unsigned HOST_WIDE_INT) INTVAL (size) <= (GET_MODE_MASK (mode) >> 1))) || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) - && (insn_operand_predicate[(int) code][0] == 0 - || (*insn_operand_predicate[(int) code][0]) (x, BLKmode)) - && (insn_operand_predicate[(int) code][1] == 0 - || (*insn_operand_predicate[(int) code][1]) (y, BLKmode)) - && (insn_operand_predicate[(int) code][3] == 0 - || (*insn_operand_predicate[(int) code][3]) (opalign, - VOIDmode))) + && ((pred = insn_data[(int) code].operand[0].predicate) == 0 + || (*pred) (x, BLKmode)) + && ((pred = insn_data[(int) code].operand[1].predicate) == 0 + || (*pred) (y, BLKmode)) + && ((pred = insn_data[(int) code].operand[3].predicate) == 0 + || (*pred) (opalign, VOIDmode))) { rtx op2; rtx last = get_last_insn (); rtx pat; op2 = convert_to_mode (mode, size, 1); - if (insn_operand_predicate[(int) code][2] != 0 - && ! (*insn_operand_predicate[(int) code][2]) (op2, mode)) + pred = insn_data[(int) code].operand[2].predicate; + if (pred != 0 && ! (*pred) (op2, mode)) op2 = copy_to_mode_reg (mode, op2); pat = GEN_FCN ((int) code) (x, y, op2, opalign); if (pat) { emit_insn (pat); + volatile_ok = 0; return 0; } else @@ -1712,6 +1702,8 @@ emit_block_move (x, y, size, align) } } + volatile_ok = 0; + /* X, Y, or SIZE may have been passed through protect_from_queue. It is unsafe to save the value generated by protect_from_queue @@ -1725,7 +1717,7 @@ emit_block_move (x, y, size, align) To avoid this problem we go ahead and emit code to copy X, Y & SIZE into new pseudos. We can then place those new pseudos into an RTL_EXPR and use them later, even after a call to - emit_queue. + emit_queue. Note this is not strictly needed for library calls since they do not call emit_queue before loading their arguments. However, @@ -1751,7 +1743,7 @@ emit_block_move (x, y, size, align) examine the return value from memcpy. For targets where libcalls and normal calls have different conventions - for returning pointers, we could end up generating incorrect code. + for returning pointers, we could end up generating incorrect code. So instead of using a libcall sequence we build up a suitable CALL_EXPR and expand the call in the normal fashion. */ @@ -1762,20 +1754,19 @@ emit_block_move (x, y, size, align) /* This was copied from except.c, I don't know if all this is necessary in this context or not. 
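   At the C level, the declaration built below is roughly

     extern void *memcpy ();

   i.e. deliberately unprototyped (build_function_type is passed a
   null argument list), and the call eventually expanded is the
   equivalent of memcpy (dst_addr, src_addr, nbytes), with the
   operand names invented here for illustration.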
*/ fn = get_identifier ("memcpy"); - push_obstacks_nochange (); - end_temporary_allocation (); fntype = build_pointer_type (void_type_node); fntype = build_function_type (fntype, NULL_TREE); fn = build_decl (FUNCTION_DECL, fn, fntype); + ggc_add_tree_root (&fn, 1); DECL_EXTERNAL (fn) = 1; TREE_PUBLIC (fn) = 1; DECL_ARTIFICIAL (fn) = 1; - make_decl_rtl (fn, NULL_PTR, 1); + TREE_NOTHROW (fn) = 1; + make_decl_rtl (fn, NULL); assemble_external (fn); - pop_obstacks (); } - /* We need to make an argument list for the function call. + /* We need to make an argument list for the function call. memcpy has three arguments, the first two are void * addresses and the last is a size_t byte count for the copy. */ @@ -1797,12 +1788,18 @@ emit_block_move (x, y, size, align) retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); #else - emit_library_call (bcopy_libfunc, 0, + emit_library_call (bcopy_libfunc, LCT_NORMAL, VOIDmode, 3, y, Pmode, x, Pmode, convert_to_mode (TYPE_MODE (integer_type_node), size, TREE_UNSIGNED (integer_type_node)), TYPE_MODE (integer_type_node)); #endif + + /* If we are initializing a readonly value, show the above call + clobbered it. Otherwise, a load from it may erroneously be hoisted + from a loop. */ + if (RTX_UNCHANGING_P (x)) + emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); } return retval; @@ -1820,7 +1817,7 @@ move_block_to_reg (regno, x, nregs, mode) { int i; #ifdef HAVE_load_multiple - rtx pat; + rtx pat; rtx last; #endif @@ -1856,7 +1853,6 @@ move_block_to_reg (regno, x, nregs, mode) The number of registers to be filled is NREGS. SIZE indicates the number of bytes in the object X. */ - void move_block_from_reg (regno, x, nregs, size) int regno; @@ -1866,25 +1862,30 @@ move_block_from_reg (regno, x, nregs, size) { int i; #ifdef HAVE_store_multiple - rtx pat; + rtx pat; rtx last; #endif enum machine_mode mode; + if (nregs == 0) + return; + /* If SIZE is that of a mode no bigger than a word, just use that mode's store operation. */ if (size <= UNITS_PER_WORD - && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode) + && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode + && !FUNCTION_ARG_REG_LITTLE_ENDIAN) { - emit_move_insn (change_address (x, mode, NULL), - gen_rtx_REG (mode, regno)); + emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno)); return; } - + /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned to the left before storing to memory. Note that the previous test doesn't handle all cases (e.g. SIZE == 3). */ - if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN) + if (size < UNITS_PER_WORD + && BYTES_BIG_ENDIAN + && !FUNCTION_ARG_REG_LITTLE_ENDIAN) { rtx tem = operand_subword (x, 0, 1, BLKmode); rtx shift; @@ -1930,9 +1931,8 @@ move_block_from_reg (regno, x, nregs, size) /* Emit code to move a block SRC to a block DST, where DST is non-consecutive registers represented by a PARALLEL. SSIZE represents the total size of - block SRC in bytes, or -1 if not known. ALIGN is the known alignment of - SRC in bits. */ -/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that + block SRC in bytes, or -1 if not known. */ +/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that the balance will be in what would be the low-order memory addresses, i.e. left justified for big endian, right justified for little endian. This happens to be true for the targets currently using this support. 
If this @@ -1940,9 +1940,9 @@ move_block_from_reg (regno, x, nregs, size) would be needed. */ void -emit_group_load (dst, orig_src, ssize, align) +emit_group_load (dst, orig_src, ssize) rtx dst, orig_src; - int align, ssize; + int ssize; { rtx *tmps, src; int start, i; @@ -1957,60 +1957,83 @@ emit_group_load (dst, orig_src, ssize, align) else start = 1; - tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0)); - - /* If we won't be loading directly from memory, protect the real source - from strange tricks we might play. */ - src = orig_src; - if (GET_CODE (src) != MEM) - { - src = gen_reg_rtx (GET_MODE (orig_src)); - emit_move_insn (src, orig_src); - } + tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0)); /* Process the pieces. */ for (i = start; i < XVECLEN (dst, 0); i++) { enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); - int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); - int bytelen = GET_MODE_SIZE (mode); + HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); + unsigned int bytelen = GET_MODE_SIZE (mode); int shift = 0; /* Handle trailing fragments that run over the size of the struct. */ - if (ssize >= 0 && bytepos + bytelen > ssize) + if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) { shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; bytelen = ssize - bytepos; if (bytelen <= 0) - abort(); + abort (); + } + + /* If we won't be loading directly from memory, protect the real source + from strange tricks we might play; but make sure that the source can + be loaded directly into the destination. */ + src = orig_src; + if (GET_CODE (orig_src) != MEM + && (!CONSTANT_P (orig_src) + || (GET_MODE (orig_src) != mode + && GET_MODE (orig_src) != VOIDmode))) + { + if (GET_MODE (orig_src) == VOIDmode) + src = gen_reg_rtx (mode); + else + src = gen_reg_rtx (GET_MODE (orig_src)); + + emit_move_insn (src, orig_src); } /* Optimize the access just a bit. 
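   For concreteness, the PARALLEL being filled here typically looks
   like this for a 16-byte struct split across two 8-byte registers
   (register numbers invented for illustration):

     (parallel [(expr_list (reg:DI 10) (const_int 0))
                (expr_list (reg:DI 11) (const_int 8))])

   where each element pairs a hard register with the byte offset
   (bytepos above) of the piece it carries.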
*/ if (GET_CODE (src) == MEM - && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode) - && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 + && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode) + && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 && bytelen == GET_MODE_SIZE (mode)) { tmps[i] = gen_reg_rtx (mode); - emit_move_insn (tmps[i], - change_address (src, mode, - plus_constant (XEXP (src, 0), - bytepos))); + emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); } - else + else if (GET_CODE (src) == CONCAT) { - tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT, - bytepos*BITS_PER_UNIT, 1, NULL_RTX, - mode, mode, align, ssize); + if (bytepos == 0 + && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))) + tmps[i] = XEXP (src, 0); + else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) + && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))) + tmps[i] = XEXP (src, 1); + else if (bytepos == 0) + { + rtx mem = assign_stack_temp (GET_MODE (src), + GET_MODE_SIZE (GET_MODE (src)), 0); + emit_move_insn (mem, src); + tmps[i] = adjust_address (mem, mode, 0); + } + else + abort (); } + else if (CONSTANT_P (src) + || (GET_CODE (src) == REG && GET_MODE (src) == mode)) + tmps[i] = src; + else + tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, + bytepos * BITS_PER_UNIT, 1, NULL_RTX, + mode, mode, ssize); if (BYTES_BIG_ENDIAN && shift) - { - expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift), - tmps[i], 0, OPTAB_WIDEN); - } + expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift), + tmps[i], 0, OPTAB_WIDEN); } - emit_queue(); + + emit_queue (); /* Copy the extracted pieces into the proper (probable) hard regs. */ for (i = start; i < XVECLEN (dst, 0); i++) @@ -2019,12 +2042,12 @@ emit_group_load (dst, orig_src, ssize, align) /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive registers represented by a PARALLEL. SSIZE represents the total size of - block DST, or -1 if not known. ALIGN is the known alignment of DST. */ + block DST, or -1 if not known. */ void -emit_group_store (orig_dst, src, ssize, align) +emit_group_store (orig_dst, src, ssize) rtx orig_dst, src; - int ssize, align; + int ssize; { rtx *tmps, dst; int start, i; @@ -2039,7 +2062,7 @@ emit_group_store (orig_dst, src, ssize, align) else start = 1; - tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0)); + tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0)); /* Copy the (probable) hard regs into pseudos. */ for (i = start; i < XVECLEN (src, 0); i++) @@ -2048,7 +2071,7 @@ emit_group_store (orig_dst, src, ssize, align) tmps[i] = gen_reg_rtx (GET_MODE (reg)); emit_move_insn (tmps[i], reg); } - emit_queue(); + emit_queue (); /* If we won't be storing directly into memory, protect the real destination from strange tricks we might play. */ @@ -2068,8 +2091,8 @@ emit_group_store (orig_dst, src, ssize, align) the temporary. */ temp = assign_stack_temp (GET_MODE (dst), ssize, 0); - emit_group_store (temp, src, ssize, align); - emit_group_load (dst, temp, ssize, align); + emit_group_store (temp, src, ssize); + emit_group_load (dst, temp, ssize); return; } else if (GET_CODE (dst) != MEM) @@ -2078,24 +2101,16 @@ emit_group_store (orig_dst, src, ssize, align) /* Make life a bit easier for combine. */ emit_move_insn (dst, const0_rtx); } - else if (! MEM_IN_STRUCT_P (dst)) - { - /* store_bit_field requires that memory operations have - mem_in_struct_p set; we might not. 
*/ - - dst = copy_rtx (orig_dst); - MEM_SET_IN_STRUCT_P (dst, 1); - } /* Process the pieces. */ for (i = start; i < XVECLEN (src, 0); i++) { - int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); + HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); enum machine_mode mode = GET_MODE (tmps[i]); - int bytelen = GET_MODE_SIZE (mode); + unsigned int bytelen = GET_MODE_SIZE (mode); /* Handle trailing fragments that run over the size of the struct. */ - if (ssize >= 0 && bytepos + bytelen > ssize) + if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) { if (BYTES_BIG_ENDIAN) { @@ -2108,22 +2123,16 @@ emit_group_store (orig_dst, src, ssize, align) /* Optimize the access just a bit. */ if (GET_CODE (dst) == MEM - && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode) - && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 + && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode) + && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 && bytelen == GET_MODE_SIZE (mode)) - { - emit_move_insn (change_address (dst, mode, - plus_constant (XEXP (dst, 0), - bytepos)), - tmps[i]); - } + emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]); else - { - store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT, - mode, tmps[i], align, ssize); - } + store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, + mode, tmps[i], ssize); } - emit_queue(); + + emit_queue (); /* Copy from the pseudo into the (probable) hard reg. */ if (GET_CODE (dst) == REG) @@ -2137,80 +2146,87 @@ emit_group_store (orig_dst, src, ssize, align) The primary purpose of this routine is to handle functions that return BLKmode structures in registers. Some machines (the PA for example) want to return all small structures - in registers regardless of the structure's alignment. - */ + in registers regardless of the structure's alignment. */ rtx -copy_blkmode_from_reg(tgtblk,srcreg,type) +copy_blkmode_from_reg (tgtblk, srcreg, type) rtx tgtblk; rtx srcreg; tree type; { - int bytes = int_size_in_bytes (type); - rtx src = NULL, dst = NULL; - int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD); - int bitpos, xbitpos, big_endian_correction = 0; - - if (tgtblk == 0) - { - tgtblk = assign_stack_temp (BLKmode, bytes, 0); - MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type)); - preserve_temp_slots (tgtblk); - } - - /* This code assumes srcreg is at least a full word. If it isn't, - copy it into a new pseudo which is a full word. */ - if (GET_MODE (srcreg) != BLKmode - && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) - srcreg = convert_to_mode (word_mode, srcreg, - TREE_UNSIGNED (type)); - - /* Structures whose size is not a multiple of a word are aligned - to the least significant byte (to the right). On a BYTES_BIG_ENDIAN - machine, this means we must skip the empty high order bytes when - calculating the bit offset. */ - if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD) - big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) - * BITS_PER_UNIT)); - - /* Copy the structure BITSIZE bites at a time. - - We could probably emit more efficient code for machines - which do not use strict alignment, but it doesn't seem - worth the effort at the current time. */ - for (bitpos = 0, xbitpos = big_endian_correction; - bitpos < bytes * BITS_PER_UNIT; - bitpos += bitsize, xbitpos += bitsize) - { - - /* We need a new source operand each time xbitpos is on a - word boundary and when xbitpos == big_endian_correction - (the first time through). 
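   As a worked instance of the correction, assuming 32-bit words:
   for a 3-byte structure, bytes % UNITS_PER_WORD == 3, so
   big_endian_correction == 32 - 3 * 8 == 8 bits, and xbitpos starts
   one byte into the first source word, skipping the unused
   high-order byte that big-endian padding leaves there.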
*/ - if (xbitpos % BITS_PER_WORD == 0 - || xbitpos == big_endian_correction) - src = operand_subword_force (srcreg, - xbitpos / BITS_PER_WORD, - BLKmode); - - /* We need a new destination operand each time bitpos is on - a word boundary. */ - if (bitpos % BITS_PER_WORD == 0) - dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); - - /* Use xbitpos for the source extraction (right justified) and - xbitpos for the destination store (left justified). */ - store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, - extract_bit_field (src, bitsize, - xbitpos % BITS_PER_WORD, 1, - NULL_RTX, word_mode, - word_mode, - bitsize / BITS_PER_UNIT, - BITS_PER_WORD), - bitsize / BITS_PER_UNIT, BITS_PER_WORD); - } - return tgtblk; -} + unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); + rtx src = NULL, dst = NULL; + unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); + unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0; + if (tgtblk == 0) + { + tgtblk = assign_temp (build_qualified_type (type, + (TYPE_QUALS (type) + | TYPE_QUAL_CONST)), + 0, 1, 1); + preserve_temp_slots (tgtblk); + } + + /* This code assumes srcreg is at least a full word. If it isn't, copy it + into a new pseudo which is a full word. + + If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy, + the wrong part of the register gets copied so we fake a type conversion + in place. */ + if (GET_MODE (srcreg) != BLKmode + && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) + { + if (FUNCTION_ARG_REG_LITTLE_ENDIAN) + srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0); + else + srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type)); + } + + /* Structures whose size is not a multiple of a word are aligned + to the least significant byte (to the right). On a BYTES_BIG_ENDIAN + machine, this means we must skip the empty high order bytes when + calculating the bit offset. */ + if (BYTES_BIG_ENDIAN + && !FUNCTION_ARG_REG_LITTLE_ENDIAN + && bytes % UNITS_PER_WORD) + big_endian_correction + = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); + + /* Copy the structure BITSIZE bites at a time. + + We could probably emit more efficient code for machines which do not use + strict alignment, but it doesn't seem worth the effort at the current + time. */ + for (bitpos = 0, xbitpos = big_endian_correction; + bitpos < bytes * BITS_PER_UNIT; + bitpos += bitsize, xbitpos += bitsize) + { + /* We need a new source operand each time xbitpos is on a + word boundary and when xbitpos == big_endian_correction + (the first time through). */ + if (xbitpos % BITS_PER_WORD == 0 + || xbitpos == big_endian_correction) + src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, + GET_MODE (srcreg)); + + /* We need a new destination operand each time bitpos is on + a word boundary. */ + if (bitpos % BITS_PER_WORD == 0) + dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); + + /* Use xbitpos for the source extraction (right justified) and + xbitpos for the destination store (left justified). */ + store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, + extract_bit_field (src, bitsize, + xbitpos % BITS_PER_WORD, 1, + NULL_RTX, word_mode, word_mode, + BITS_PER_WORD), + BITS_PER_WORD); + } + + return tgtblk; +} /* Add a USE expression for REG to the (possibly empty) list pointed to by CALL_FUSAGE. REG must denote a hard register. 
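   A hedged usage sketch (the choice of register is hypothetical):

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM));

   after which call_fusage heads an EXPR_LIST of (use ...) entries
   that the caller attaches to the CALL_INSN as
   CALL_INSN_FUNCTION_USAGE.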
*/ @@ -2221,7 +2237,7 @@ use_reg (call_fusage, reg) { if (GET_CODE (reg) != REG || REGNO (reg) >= FIRST_PSEUDO_REGISTER) - abort(); + abort (); *call_fusage = gen_rtx_EXPR_LIST (VOIDmode, @@ -2269,70 +2285,200 @@ use_group_regs (call_fusage, regs) } } -/* Generate several move instructions to clear LEN bytes of block TO. - (A MEM rtx with BLKmode). The caller must pass TO through - protect_from_queue before calling. ALIGN (in bytes) is maximum alignment - we can assume. */ + +int +can_store_by_pieces (len, constfun, constfundata, align) + unsigned HOST_WIDE_INT len; + rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); + PTR constfundata; + unsigned int align; +{ + unsigned HOST_WIDE_INT max_size, l; + HOST_WIDE_INT offset = 0; + enum machine_mode mode, tmode; + enum insn_code icode; + int reverse; + rtx cst; + + if (! MOVE_BY_PIECES_P (len, align)) + return 0; + + if (! SLOW_UNALIGNED_ACCESS (word_mode, align) + || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) + align = MOVE_MAX * BITS_PER_UNIT; + + /* We would first store what we can in the largest integer mode, then go to + successively smaller modes. */ + + for (reverse = 0; + reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); + reverse++) + { + l = len; + mode = VOIDmode; + max_size = MOVE_MAX_PIECES + 1; + while (max_size > 1) + { + for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); + tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) + if (GET_MODE_SIZE (tmode) < max_size) + mode = tmode; + + if (mode == VOIDmode) + break; + + icode = mov_optab->handlers[(int) mode].insn_code; + if (icode != CODE_FOR_nothing + && align >= GET_MODE_ALIGNMENT (mode)) + { + unsigned int size = GET_MODE_SIZE (mode); + + while (l >= size) + { + if (reverse) + offset -= size; + + cst = (*constfun) (constfundata, offset, mode); + if (!LEGITIMATE_CONSTANT_P (cst)) + return 0; + + if (!reverse) + offset += size; + + l -= size; + } + } + + max_size = GET_MODE_SIZE (mode); + } + + /* The code above should have handled everything. */ + if (l != 0) + abort (); + } + + return 1; +} + +/* Generate several move instructions to store LEN bytes generated by + CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a + pointer which will be passed as argument in every CONSTFUN call. + ALIGN is maximum alignment we can assume. */ + +void +store_by_pieces (to, len, constfun, constfundata, align) + rtx to; + unsigned HOST_WIDE_INT len; + rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); + PTR constfundata; + unsigned int align; +{ + struct store_by_pieces data; + + if (! MOVE_BY_PIECES_P (len, align)) + abort (); + to = protect_from_queue (to, 1); + data.constfun = constfun; + data.constfundata = constfundata; + data.len = len; + data.to = to; + store_by_pieces_1 (&data, align); +} + +/* Generate several move instructions to clear LEN bytes of block TO. (A MEM + rtx with BLKmode). The caller must pass TO through protect_from_queue + before calling. ALIGN is maximum alignment we can assume. */ static void clear_by_pieces (to, len, align) rtx to; - int len, align; + unsigned HOST_WIDE_INT len; + unsigned int align; +{ + struct store_by_pieces data; + + data.constfun = clear_by_pieces_1; + data.constfundata = NULL; + data.len = len; + data.to = to; + store_by_pieces_1 (&data, align); +} + +/* Callback routine for clear_by_pieces. + Return const0_rtx unconditionally. 
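   This is the degenerate constfun; a memset-style variant (purely
   illustrative, not part of this change) would replicate a fill
   byte taken from its DATA pointer across the whole mode:

     unsigned HOST_WIDE_INT val = 0;
     unsigned int i;

     for (i = 0; i < GET_MODE_SIZE (mode); i++)
       val = (val << BITS_PER_UNIT) | *(unsigned char *) data;
     return GEN_INT (trunc_int_for_mode (val, mode));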
*/ + +static rtx +clear_by_pieces_1 (data, offset, mode) + PTR data ATTRIBUTE_UNUSED; + HOST_WIDE_INT offset ATTRIBUTE_UNUSED; + enum machine_mode mode ATTRIBUTE_UNUSED; { - struct clear_by_pieces data; - rtx to_addr = XEXP (to, 0); - int max_size = MOVE_MAX_PIECES + 1; + return const0_rtx; +} + +/* Subroutine of clear_by_pieces and store_by_pieces. + Generate several move instructions to store LEN bytes of block TO. (A MEM + rtx with BLKmode). The caller must pass TO through protect_from_queue + before calling. ALIGN is maximum alignment we can assume. */ + +static void +store_by_pieces_1 (data, align) + struct store_by_pieces *data; + unsigned int align; +{ + rtx to_addr = XEXP (data->to, 0); + unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1; enum machine_mode mode = VOIDmode, tmode; enum insn_code icode; - data.offset = 0; - data.to_addr = to_addr; - data.to = to; - data.autinc_to + data->offset = 0; + data->to_addr = to_addr; + data->autinc_to = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); - data.explicit_inc_to = 0; - data.reverse + data->explicit_inc_to = 0; + data->reverse = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); - if (data.reverse) data.offset = len; - data.len = len; - - data.to_struct = MEM_IN_STRUCT_P (to); + if (data->reverse) + data->offset = data->len; - /* If copying requires more than two move insns, + /* If storing requires more than two move insns, copy addresses to registers (to make displacements shorter) and use post-increment if available. */ - if (!data.autinc_to - && move_by_pieces_ninsns (len, align) > 2) + if (!data->autinc_to + && move_by_pieces_ninsns (data->len, align) > 2) { - /* Determine the main mode we'll be using */ + /* Determine the main mode we'll be using. */ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) if (GET_MODE_SIZE (tmode) < max_size) mode = tmode; - if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to) + if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) { - data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len)); - data.autinc_to = 1; - data.explicit_inc_to = -1; + data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); + data->autinc_to = 1; + data->explicit_inc_to = -1; } - if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to) + + if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse + && ! data->autinc_to) { - data.to_addr = copy_addr_to_reg (to_addr); - data.autinc_to = 1; - data.explicit_inc_to = 1; + data->to_addr = copy_addr_to_reg (to_addr); + data->autinc_to = 1; + data->explicit_inc_to = 1; } - if (!data.autinc_to && CONSTANT_P (to_addr)) - data.to_addr = copy_addr_to_reg (to_addr); + + if ( !data->autinc_to && CONSTANT_P (to_addr)) + data->to_addr = copy_addr_to_reg (to_addr); } - if (! SLOW_UNALIGNED_ACCESS - || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT) - align = MOVE_MAX; + if (! SLOW_UNALIGNED_ACCESS (word_mode, align) + || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) + align = MOVE_MAX * BITS_PER_UNIT; - /* First move what we can in the largest integer mode, then go to + /* First store what we can in the largest integer mode, then go to successively smaller modes. 
*/ while (max_size > 1) @@ -2346,75 +2492,81 @@ clear_by_pieces (to, len, align) break; icode = mov_optab->handlers[(int) mode].insn_code; - if (icode != CODE_FOR_nothing - && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT, - GET_MODE_SIZE (mode))) - clear_by_pieces_1 (GEN_FCN (icode), mode, &data); + if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) + store_by_pieces_2 (GEN_FCN (icode), mode, data); max_size = GET_MODE_SIZE (mode); } /* The code above should have handled everything. */ - if (data.len != 0) + if (data->len != 0) abort (); } -/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate +/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate with move instructions for mode MODE. GENFUN is the gen_... function to make a move insn for that mode. DATA has all the other info. */ static void -clear_by_pieces_1 (genfun, mode, data) - rtx (*genfun) PROTO ((rtx, ...)); +store_by_pieces_2 (genfun, mode, data) + rtx (*genfun) PARAMS ((rtx, ...)); enum machine_mode mode; - struct clear_by_pieces *data; + struct store_by_pieces *data; { - register int size = GET_MODE_SIZE (mode); - register rtx to1; + unsigned int size = GET_MODE_SIZE (mode); + rtx to1, cst; while (data->len >= size) { - if (data->reverse) data->offset -= size; + if (data->reverse) + data->offset -= size; - to1 = (data->autinc_to - ? gen_rtx_MEM (mode, data->to_addr) - : copy_rtx (change_address (data->to, mode, - plus_constant (data->to_addr, - data->offset)))); - MEM_IN_STRUCT_P (to1) = data->to_struct; + if (data->autinc_to) + to1 = adjust_automodify_address (data->to, mode, data->to_addr, + data->offset); + else + to1 = adjust_address (data->to, mode, data->offset); if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) - emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size))); + emit_insn (gen_add2_insn (data->to_addr, + GEN_INT (-(HOST_WIDE_INT) size))); + + cst = (*data->constfun) (data->constfundata, data->offset, mode); + emit_insn ((*genfun) (to1, cst)); - emit_insn ((*genfun) (to1, const0_rtx)); if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); - if (! data->reverse) data->offset += size; + if (! data->reverse) + data->offset += size; data->len -= size; } } -/* Write zeros through the storage of OBJECT. - If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is - the maximum alignment we can is has, measured in bytes. - - If we call a function that returns the length of the block, return it. */ +/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is + its length in bytes. */ rtx -clear_storage (object, size, align) +clear_storage (object, size) rtx object; rtx size; - int align; { #ifdef TARGET_MEM_FUNCTIONS static tree fn; tree call_expr, arg_list; #endif rtx retval = 0; - - if (GET_MODE (object) == BLKmode) + unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object) + : GET_MODE_ALIGNMENT (GET_MODE (object))); + + /* If OBJECT is not BLKmode and SIZE is the same size as its mode, + just move a zero. Otherwise, do this a piece at a time. 
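   A typical call site for the BLKmode case (variable names invented
   for illustration):

     clear_storage (target, GEN_INT (int_size_in_bytes (type)));

   while, say, an SImode object with SIZE == 4 takes the fast path
   below and collapses to a single move of CONST0_RTX (SImode).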
*/ + if (GET_MODE (object) != BLKmode + && GET_CODE (size) == CONST_INT + && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size)) + emit_move_insn (object, CONST0_RTX (GET_MODE (object))); + else { object = protect_from_queue (object, 1); size = protect_from_queue (size, 0); @@ -2422,20 +2574,20 @@ clear_storage (object, size, align) if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align)) clear_by_pieces (object, INTVAL (size), align); - else { /* Try the most limited insn first, because there's no point including more than one in the machine description unless the more limited one has some advantage. */ - rtx opalign = GEN_INT (align); + rtx opalign = GEN_INT (align / BITS_PER_UNIT); enum machine_mode mode; for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) { enum insn_code code = clrstr_optab[(int) mode]; + insn_operand_predicate_fn pred; if (code != CODE_FOR_nothing /* We don't need MODE to be narrower than @@ -2446,21 +2598,18 @@ clear_storage (object, size, align) && ((unsigned HOST_WIDE_INT) INTVAL (size) <= (GET_MODE_MASK (mode) >> 1))) || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) - && (insn_operand_predicate[(int) code][0] == 0 - || (*insn_operand_predicate[(int) code][0]) (object, - BLKmode)) - && (insn_operand_predicate[(int) code][2] == 0 - || (*insn_operand_predicate[(int) code][2]) (opalign, - VOIDmode))) + && ((pred = insn_data[(int) code].operand[0].predicate) == 0 + || (*pred) (object, BLKmode)) + && ((pred = insn_data[(int) code].operand[2].predicate) == 0 + || (*pred) (opalign, VOIDmode))) { rtx op1; rtx last = get_last_insn (); rtx pat; op1 = convert_to_mode (mode, size, 1); - if (insn_operand_predicate[(int) code][1] != 0 - && ! (*insn_operand_predicate[(int) code][1]) (op1, - mode)) + pred = insn_data[(int) code].operand[1].predicate; + if (pred != 0 && ! (*pred) (op1, mode)) op1 = copy_to_mode_reg (mode, op1); pat = GEN_FCN ((int) code) (object, op1, opalign); @@ -2504,7 +2653,6 @@ clear_storage (object, size, align) size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); #endif - #ifdef TARGET_MEM_FUNCTIONS /* It is incorrect to use the libcall calling conventions to call memset in this context. @@ -2514,7 +2662,7 @@ clear_storage (object, size, align) For targets where libcalls and normal calls have different conventions for returning pointers, we could end up generating - incorrect code. + incorrect code. So instead of using a libcall sequence we build up a suitable CALL_EXPR and expand the call in the normal fashion. */ @@ -2525,23 +2673,22 @@ clear_storage (object, size, align) /* This was copied from except.c, I don't know if all this is necessary in this context or not. */ fn = get_identifier ("memset"); - push_obstacks_nochange (); - end_temporary_allocation (); fntype = build_pointer_type (void_type_node); fntype = build_function_type (fntype, NULL_TREE); fn = build_decl (FUNCTION_DECL, fn, fntype); + ggc_add_tree_root (&fn, 1); DECL_EXTERNAL (fn) = 1; TREE_PUBLIC (fn) = 1; DECL_ARTIFICIAL (fn) = 1; - make_decl_rtl (fn, NULL_PTR, 1); + TREE_NOTHROW (fn) = 1; + make_decl_rtl (fn, NULL); assemble_external (fn); - pop_obstacks (); } - /* We need to make an argument list for the function call. + /* We need to make an argument list for the function call. 
memset has three arguments, the first is a void * addresses, the - second a integer with the initialization value, the last is a + second an integer with the initialization value, the last is a size_t byte count for the copy. */ arg_list = build_tree_list (NULL_TREE, @@ -2549,7 +2696,7 @@ clear_storage (object, size, align) object)); TREE_CHAIN (arg_list) = build_tree_list (NULL_TREE, - make_tree (integer_type_node, const0_rtx)); + make_tree (integer_type_node, const0_rtx)); TREE_CHAIN (TREE_CHAIN (arg_list)) = build_tree_list (NULL_TREE, make_tree (sizetype, size)); TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE; @@ -2563,14 +2710,18 @@ clear_storage (object, size, align) retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); #else - emit_library_call (bzero_libfunc, 0, + emit_library_call (bzero_libfunc, LCT_NORMAL, VOIDmode, 2, object, Pmode, size, TYPE_MODE (integer_type_node)); #endif + + /* If we are initializing a readonly value, show the above call + clobbered it. Otherwise, a load from it may erroneously be + hoisted from a loop. */ + if (RTX_UNCHANGING_P (object)) + emit_insn (gen_rtx_CLOBBER (VOIDmode, object)); } } - else - emit_move_insn (object, CONST0_RTX (GET_MODE (object))); return retval; } @@ -2587,6 +2738,8 @@ emit_move_insn (x, y) rtx x, y; { enum machine_mode mode = GET_MODE (x); + rtx y_cst = NULL_RTX; + rtx last_insn; x = protect_from_queue (x, 1); y = protect_from_queue (y, 0); @@ -2598,7 +2751,10 @@ emit_move_insn (x, y) if (GET_CODE (y) == CONSTANT_P_RTX) ; else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y)) - y = force_const_mem (mode, y); + { + y_cst = y; + y = force_const_mem (mode, y); + } /* If X or Y are memory references, verify that their addresses are valid for the machine. */ @@ -2607,18 +2763,23 @@ emit_move_insn (x, y) && ! push_operand (x, GET_MODE (x))) || (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (x, 0))))) - x = change_address (x, VOIDmode, XEXP (x, 0)); + x = validize_mem (x); if (GET_CODE (y) == MEM && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) || (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (y, 0))))) - y = change_address (y, VOIDmode, XEXP (y, 0)); + y = validize_mem (y); if (mode == BLKmode) abort (); - return emit_move_insn_1 (x, y); + last_insn = emit_move_insn_1 (x, y); + + if (y_cst && GET_CODE (x) == REG) + set_unique_reg_note (last_insn, REG_EQUAL, y_cst); + + return last_insn; } /* Low level part of emit_move_insn. @@ -2632,10 +2793,9 @@ emit_move_insn_1 (x, y) enum machine_mode mode = GET_MODE (x); enum machine_mode submode; enum mode_class class = GET_MODE_CLASS (mode); - int i; - if (mode >= MAX_MACHINE_MODE) - abort (); + if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE) + abort (); if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) return @@ -2654,6 +2814,55 @@ emit_move_insn_1 (x, y) /* Don't split destination if it is a stack push. */ int stack = push_operand (x, GET_MODE (x)); +#ifdef PUSH_ROUNDING + /* In case we output to the stack, but the size is smaller machine can + push exactly, we need to use move instructions. */ + if (stack + && (PUSH_ROUNDING (GET_MODE_SIZE (submode)) + != GET_MODE_SIZE (submode))) + { + rtx temp; + HOST_WIDE_INT offset1, offset2; + + /* Do not use anti_adjust_stack, since we don't want to update + stack_pointer_delta. 
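
The block that follows does that stack adjustment by hand: subtract (or add) the rounded push size, then store the two halves of the complex value at fixed offsets from the new stack pointer. The offset arithmetic, with PUSH_ROUNDING modeled hypothetically as rounding up to 8 bytes:

    #include <stdio.h>

    /* Hypothetical target rule standing in for PUSH_ROUNDING.  */
    #define PUSH_ROUNDING_SKETCH(n) (((n) + 7) & ~7u)

    int
    main (void)
    {
      unsigned whole = 4;                /* complex value: two 2-byte parts */
      unsigned part = whole / 2;
      unsigned rounded = PUSH_ROUNDING_SKETCH (whole);

      /* STACK_GROWS_DOWNWARD: offset1 = 0, offset2 = part size.  */
      printf ("down: sp -= %u; real at sp+0, imag at sp+%u\n", rounded, part);

      /* Upward stack: both offsets are biased by -rounded.  */
      printf ("up:   sp += %u; real at sp-%u, imag at sp-%u\n",
              rounded, rounded, rounded - part);
      return 0;
    }
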
*/ + temp = expand_binop (Pmode, +#ifdef STACK_GROWS_DOWNWARD + sub_optab, +#else + add_optab, +#endif + stack_pointer_rtx, + GEN_INT + (PUSH_ROUNDING + (GET_MODE_SIZE (GET_MODE (x)))), + stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); + + if (temp != stack_pointer_rtx) + emit_move_insn (stack_pointer_rtx, temp); + +#ifdef STACK_GROWS_DOWNWARD + offset1 = 0; + offset2 = GET_MODE_SIZE (submode); +#else + offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))); + offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))) + + GET_MODE_SIZE (submode)); +#endif + + emit_move_insn (change_address (x, submode, + gen_rtx_PLUS (Pmode, + stack_pointer_rtx, + GEN_INT (offset1))), + gen_realpart (submode, y)); + emit_move_insn (change_address (x, submode, + gen_rtx_PLUS (Pmode, + stack_pointer_rtx, + GEN_INT (offset2))), + gen_imagpart (submode, y)); + } + else +#endif /* If this is a stack, push the highpart first, so it will be in the argument order. @@ -2665,17 +2874,17 @@ emit_move_insn_1 (x, y) regardless of machine's endianness. */ #ifdef STACK_GROWS_DOWNWARD emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) - (gen_rtx_MEM (submode, (XEXP (x, 0))), + (gen_rtx_MEM (submode, XEXP (x, 0)), gen_imagpart (submode, y))); emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) - (gen_rtx_MEM (submode, (XEXP (x, 0))), + (gen_rtx_MEM (submode, XEXP (x, 0)), gen_realpart (submode, y))); #else emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) - (gen_rtx_MEM (submode, (XEXP (x, 0))), + (gen_rtx_MEM (submode, XEXP (x, 0)), gen_realpart (submode, y))); emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) - (gen_rtx_MEM (submode, (XEXP (x, 0))), + (gen_rtx_MEM (submode, XEXP (x, 0)), gen_imagpart (submode, y))); #endif } @@ -2693,39 +2902,42 @@ emit_move_insn_1 (x, y) memory and reload. FIXME, we should see about using extract and insert on integer registers, but complex short and complex char variables should be rarely used. */ - if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD + if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD && (reload_in_progress | reload_completed) == 0) { - int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); - int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); + int packed_dest_p + = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); + int packed_src_p + = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); if (packed_dest_p || packed_src_p) { enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT) ? MODE_FLOAT : MODE_INT); - enum machine_mode reg_mode = - mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); + enum machine_mode reg_mode + = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); if (reg_mode != BLKmode) { rtx mem = assign_stack_temp (reg_mode, GET_MODE_SIZE (mode), 0); + rtx cmem = adjust_address (mem, mode, 0); - rtx cmem = change_address (mem, mode, NULL_RTX); - - current_function_cannot_inline - = "function using short complex types cannot be inline"; + cfun->cannot_inline + = N_("function using short complex types cannot be inline"); if (packed_dest_p) { rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0); + emit_move_insn_1 (cmem, y); return emit_move_insn_1 (sreg, mem); } else { rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0); + emit_move_insn_1 (mem, sreg); return emit_move_insn_1 (x, cmem); } @@ -2746,9 +2958,7 @@ emit_move_insn_1 (x, y) && ! 
(reload_in_progress || reload_completed) && (GET_CODE (realpart_x) == SUBREG || GET_CODE (imagpart_x) == SUBREG)) - { - emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); - } + emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) (realpart_x, realpart_y)); @@ -2765,25 +2975,67 @@ emit_move_insn_1 (x, y) else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD) { rtx last_insn = 0; - rtx seq; + rtx seq, inner; int need_clobber; - + int i; + #ifdef PUSH_ROUNDING /* If X is a push on the stack, do the push now and replace X with a reference to the stack pointer. */ if (push_operand (x, GET_MODE (x))) { - anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x)))); - x = change_address (x, VOIDmode, stack_pointer_rtx); + rtx temp; + enum rtx_code code; + + /* Do not use anti_adjust_stack, since we don't want to update + stack_pointer_delta. */ + temp = expand_binop (Pmode, +#ifdef STACK_GROWS_DOWNWARD + sub_optab, +#else + add_optab, +#endif + stack_pointer_rtx, + GEN_INT + (PUSH_ROUNDING + (GET_MODE_SIZE (GET_MODE (x)))), + stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); + + if (temp != stack_pointer_rtx) + emit_move_insn (stack_pointer_rtx, temp); + + code = GET_CODE (XEXP (x, 0)); + + /* Just hope that small offsets off SP are OK. */ + if (code == POST_INC) + temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, + GEN_INT (-((HOST_WIDE_INT) + GET_MODE_SIZE (GET_MODE (x))))); + else if (code == POST_DEC) + temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, + GEN_INT (GET_MODE_SIZE (GET_MODE (x)))); + else + temp = stack_pointer_rtx; + + x = change_address (x, VOIDmode, temp); } #endif - + + /* If we are in reload, see if either operand is a MEM whose address + is scheduled for replacement. */ + if (reload_in_progress && GET_CODE (x) == MEM + && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) + x = replace_equiv_address_nv (x, inner); + if (reload_in_progress && GET_CODE (y) == MEM + && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) + y = replace_equiv_address_nv (y, inner); + start_sequence (); need_clobber = 0; for (i = 0; - i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; + i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; i++) { rtx xpart = operand_subword (x, i, 1, mode); @@ -2818,9 +3070,7 @@ emit_move_insn_1 (x, y) if (x != y && ! (reload_in_progress || reload_completed) && need_clobber != 0) - { - emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); - } + emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); emit_insn (seq); @@ -2846,7 +3096,7 @@ push_block (size, extra, below) rtx size; int extra, below; { - register rtx temp; + rtx temp; size = convert_modes (Pmode, ptr_mode, size, 1); if (CONSTANT_P (size)) @@ -2855,62 +3105,97 @@ push_block (size, extra, below) anti_adjust_stack (size); else { - rtx temp = copy_to_mode_reg (Pmode, size); + temp = copy_to_mode_reg (Pmode, size); if (extra != 0) temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), temp, 0, OPTAB_LIB_WIDEN); anti_adjust_stack (temp); } -#if defined (STACK_GROWS_DOWNWARD) \ - || (defined (ARGS_GROW_DOWNWARD) \ - && !defined (ACCUMULATE_OUTGOING_ARGS)) - - /* Return the lowest stack address when STACK or ARGS grow downward and - we are not aaccumulating outgoing arguments (the c4x port uses such - conventions). 
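
Backing up to the multi-word fallback earlier in this hunk: when no single move pattern fits, the value is copied one word at a time through operand_subword, and the CLOBBER emitted first tells flow analysis the whole destination dies rather than being partially updated. The loop's plain-C equivalent, assuming both operands are padded to whole words as registers are:

    #include <stddef.h>
    #include <string.h>

    #define WORD_SKETCH sizeof (long)   /* stands in for UNITS_PER_WORD */

    static void
    move_multiword_sketch (void *x, const void *y, size_t mode_size)
    {
      /* Same round-up bound as the expander's loop.  */
      size_t nwords = (mode_size + WORD_SKETCH - 1) / WORD_SKETCH;
      size_t i;

      for (i = 0; i < nwords; i++)
        memcpy ((char *) x + i * WORD_SKETCH,
                (const char *) y + i * WORD_SKETCH, WORD_SKETCH);
    }

    int
    main (void)
    {
      long dst[3], src[3] = { 1, 2, 3 };
      move_multiword_sketch (dst, src, sizeof src);
      return dst[2] == 3 ? 0 : 1;
    }
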
*/ - temp = virtual_outgoing_args_rtx; - if (extra != 0 && below) - temp = plus_constant (temp, extra); +#ifndef STACK_GROWS_DOWNWARD + if (0) #else - if (GET_CODE (size) == CONST_INT) - temp = plus_constant (virtual_outgoing_args_rtx, - - INTVAL (size) - (below ? 0 : extra)); - else if (extra != 0 && !below) - temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, - negate_rtx (Pmode, plus_constant (size, extra))); - else - temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, - negate_rtx (Pmode, size)); + if (1) #endif + { + temp = virtual_outgoing_args_rtx; + if (extra != 0 && below) + temp = plus_constant (temp, extra); + } + else + { + if (GET_CODE (size) == CONST_INT) + temp = plus_constant (virtual_outgoing_args_rtx, + -INTVAL (size) - (below ? 0 : extra)); + else if (extra != 0 && !below) + temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, + negate_rtx (Pmode, plus_constant (size, extra))); + else + temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, + negate_rtx (Pmode, size)); + } return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); } -rtx -gen_push_operand () -{ - return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); -} +#ifdef PUSH_ROUNDING -/* Return an rtx for the address of the beginning of a as-if-it-was-pushed - block of SIZE bytes. */ +/* Emit single push insn. */ -static rtx -get_push_address (size) - int size; +static void +emit_single_push_insn (mode, x, type) + rtx x; + enum machine_mode mode; + tree type; { - register rtx temp; - - if (STACK_PUSH_CODE == POST_DEC) - temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size)); - else if (STACK_PUSH_CODE == POST_INC) - temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size)); + rtx dest_addr; + unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); + rtx dest; + enum insn_code icode; + insn_operand_predicate_fn pred; + + stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); + /* If there is push pattern, use it. Otherwise try old way of throwing + MEM representing push operation to move expander. */ + icode = push_optab->handlers[(int) mode].insn_code; + if (icode != CODE_FOR_nothing) + { + if (((pred = insn_data[(int) icode].operand[0].predicate) + && !((*pred) (x, mode)))) + x = force_reg (mode, x); + emit_insn (GEN_FCN (icode) (x)); + return; + } + if (GET_MODE_SIZE (mode) == rounded_size) + dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); else - temp = stack_pointer_rtx; + { +#ifdef STACK_GROWS_DOWNWARD + dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, + GEN_INT (-(HOST_WIDE_INT) rounded_size)); +#else + dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, + GEN_INT (rounded_size)); +#endif + dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); + } + + dest = gen_rtx_MEM (mode, dest_addr); - return copy_to_reg (temp); + if (type != 0) + { + set_mem_attributes (dest, type, 1); + + if (flag_optimize_sibling_calls) + /* Function incoming arguments may overlap with sibling call + outgoing arguments and we cannot allow reordering of reads + from function arguments with stores to outgoing arguments + of sibling calls. */ + set_mem_alias_set (dest, 0); + } + emit_move_insn (dest, x); } +#endif /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE. @@ -2919,7 +3204,7 @@ get_push_address (size) SIZE is an rtx for the size of data to be copied (in bytes), needed only if X is BLKmode. - ALIGN (in bytes) is maximum alignment we can assume. + ALIGN (in bits) is maximum alignment we can assume. 
If PARTIAL and REG are both nonzero, then copy that many of the first words of X into registers starting with REG, and push the rest of X. @@ -2946,18 +3231,20 @@ get_push_address (size) void emit_push_insn (x, mode, type, size, align, partial, reg, extra, - args_addr, args_so_far, reg_parm_stack_space) - register rtx x; + args_addr, args_so_far, reg_parm_stack_space, + alignment_pad) + rtx x; enum machine_mode mode; tree type; rtx size; - int align; + unsigned int align; int partial; rtx reg; int extra; rtx args_addr; rtx args_so_far; int reg_parm_stack_space; + rtx alignment_pad; { rtx xinner; enum direction stack_direction @@ -2972,8 +3259,9 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, Default is below for small data on big-endian machines; else above. */ enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); - /* Invert direction if stack is post-update. */ - if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC) + /* Invert direction if stack is post-decrement. + FIXME: why? */ + if (STACK_PUSH_CODE == POST_DEC) if (where_pad != none) where_pad = (where_pad == downward ? upward : downward); @@ -2983,11 +3271,11 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, { /* Copy a block into the stack, entirely or partially. */ - register rtx temp; + rtx temp; int used = partial * UNITS_PER_WORD; int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); int skip; - + if (size == 0) abort (); @@ -2997,8 +3285,7 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, because registers will take care of them. */ if (partial != 0) - xinner = change_address (xinner, BLKmode, - plus_constant (XEXP (xinner, 0), used)); + xinner = adjust_address (xinner, BLKmode, used); /* If the partial register-part of the arg counts in its stack size, skip the part of stack space corresponding to the registers. @@ -3011,15 +3298,17 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, and if there is no difficulty with push insns that skip bytes on the stack for alignment purposes. */ if (args_addr == 0 + && PUSH_ARGS && GET_CODE (size) == CONST_INT && skip == 0 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) /* Here we avoid the case of a structure whose weak alignment forces many pushes of a small amount of data, and such small pushes do rounding that causes trouble. */ - && ((! SLOW_UNALIGNED_ACCESS) - || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT - || PUSH_ROUNDING (align) == align) + && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) + || align >= BIGGEST_ALIGNMENT + || (PUSH_ROUNDING (align / BITS_PER_UNIT) + == (align / BITS_PER_UNIT))) && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) { /* Push padding now if padding above and stack grows down, @@ -3029,34 +3318,13 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, && where_pad != none && where_pad != stack_direction) anti_adjust_stack (GEN_INT (extra)); - move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner, - INTVAL (size) - used, align); - - if (current_function_check_memory_usage && ! 
in_check_memory_usage) - { - rtx temp; - - in_check_memory_usage = 1; - temp = get_push_address (INTVAL(size) - used); - if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type)) - emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, - temp, Pmode, - XEXP (xinner, 0), Pmode, - GEN_INT (INTVAL(size) - used), - TYPE_MODE (sizetype)); - else - emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3, - temp, Pmode, - GEN_INT (INTVAL(size) - used), - TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_RW), - TYPE_MODE (integer_type_node)); - in_check_memory_usage = 0; - } + move_by_pieces (NULL, xinner, INTVAL (size) - used, align); } else -#endif /* PUSH_ROUNDING */ +#endif /* PUSH_ROUNDING */ { + rtx target; + /* Otherwise make space on the stack and copy the data to the address of that space. */ @@ -3089,68 +3357,57 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, args_addr, args_so_far), skip)); - if (current_function_check_memory_usage && ! in_check_memory_usage) + target = gen_rtx_MEM (BLKmode, temp); + + if (type != 0) { - rtx target; - - in_check_memory_usage = 1; - target = copy_to_reg (temp); - if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type)) - emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, - target, Pmode, - XEXP (xinner, 0), Pmode, - size, TYPE_MODE (sizetype)); - else - emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3, - target, Pmode, - size, TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_RW), - TYPE_MODE (integer_type_node)); - in_check_memory_usage = 0; + set_mem_attributes (target, type, 1); + /* Function incoming arguments may overlap with sibling call + outgoing arguments and we cannot allow reordering of reads + from function arguments with stores to outgoing arguments + of sibling calls. */ + set_mem_alias_set (target, 0); } + else + set_mem_align (target, align); /* TEMP is the address of the block. Copy the data there. */ if (GET_CODE (size) == CONST_INT - && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))) + && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)) { - move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner, - INTVAL (size), align); + move_by_pieces (target, xinner, INTVAL (size), align); goto ret; } else { - rtx opalign = GEN_INT (align); + rtx opalign = GEN_INT (align / BITS_PER_UNIT); enum machine_mode mode; - rtx target = gen_rtx_MEM (BLKmode, temp); for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) { enum insn_code code = movstr_optab[(int) mode]; + insn_operand_predicate_fn pred; if (code != CODE_FOR_nothing && ((GET_CODE (size) == CONST_INT && ((unsigned HOST_WIDE_INT) INTVAL (size) <= (GET_MODE_MASK (mode) >> 1))) || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) - && (insn_operand_predicate[(int) code][0] == 0 - || ((*insn_operand_predicate[(int) code][0]) - (target, BLKmode))) - && (insn_operand_predicate[(int) code][1] == 0 - || ((*insn_operand_predicate[(int) code][1]) - (xinner, BLKmode))) - && (insn_operand_predicate[(int) code][3] == 0 - || ((*insn_operand_predicate[(int) code][3]) - (opalign, VOIDmode)))) + && (!(pred = insn_data[(int) code].operand[0].predicate) + || ((*pred) (target, BLKmode))) + && (!(pred = insn_data[(int) code].operand[1].predicate) + || ((*pred) (xinner, BLKmode))) + && (!(pred = insn_data[(int) code].operand[3].predicate) + || ((*pred) (opalign, VOIDmode)))) { rtx op2 = convert_to_mode (mode, size, 1); rtx last = get_last_insn (); rtx pat; - if (insn_operand_predicate[(int) code][2] != 0 - && ! 
((*insn_operand_predicate[(int) code][2]) - (op2, mode))) + pred = insn_data[(int) code].operand[2].predicate; + if (pred != 0 && ! (*pred) (op2, mode)) op2 = copy_to_mode_reg (mode, op2); pat = GEN_FCN ((int) code) (target, xinner, @@ -3166,27 +3423,28 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, } } -#ifndef ACCUMULATE_OUTGOING_ARGS - /* If the source is referenced relative to the stack pointer, - copy it to another register to stabilize it. We do not need - to do this if we know that we won't be changing sp. */ + if (!ACCUMULATE_OUTGOING_ARGS) + { + /* If the source is referenced relative to the stack pointer, + copy it to another register to stabilize it. We do not need + to do this if we know that we won't be changing sp. */ - if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) - || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) - temp = copy_to_reg (temp); -#endif + if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) + || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) + temp = copy_to_reg (temp); + } /* Make inhibit_defer_pop nonzero around the library call to force it to pop the bcopy-arguments right away. */ NO_DEFER_POP; #ifdef TARGET_MEM_FUNCTIONS - emit_library_call (memcpy_libfunc, 0, + emit_library_call (memcpy_libfunc, LCT_NORMAL, VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode, convert_to_mode (TYPE_MODE (sizetype), size, TREE_UNSIGNED (sizetype)), TYPE_MODE (sizetype)); #else - emit_library_call (bcopy_libfunc, 0, + emit_library_call (bcopy_libfunc, LCT_NORMAL, VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode, convert_to_mode (TYPE_MODE (integer_type_node), size, @@ -3255,12 +3513,13 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, 0, args_addr, GEN_INT (args_offset + ((i - not_stack + skip) * UNITS_PER_WORD)), - reg_parm_stack_space); + reg_parm_stack_space, alignment_pad); } else { rtx addr; rtx target = NULL_RTX; + rtx dest; /* Push padding now if padding above and stack grows down, or if padding below and stack grows up. @@ -3270,45 +3529,34 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, anti_adjust_stack (GEN_INT (extra)); #ifdef PUSH_ROUNDING - if (args_addr == 0) - addr = gen_push_operand (); + if (args_addr == 0 && PUSH_ARGS) + emit_single_push_insn (mode, x, type); else #endif { if (GET_CODE (args_so_far) == CONST_INT) addr = memory_address (mode, - plus_constant (args_addr, + plus_constant (args_addr, INTVAL (args_so_far))); - else + else addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, args_so_far)); target = addr; - } - - emit_move_insn (gen_rtx_MEM (mode, addr), x); + dest = gen_rtx_MEM (mode, addr); + if (type != 0) + { + set_mem_attributes (dest, type, 1); + /* Function incoming arguments may overlap with sibling call + outgoing arguments and we cannot allow reordering of reads + from function arguments with stores to outgoing arguments + of sibling calls. */ + set_mem_alias_set (dest, 0); + } - if (current_function_check_memory_usage && ! 
in_check_memory_usage) - { - in_check_memory_usage = 1; - if (target == 0) - target = get_push_address (GET_MODE_SIZE (mode)); - - if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type)) - emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, - target, Pmode, - XEXP (x, 0), Pmode, - GEN_INT (GET_MODE_SIZE (mode)), - TYPE_MODE (sizetype)); - else - emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3, - target, Pmode, - GEN_INT (GET_MODE_SIZE (mode)), - TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_RW), - TYPE_MODE (integer_type_node)); - in_check_memory_usage = 0; + emit_move_insn (dest, x); } + } ret: @@ -3320,15 +3568,38 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra, /* Handle calls that pass values in multiple non-contiguous locations. The Irix 6 ABI has examples of this. */ if (GET_CODE (reg) == PARALLEL) - emit_group_load (reg, x, -1, align); /* ??? size? */ + emit_group_load (reg, x, -1); /* ??? size? */ else move_block_to_reg (REGNO (reg), x, partial, mode); } if (extra && args_addr == 0 && where_pad == stack_direction) anti_adjust_stack (GEN_INT (extra)); + + if (alignment_pad && args_addr == 0) + anti_adjust_stack (alignment_pad); } +/* Return X if X can be used as a subtarget in a sequence of arithmetic + operations. */ + +static rtx +get_subtarget (x) + rtx x; +{ + return ((x == 0 + /* Only registers can be subtargets. */ + || GET_CODE (x) != REG + /* If the register is readonly, it can't be set more than once. */ + || RTX_UNCHANGING_P (x) + /* Don't use hard regs to avoid extending their life. */ + || REGNO (x) < FIRST_PSEUDO_REGISTER + /* Avoid subtargets inside loops, + since they hide some invariant expressions. */ + || preserve_subexpressions_p ()) + ? 0 : x); +} + /* Expand an assignment that stores the value of FROM into TO. If WANT_VALUE is nonzero, return an rtx for the value of TO. (This may contain a QUEUED rtx; @@ -3344,9 +3615,9 @@ rtx expand_assignment (to, from, want_value, suggest_reg) tree to, from; int want_value; - int suggest_reg; + int suggest_reg ATTRIBUTE_UNUSED; { - register rtx to_rtx = 0; + rtx to_rtx = 0; rtx result; /* Don't crash if the lhs of the assignment was erroneous. */ @@ -3364,20 +3635,19 @@ expand_assignment (to, from, want_value, suggest_reg) problem. */ if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF - || TREE_CODE (to) == ARRAY_REF) + || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF) { enum machine_mode mode1; - int bitsize; - int bitpos; + HOST_WIDE_INT bitsize, bitpos; + rtx orig_to_rtx; tree offset; int unsignedp; int volatilep = 0; tree tem; - int alignment; push_temp_slots (); tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, - &unsignedp, &volatilep, &alignment); + &unsignedp, &volatilep); /* If we are going to use store_bit_field and extract_bit_field, make sure to_rtx will be safe for multiple use. 
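
When the destination is a bit-field, those store_field/store_bit_field calls boil down to a masked read-modify-write on the containing word, using the BITPOS and BITSIZE that get_inner_reference just produced. A self-contained sketch of that insertion, assuming a 32-bit word:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t
    store_bit_field_sketch (uint32_t word, unsigned bitpos, unsigned bitsize,
                            uint32_t value)
    {
      uint32_t mask = ((bitsize < 32 ? (1u << bitsize) : 0u) - 1) << bitpos;
      return (word & ~mask) | ((value << bitpos) & mask);
    }

    int
    main (void)
    {
      /* Store 0x5 into a 4-bit field at bit 8; prints 0xfffff5ff.  */
      printf ("%#x\n", store_bit_field_sketch (0xffffffffu, 8, 4, 0x5));
      return 0;
    }
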
*/ @@ -3385,7 +3655,8 @@ expand_assignment (to, from, want_value, suggest_reg) if (mode1 == VOIDmode && want_value) tem = stabilize_reference (tem); - to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT); + orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0); + if (offset != 0) { rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); @@ -3394,108 +3665,92 @@ expand_assignment (to, from, want_value, suggest_reg) abort (); if (GET_MODE (offset_rtx) != ptr_mode) - { + offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); + #ifdef POINTERS_EXTEND_UNSIGNED - offset_rtx = convert_memory_address (ptr_mode, offset_rtx); -#else - offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); + if (GET_MODE (offset_rtx) != Pmode) + offset_rtx = convert_memory_address (Pmode, offset_rtx); #endif - } /* A constant address in TO_RTX can have VOIDmode, we must not try to call force_reg for that case. Avoid that case. */ if (GET_CODE (to_rtx) == MEM && GET_MODE (to_rtx) == BLKmode && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode - && bitsize - && (bitpos % bitsize) == 0 + && bitsize > 0 + && (bitpos % bitsize) == 0 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 - && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1)) + && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1)) { - rtx temp = change_address (to_rtx, mode1, - plus_constant (XEXP (to_rtx, 0), - (bitpos / - BITS_PER_UNIT))); + rtx temp + = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); + if (GET_CODE (XEXP (temp, 0)) == REG) to_rtx = temp; else - to_rtx = change_address (to_rtx, mode1, - force_reg (GET_MODE (XEXP (temp, 0)), - XEXP (temp, 0))); + to_rtx = (replace_equiv_address + (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)), + XEXP (temp, 0)))); bitpos = 0; } - to_rtx = change_address (to_rtx, VOIDmode, - gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0), - force_reg (ptr_mode, offset_rtx))); + to_rtx = offset_address (to_rtx, offset_rtx, + highest_pow2_factor (offset)); } - if (volatilep) + + if (GET_CODE (to_rtx) == MEM) { - if (GET_CODE (to_rtx) == MEM) - { - /* When the offset is zero, to_rtx is the address of the - structure we are storing into, and hence may be shared. - We must make a new MEM before setting the volatile bit. */ - if (offset == 0) - to_rtx = copy_rtx (to_rtx); + tree old_expr = MEM_EXPR (to_rtx); - MEM_VOLATILE_P (to_rtx) = 1; - } -#if 0 /* This was turned off because, when a field is volatile - in an object which is not volatile, the object may be in a register, - and then we would abort over here. */ - else - abort (); -#endif + /* If the field is at offset zero, we could have been given the + DECL_RTX of the parent struct. Don't munge it. */ + to_rtx = shallow_copy_rtx (to_rtx); + + set_mem_attributes (to_rtx, to, 0); + + /* If we changed MEM_EXPR, that means we're now referencing + the COMPONENT_REF, which means that MEM_OFFSET must be + relative to that field. But we've not yet reflected BITPOS + in TO_RTX. This will be done in store_field. Adjust for + that by biasing MEM_OFFSET by -bitpos. */ + if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx) + && (bitpos / BITS_PER_UNIT) != 0) + set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx)) + - (bitpos / BITS_PER_UNIT))); + } + + /* Deal with volatile and readonly fields. The former is only done + for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. 
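
All three of those flag updates follow the same copy-before-flagging idiom: TO_RTX may still be the shared original (for instance the cached DECL_RTL of the whole structure), so a property true only of this one reference must be set on a private copy. The pattern, modeled with a hypothetical node type:

    #include <stdlib.h>
    #include <string.h>

    struct node_sketch { int volatile_p; };

    static struct node_sketch *
    set_flag_on_copy (struct node_sketch *n, struct node_sketch *orig)
    {
      if (n == orig)                 /* still the shared original?  */
        {
          struct node_sketch *copy = malloc (sizeof *copy);
          memcpy (copy, n, sizeof *copy);
          n = copy;                  /* flag the private copy instead */
        }
      n->volatile_p = 1;
      return n;
    }

    int
    main (void)
    {
      struct node_sketch shared = { 0 };
      struct node_sketch *ref = set_flag_on_copy (&shared, &shared);
      return ref->volatile_p == 1 && shared.volatile_p == 0 ? 0 : 1;
    }
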
*/ + if (volatilep && GET_CODE (to_rtx) == MEM) + { + if (to_rtx == orig_to_rtx) + to_rtx = copy_rtx (to_rtx); + MEM_VOLATILE_P (to_rtx) = 1; } if (TREE_CODE (to) == COMPONENT_REF && TREE_READONLY (TREE_OPERAND (to, 1))) { - if (offset == 0) + if (to_rtx == orig_to_rtx) to_rtx = copy_rtx (to_rtx); - RTX_UNCHANGING_P (to_rtx) = 1; } - /* Check the access. */ - if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM) + if (GET_CODE (to_rtx) == MEM && ! can_address_p (to)) { - rtx to_addr; - int size; - int best_mode_size; - enum machine_mode best_mode; - - best_mode = get_best_mode (bitsize, bitpos, - TYPE_ALIGN (TREE_TYPE (tem)), - mode1, volatilep); - if (best_mode == VOIDmode) - best_mode = QImode; - - best_mode_size = GET_MODE_BITSIZE (best_mode); - to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT)); - size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size); - size *= GET_MODE_SIZE (best_mode); - - /* Check the access right of the pointer. */ - if (size) - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - to_addr, Pmode, - GEN_INT (size), TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_WO), - TYPE_MODE (integer_type_node)); + if (to_rtx == orig_to_rtx) + to_rtx = copy_rtx (to_rtx); + MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; } result = store_field (to_rtx, bitsize, bitpos, mode1, from, (want_value - /* Spurious cast makes HPUX compiler happy. */ - ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to)) + /* Spurious cast for HPUX compiler. */ + ? ((enum machine_mode) + TYPE_MODE (TREE_TYPE (to))) : VOIDmode), - unsignedp, - /* Required alignment of containing datum. */ - alignment, - int_size_in_bytes (TREE_TYPE (tem)), - get_alias_set (to)); + unsignedp, TREE_TYPE (tem), get_alias_set (to)); + preserve_temp_slots (result); free_temp_slots (); pop_temp_slots (); @@ -3515,34 +3770,33 @@ expand_assignment (to, from, want_value, suggest_reg) val = setjmp (buf) on machines where reference to val requires loading up part of an address in a separate insn. - Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be - a promoted variable where the zero- or sign- extension needs to be done. - Handling this in the normal way is safe because no computation is done - before the call. */ + Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG + since it might be a promoted variable where the zero- or sign- extension + needs to be done. Handling this in the normal way is safe because no + computation is done before the call. */ if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from) && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST - && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG)) + && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) + && GET_CODE (DECL_RTL (to)) == REG)) { rtx value; push_temp_slots (); value = expand_expr (from, NULL_RTX, VOIDmode, 0); if (to_rtx == 0) - to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO); + to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); /* Handle calls that return values in multiple non-contiguous locations. The Irix 6 ABI has examples of this. 
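
For such a PARALLEL destination, emit_group_load moves each piece of the source block into its own register at its own offset. A rough model with a hypothetical descriptor array, glossing over byte-order details:

    #include <stddef.h>
    #include <string.h>

    struct piece_sketch { int reg; size_t offset; size_t size; };

    static void
    group_load_sketch (long regfile[], const struct piece_sketch *p,
                       size_t npieces, const void *src)
    {
      size_t i;
      for (i = 0; i < npieces; i++)
        {
          long v = 0;
          memcpy (&v, (const char *) src + p[i].offset, p[i].size);
          regfile[p[i].reg] = v;
        }
    }

    int
    main (void)
    {
      /* E.g. an 8-byte aggregate returned half in each of two registers.  */
      struct piece_sketch pieces[2] = { { 0, 0, 4 }, { 1, 4, 4 } };
      long regs[2];
      unsigned char blob[8] = { 0 };
      group_load_sketch (regs, pieces, 2, blob);
      return regs[0] == 0 && regs[1] == 0 ? 0 : 1;
    }
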
*/ if (GET_CODE (to_rtx) == PARALLEL) - emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)), - TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT); + emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from))); else if (GET_MODE (to_rtx) == BLKmode) - emit_block_move (to_rtx, value, expr_size (from), - TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT); + emit_block_move (to_rtx, value, expr_size (from)); else { #ifdef POINTERS_EXTEND_UNSIGNED - if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE - || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE) + if (POINTER_TYPE_P (TREE_TYPE (to)) + && GET_MODE (to_rtx) != GET_MODE (value)) value = convert_memory_address (GET_MODE (to_rtx), value); #endif emit_move_insn (to_rtx, value); @@ -3557,20 +3811,22 @@ expand_assignment (to, from, want_value, suggest_reg) Don't re-expand if it was expanded already (in COMPONENT_REF case). */ if (to_rtx == 0) - { - to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO); - if (GET_CODE (to_rtx) == MEM) - MEM_ALIAS_SET (to_rtx) = get_alias_set (to); - } + to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); /* Don't move directly into a return register. */ - if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG) + if (TREE_CODE (to) == RESULT_DECL + && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL)) { rtx temp; push_temp_slots (); temp = expand_expr (from, 0, GET_MODE (to_rtx), 0); - emit_move_insn (to_rtx, temp); + + if (GET_CODE (to_rtx) == PARALLEL) + emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from))); + else + emit_move_insn (to_rtx, temp); + preserve_temp_slots (to_rtx); free_temp_slots (); pop_temp_slots (); @@ -3588,27 +3844,17 @@ expand_assignment (to, from, want_value, suggest_reg) push_temp_slots (); size = expr_size (from); - from_rtx = expand_expr (from, NULL_RTX, VOIDmode, - EXPAND_MEMORY_USE_DONT); - - /* Copy the rights of the bitmap. */ - if (current_function_check_memory_usage) - emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, - XEXP (to_rtx, 0), Pmode, - XEXP (from_rtx, 0), Pmode, - convert_to_mode (TYPE_MODE (sizetype), - size, TREE_UNSIGNED (sizetype)), - TYPE_MODE (sizetype)); + from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0); #ifdef TARGET_MEM_FUNCTIONS - emit_library_call (memcpy_libfunc, 0, + emit_library_call (memmove_libfunc, LCT_NORMAL, VOIDmode, 3, XEXP (to_rtx, 0), Pmode, XEXP (from_rtx, 0), Pmode, convert_to_mode (TYPE_MODE (sizetype), size, TREE_UNSIGNED (sizetype)), TYPE_MODE (sizetype)); #else - emit_library_call (bcopy_libfunc, 0, + emit_library_call (bcopy_libfunc, LCT_NORMAL, VOIDmode, 3, XEXP (from_rtx, 0), Pmode, XEXP (to_rtx, 0), Pmode, convert_to_mode (TYPE_MODE (integer_type_node), @@ -3657,12 +3903,13 @@ expand_assignment (to, from, want_value, suggest_reg) rtx store_expr (exp, target, want_value) - register tree exp; - register rtx target; + tree exp; + rtx target; int want_value; { - register rtx temp; + rtx temp; int dont_return_target = 0; + int dont_store_target = 0; if (TREE_CODE (exp) == COMPOUND_EXPR) { @@ -3731,10 +3978,17 @@ store_expr (exp, target, want_value) Don't do this if TARGET is volatile because we are supposed to write it and then read it. */ { - temp = expand_expr (exp, cse_not_expected ? 
NULL_RTX : target, - GET_MODE (target), 0); + temp = expand_expr (exp, target, GET_MODE (target), 0); if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode) - temp = copy_to_reg (temp); + { + /* If TEMP is already in the desired TARGET, only copy it from + memory and don't store it there again. */ + if (temp == target + || (rtx_equal_p (temp, target) + && ! side_effects_p (temp) && ! side_effects_p (target))) + dont_store_target = 1; + temp = copy_to_reg (temp); + } dont_return_target = 1; } else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) @@ -3764,7 +4018,7 @@ store_expr (exp, target, want_value) SUBREG_PROMOTED_UNSIGNED_P (target)), exp); } - + temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); /* If TEMP is a volatile MEM and we want a result value, make @@ -3778,12 +4032,36 @@ store_expr (exp, target, want_value) /* If TEMP is a VOIDmode constant, use convert_modes to make sure that we properly convert it. */ if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) - temp = convert_modes (GET_MODE (SUBREG_REG (target)), - TYPE_MODE (TREE_TYPE (exp)), temp, - SUBREG_PROMOTED_UNSIGNED_P (target)); + { + temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), + temp, SUBREG_PROMOTED_UNSIGNED_P (target)); + temp = convert_modes (GET_MODE (SUBREG_REG (target)), + GET_MODE (target), temp, + SUBREG_PROMOTED_UNSIGNED_P (target)); + } convert_move (SUBREG_REG (target), temp, SUBREG_PROMOTED_UNSIGNED_P (target)); + + /* If we promoted a constant, change the mode back down to match + target. Otherwise, the caller might get confused by a result whose + mode is larger than expected. */ + + if (want_value && GET_MODE (temp) != GET_MODE (target)) + { + if (GET_MODE (temp) != VOIDmode) + { + temp = gen_lowpart_SUBREG (GET_MODE (target), temp); + SUBREG_PROMOTED_VAR_P (temp) = 1; + SUBREG_PROMOTED_UNSIGNED_P (temp) + = SUBREG_PROMOTED_UNSIGNED_P (target); + } + else + temp = convert_modes (GET_MODE (target), + GET_MODE (SUBREG_REG (target)), + temp, SUBREG_PROMOTED_UNSIGNED_P (target)); + } + return want_value ? temp : NULL_RTX; } else @@ -3814,26 +4092,9 @@ store_expr (exp, target, want_value) temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), temp, TREE_UNSIGNED (TREE_TYPE (exp))); - if (current_function_check_memory_usage - && GET_CODE (target) == MEM - && AGGREGATE_TYPE_P (TREE_TYPE (exp))) - { - if (GET_CODE (temp) == MEM) - emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, - XEXP (target, 0), Pmode, - XEXP (temp, 0), Pmode, - expr_size (exp), TYPE_MODE (sizetype)); - else - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - XEXP (target, 0), Pmode, - expr_size (exp), TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_WO), - TYPE_MODE (integer_type_node)); - } - /* If value was not generated in the target, store it there. - Convert the value to TARGET's type first if nec. */ - /* If TEMP and TARGET compare equal according to rtx_equal_p, but + Convert the value to TARGET's type first if necessary. + If TEMP and TARGET compare equal according to rtx_equal_p, but one or both of them are volatile memory refs, we have to distinguish two cases: - expand_expr has used TARGET. In this case, we must not generate @@ -3848,7 +4109,8 @@ store_expr (exp, target, want_value) if ((! rtx_equal_p (temp, target) || (temp != target && (side_effects_p (temp) || side_effects_p (target)))) - && TREE_CODE (exp) != ERROR_MARK) + && TREE_CODE (exp) != ERROR_MARK + && ! 
dont_store_target) { target = protect_from_queue (target, 1); if (GET_MODE (temp) != GET_MODE (target) @@ -3869,88 +4131,59 @@ store_expr (exp, target, want_value) else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) { - /* Handle copying a string constant into an array. - The string constant may be shorter than the array. - So copy just the string's actual length, and clear the rest. */ - rtx size; - rtx addr; + /* Handle copying a string constant into an array. The string + constant may be shorter than the array. So copy just the string's + actual length, and clear the rest. First get the size of the data + type of the string, which is actually the size of the target. */ + rtx size = expr_size (exp); - /* Get the size of the data type of the string, - which is actually the size of the target. */ - size = expr_size (exp); if (GET_CODE (size) == CONST_INT && INTVAL (size) < TREE_STRING_LENGTH (exp)) - emit_block_move (target, temp, size, - TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); + emit_block_move (target, temp, size); else { /* Compute the size of the data to copy from the string. */ tree copy_size = size_binop (MIN_EXPR, make_tree (sizetype, size), - convert (sizetype, - build_int_2 (TREE_STRING_LENGTH (exp), 0))); + size_int (TREE_STRING_LENGTH (exp))); rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX, VOIDmode, 0); rtx label = 0; /* Copy that much. */ - emit_block_move (target, temp, copy_size_rtx, - TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); + copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0); + emit_block_move (target, temp, copy_size_rtx); /* Figure out how much is left in TARGET that we have to clear. Do all calculations in ptr_mode. */ - - addr = XEXP (target, 0); - addr = convert_modes (ptr_mode, Pmode, addr, 1); - if (GET_CODE (copy_size_rtx) == CONST_INT) { - addr = plus_constant (addr, TREE_STRING_LENGTH (exp)); - size = plus_constant (size, - TREE_STRING_LENGTH (exp)); + size = plus_constant (size, -INTVAL (copy_size_rtx)); + target = adjust_address (target, BLKmode, + INTVAL (copy_size_rtx)); } else { - addr = force_reg (ptr_mode, addr); - addr = expand_binop (ptr_mode, add_optab, addr, - copy_size_rtx, NULL_RTX, 0, - OPTAB_LIB_WIDEN); - size = expand_binop (ptr_mode, sub_optab, size, copy_size_rtx, NULL_RTX, 0, OPTAB_LIB_WIDEN); +#ifdef POINTERS_EXTEND_UNSIGNED + if (GET_MODE (copy_size_rtx) != Pmode) + copy_size_rtx = convert_memory_address (Pmode, + copy_size_rtx); +#endif + + target = offset_address (target, copy_size_rtx, + highest_pow2_factor (copy_size)); label = gen_label_rtx (); emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, - GET_MODE (size), 0, 0, label); + GET_MODE (size), 0, label); } if (size != const0_rtx) - { - /* Be sure we can write on ADDR. 
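
Old and new code alike, the net effect of this STRING_CST path is: copy the string constant's bytes, then zero whatever of the (possibly longer) target array remains. In plain C:

    #include <stddef.h>
    #include <string.h>

    static void
    store_string_sketch (char *target, size_t target_size,
                         const char *str, size_t str_len)
    {
      size_t n = str_len < target_size ? str_len : target_size;

      memcpy (target, str, n);                   /* emit_block_move */
      if (target_size > n)                       /* size != const0_rtx */
        memset (target + n, 0, target_size - n); /* clear_storage */
    }

    int
    main (void)
    {
      char buf[8];
      store_string_sketch (buf, sizeof buf, "hi", 2);
      return buf[7] == 0 ? 0 : 1;                /* tail was cleared */
    }
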
*/ - if (current_function_check_memory_usage) - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - addr, Pmode, - size, TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_WO), - TYPE_MODE (integer_type_node)); -#ifdef TARGET_MEM_FUNCTIONS - emit_library_call (memset_libfunc, 0, VOIDmode, 3, - addr, ptr_mode, - const0_rtx, TYPE_MODE (integer_type_node), - convert_to_mode (TYPE_MODE (sizetype), - size, - TREE_UNSIGNED (sizetype)), - TYPE_MODE (sizetype)); -#else - emit_library_call (bzero_libfunc, 0, VOIDmode, 2, - addr, ptr_mode, - convert_to_mode (TYPE_MODE (integer_type_node), - size, - TREE_UNSIGNED (integer_type_node)), - TYPE_MODE (integer_type_node)); -#endif - } + clear_storage (target, size); if (label) emit_label (label); @@ -3959,11 +4192,9 @@ store_expr (exp, target, want_value) /* Handle calls that return values in multiple non-contiguous locations. The Irix 6 ABI has examples of this. */ else if (GET_CODE (target) == PARALLEL) - emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)), - TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); + emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp))); else if (GET_MODE (temp) == BLKmode) - emit_block_move (target, temp, expr_size (exp), - TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); + emit_block_move (target, temp, expr_size (exp)); else emit_move_insn (target, temp); } @@ -3982,7 +4213,7 @@ store_expr (exp, target, want_value) && ! (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)) return copy_to_reg (target); - + else return target; } @@ -4000,10 +4231,11 @@ is_zeros_p (exp) case CONVERT_EXPR: case NOP_EXPR: case NON_LVALUE_EXPR: + case VIEW_CONVERT_EXPR: return is_zeros_p (TREE_OPERAND (exp, 0)); case INTEGER_CST: - return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0; + return integer_zerop (exp); case COMPLEX_CST: return @@ -4020,7 +4252,7 @@ is_zeros_p (exp) return 0; return 1; - + default: return 0; } @@ -4063,6 +4295,7 @@ mostly_zeros_p (exp) TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. TYPE is the type of the CONSTRUCTOR, not the element type. CLEARED is as for store_constructor. + ALIAS_SET is the alias set to use for any stores. This provides a recursive shortcut back to store_constructor when it isn't necessary to go through store_field. This is so that we can pass through @@ -4070,13 +4303,15 @@ mostly_zeros_p (exp) clear a substructure if the outer structure has already been cleared. */ static void -store_constructor_field (target, bitsize, bitpos, - mode, exp, type, cleared) +store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared, + alias_set) rtx target; - int bitsize, bitpos; + unsigned HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; enum machine_mode mode; tree exp, type; int cleared; + int alias_set; { if (TREE_CODE (exp) == CONSTRUCTOR && bitpos % BITS_PER_UNIT == 0 @@ -4085,83 +4320,95 @@ store_constructor_field (target, bitsize, bitpos, generate unnecessary clear instructions anyways. */ && (bitpos == 0 || GET_CODE (target) == MEM)) { - if (bitpos != 0) - target = change_address (target, VOIDmode, - plus_constant (XEXP (target, 0), - bitpos / BITS_PER_UNIT)); - store_constructor (exp, target, cleared); + if (GET_CODE (target) == MEM) + target + = adjust_address (target, + GET_MODE (target) == BLKmode + || 0 != (bitpos + % GET_MODE_ALIGNMENT (GET_MODE (target))) + ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); + + + /* Update the alias set, if required. */ + if (GET_CODE (target) == MEM && ! 
MEM_KEEP_ALIAS_SET_P (target) + && MEM_ALIAS_SET (target) != 0) + { + target = copy_rtx (target); + set_mem_alias_set (target, alias_set); + } + + store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT); } else - store_field (target, bitsize, bitpos, mode, exp, - VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT, - int_size_in_bytes (type), 0); + store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type, + alias_set); } /* Store the value of constructor EXP into the rtx TARGET. - TARGET is either a REG or a MEM. - CLEARED is true if TARGET is known to have been zero'd. */ + TARGET is either a REG or a MEM; we know it cannot conflict, since + safe_from_p has been called. + CLEARED is true if TARGET is known to have been zero'd. + SIZE is the number of bytes of TARGET we are allowed to modify: this + may not be the same as the size of EXP if we are assigning to a field + which has been packed to exclude padding bits. */ static void -store_constructor (exp, target, cleared) +store_constructor (exp, target, cleared, size) tree exp; rtx target; int cleared; + HOST_WIDE_INT size; { tree type = TREE_TYPE (exp); - rtx exp_size = expr_size (exp); - - /* We know our target cannot conflict, since safe_from_p has been called. */ -#if 0 - /* Don't try copying piece by piece into a hard register - since that is vulnerable to being clobbered by EXP. - Instead, construct in a pseudo register and then copy it all. */ - if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER) - { - rtx temp = gen_reg_rtx (GET_MODE (target)); - store_constructor (exp, temp, 0); - emit_move_insn (target, temp); - return; - } +#ifdef WORD_REGISTER_OPERATIONS + HOST_WIDE_INT exp_size = int_size_in_bytes (type); #endif if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == QUAL_UNION_TYPE) { - register tree elt; + tree elt; - /* Inform later passes that the whole union value is dead. */ - if (TREE_CODE (type) == UNION_TYPE - || TREE_CODE (type) == QUAL_UNION_TYPE) - emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); + /* We either clear the aggregate or indicate the value is dead. */ + if ((TREE_CODE (type) == UNION_TYPE + || TREE_CODE (type) == QUAL_UNION_TYPE) + && ! cleared + && ! CONSTRUCTOR_ELTS (exp)) + /* If the constructor is empty, clear the union. */ + { + clear_storage (target, expr_size (exp)); + cleared = 1; + } /* If we are building a static constructor into a register, set the initial value as zero so we can fold the value into a constant. But if more than one register is involved, this probably loses. */ - else if (GET_CODE (target) == REG && TREE_STATIC (exp) + else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp) && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) { - if (! cleared) - emit_move_insn (target, CONST0_RTX (GET_MODE (target))); - + emit_move_insn (target, CONST0_RTX (GET_MODE (target))); cleared = 1; } /* If the constructor has fewer fields than the structure or if we are initializing the structure to mostly zeros, - clear the whole structure first. */ - else if ((list_length (CONSTRUCTOR_ELTS (exp)) - != list_length (TYPE_FIELDS (type))) - || mostly_zeros_p (exp)) - { - if (! cleared) - clear_storage (target, expr_size (exp), - TYPE_ALIGN (type) / BITS_PER_UNIT); - + clear the whole structure first. Don't do this if TARGET is a + register whose mode size isn't equal to SIZE since clear_storage + can't handle this case. */ + else if (! 
cleared && size > 0 + && ((list_length (CONSTRUCTOR_ELTS (exp)) + != fields_length (type)) + || mostly_zeros_p (exp)) + && (GET_CODE (target) != REG + || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) + == size))) + { + clear_storage (target, GEN_INT (size)); cleared = 1; } - else - /* Inform later passes that the old value is dead. */ + + if (! cleared) emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); /* Store each element of the constructor into @@ -4169,13 +4416,13 @@ store_constructor (exp, target, cleared) for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) { - register tree field = TREE_PURPOSE (elt); + tree field = TREE_PURPOSE (elt); tree value = TREE_VALUE (elt); - register enum machine_mode mode; - int bitsize; - int bitpos = 0; + enum machine_mode mode; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos = 0; int unsignedp; - tree pos, constant = 0, offset = 0; + tree offset; rtx to_rtx = target; /* Just ignore missing fields. @@ -4184,26 +4431,28 @@ store_constructor (exp, target, cleared) if (field == 0) continue; - if (cleared && is_zeros_p (TREE_VALUE (elt))) + if (cleared && is_zeros_p (value)) continue; - bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)); + if (host_integerp (DECL_SIZE (field), 1)) + bitsize = tree_low_cst (DECL_SIZE (field), 1); + else + bitsize = -1; + unsignedp = TREE_UNSIGNED (field); mode = DECL_MODE (field); if (DECL_BIT_FIELD (field)) mode = VOIDmode; - pos = DECL_FIELD_BITPOS (field); - if (TREE_CODE (pos) == INTEGER_CST) - constant = pos; - else if (TREE_CODE (pos) == PLUS_EXPR - && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST) - constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0); + offset = DECL_FIELD_OFFSET (field); + if (host_integerp (offset, 0) + && host_integerp (bit_position (field), 0)) + { + bitpos = int_bit_position (field); + offset = 0; + } else - offset = pos; - - if (constant) - bitpos = TREE_INT_CST_LOW (constant); + bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); if (offset) { @@ -4213,27 +4462,22 @@ store_constructor (exp, target, cleared) offset = build (WITH_RECORD_EXPR, sizetype, offset, make_tree (TREE_TYPE (exp), target)); - offset = size_binop (FLOOR_DIV_EXPR, offset, - size_int (BITS_PER_UNIT)); - offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); if (GET_CODE (to_rtx) != MEM) abort (); - if (GET_MODE (offset_rtx) != ptr_mode) - { + if (GET_MODE (offset_rtx) != ptr_mode) + offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); + #ifdef POINTERS_EXTEND_UNSIGNED - offset_rtx = convert_memory_address (ptr_mode, offset_rtx); -#else - offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); + if (GET_MODE (offset_rtx) != Pmode) + offset_rtx = convert_memory_address (Pmode, offset_rtx); #endif - } - to_rtx - = change_address (to_rtx, VOIDmode, - gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0), - force_reg (ptr_mode, offset_rtx))); + to_rtx = offset_address (to_rtx, offset_rtx, + highest_pow2_factor (offset)); } + if (TREE_READONLY (field)) { if (GET_CODE (to_rtx) == MEM) @@ -4247,21 +4491,22 @@ store_constructor (exp, target, cleared) start of a word, try to widen it to a full word. This special case allows us to output C++ member function initializations in a form that the optimizers can understand. 
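
Concretely, the widening turns a narrow bit-field insertion at a word boundary into one ordinary full-word store. A sketch with a 32-bit word and BYTES_BIG_ENDIAN modeled as a compile-time flag (an assumption of this sketch, not a real target):

    #include <stdint.h>
    #include <stdio.h>

    #define BIG_ENDIAN_SKETCH 0   /* assumption for this sketch */

    static uint32_t
    widen_initializer_sketch (uint32_t value, unsigned bitsize)
    {
      /* Big-endian puts the field in the high-order bits, hence the
         LSHIFT_EXPR by BITS_PER_WORD - bitsize in the source; BITSIZE
         is below the word width, as the guard above ensures.  */
      return BIG_ENDIAN_SKETCH ? value << (32 - bitsize) : value;
    }

    int
    main (void)
    {
      printf ("%#x\n", widen_initializer_sketch (0x7, 3));
      return 0;
    }
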
*/ - if (constant - && GET_CODE (target) == REG + if (GET_CODE (target) == REG && bitsize < BITS_PER_WORD && bitpos % BITS_PER_WORD == 0 && GET_MODE_CLASS (mode) == MODE_INT && TREE_CODE (value) == INTEGER_CST - && GET_CODE (exp_size) == CONST_INT - && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT) + && exp_size >= 0 + && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) { tree type = TREE_TYPE (value); + if (TYPE_PRECISION (type) < BITS_PER_WORD) { type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type)); value = convert (type, value); } + if (BYTES_BIG_ENDIAN) value = fold (build (LSHIFT_EXPR, type, value, @@ -4270,19 +4515,39 @@ store_constructor (exp, target, cleared) mode = word_mode; } #endif - store_constructor_field (to_rtx, bitsize, bitpos, - mode, value, type, cleared); + + if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx) + && DECL_NONADDRESSABLE_P (field)) + { + to_rtx = copy_rtx (to_rtx); + MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; + } + + store_constructor_field (to_rtx, bitsize, bitpos, mode, + value, type, cleared, + get_alias_set (TREE_TYPE (field))); } } else if (TREE_CODE (type) == ARRAY_TYPE) { - register tree elt; - register int i; + tree elt; + int i; int need_to_clear; tree domain = TYPE_DOMAIN (type); - HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain)); - HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain)); tree elttype = TREE_TYPE (type); + int const_bounds_p = (TYPE_MIN_VALUE (domain) + && TYPE_MAX_VALUE (domain) + && host_integerp (TYPE_MIN_VALUE (domain), 0) + && host_integerp (TYPE_MAX_VALUE (domain), 0)); + HOST_WIDE_INT minelt = 0; + HOST_WIDE_INT maxelt = 0; + + /* If we have constant bounds for the range of the type, get them. */ + if (const_bounds_p) + { + minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); + maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); + } /* If the constructor has fewer elements than the array, clear the whole array first. Similarly if this is @@ -4292,49 +4557,55 @@ store_constructor (exp, target, cleared) else { HOST_WIDE_INT count = 0, zero_count = 0; - need_to_clear = 0; + need_to_clear = ! const_bounds_p; + /* This loop is a more accurate version of the loop in mostly_zeros_p (it handles RANGE_EXPR in an index). It is also needed to check for missing elements. */ for (elt = CONSTRUCTOR_ELTS (exp); - elt != NULL_TREE; + elt != NULL_TREE && ! need_to_clear; elt = TREE_CHAIN (elt)) { tree index = TREE_PURPOSE (elt); HOST_WIDE_INT this_node_count; + if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) { tree lo_index = TREE_OPERAND (index, 0); tree hi_index = TREE_OPERAND (index, 1); - if (TREE_CODE (lo_index) != INTEGER_CST - || TREE_CODE (hi_index) != INTEGER_CST) + + if (! host_integerp (lo_index, 1) + || ! host_integerp (hi_index, 1)) { need_to_clear = 1; break; } - this_node_count = TREE_INT_CST_LOW (hi_index) - - TREE_INT_CST_LOW (lo_index) + 1; + + this_node_count = (tree_low_cst (hi_index, 1) + - tree_low_cst (lo_index, 1) + 1); } else this_node_count = 1; + count += this_node_count; if (mostly_zeros_p (TREE_VALUE (elt))) zero_count += this_node_count; } + /* Clear the entire array first if there are any missing elements, or if the incidence of zero elements is >= 75%. */ - if (count < maxelt - minelt + 1 - || 4 * zero_count >= 3 * count) + if (! need_to_clear + && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count)) need_to_clear = 1; } - if (need_to_clear) + + if (need_to_clear && size > 0) { if (! 
cleared) - clear_storage (target, expr_size (exp), - TYPE_ALIGN (type) / BITS_PER_UNIT); + clear_storage (target, GEN_INT (size)); cleared = 1; } - else + else if (REG_P (target)) /* Inform later passes that the old value is dead. */ emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); @@ -4345,9 +4616,9 @@ store_constructor (exp, target, cleared) elt; elt = TREE_CHAIN (elt), i++) { - register enum machine_mode mode; - int bitsize; - int bitpos; + enum machine_mode mode; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; int unsignedp; tree value = TREE_VALUE (elt); tree index = TREE_PURPOSE (elt); @@ -4356,37 +4627,53 @@ store_constructor (exp, target, cleared) if (cleared && is_zeros_p (value)) continue; - mode = TYPE_MODE (elttype); - bitsize = GET_MODE_BITSIZE (mode); unsignedp = TREE_UNSIGNED (elttype); + mode = TYPE_MODE (elttype); + if (mode == BLKmode) + bitsize = (host_integerp (TYPE_SIZE (elttype), 1) + ? tree_low_cst (TYPE_SIZE (elttype), 1) + : -1); + else + bitsize = GET_MODE_BITSIZE (mode); if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) { tree lo_index = TREE_OPERAND (index, 0); tree hi_index = TREE_OPERAND (index, 1); - rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end; + rtx index_r, pos_rtx, hi_r, loop_top, loop_end; struct nesting *loop; HOST_WIDE_INT lo, hi, count; tree position; /* If the range is constant and "small", unroll the loop. */ - if (TREE_CODE (lo_index) == INTEGER_CST - && TREE_CODE (hi_index) == INTEGER_CST - && (lo = TREE_INT_CST_LOW (lo_index), - hi = TREE_INT_CST_LOW (hi_index), + if (const_bounds_p + && host_integerp (lo_index, 0) + && host_integerp (hi_index, 0) + && (lo = tree_low_cst (lo_index, 0), + hi = tree_low_cst (hi_index, 0), count = hi - lo + 1, (GET_CODE (target) != MEM || count <= 2 - || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST - && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count - <= 40 * 8)))) + || (host_integerp (TYPE_SIZE (elttype), 1) + && (tree_low_cst (TYPE_SIZE (elttype), 1) * count + <= 40 * 8))))) { lo -= minelt; hi -= minelt; for (; lo <= hi; lo++) { - bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype)); - store_constructor_field (target, bitsize, bitpos, - mode, value, type, cleared); + bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); + + if (GET_CODE (target) == MEM + && !MEM_KEEP_ALIAS_SET_P (target) + && TYPE_NONALIASED_COMPONENT (type)) + { + target = copy_rtx (target); + MEM_KEEP_ALIAS_SET_P (target) = 1; + } + + store_constructor_field + (target, bitsize, bitpos, mode, value, type, cleared, + get_alias_set (elttype)); } } else @@ -4399,10 +4686,10 @@ store_constructor (exp, target, cleared) index = build_decl (VAR_DECL, NULL_TREE, domain); - DECL_RTL (index) = index_r + index_r = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), &unsignedp, 0)); - + SET_DECL_RTL (index, index_r); if (TREE_CODE (value) == SAVE_EXPR && SAVE_EXPR_RTL (value) == 0) { @@ -4415,17 +4702,21 @@ store_constructor (exp, target, cleared) loop = expand_start_loop (0); /* Assign value to element index. 
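
(The clearing heuristic applied in the array case above deserves a plain statement: clear the whole target first when elements are missing or when at least 75% of the counted elements are zero, so that only the nonzero entries need individual stores. A sketch of just that test; apart from count and zero_count, the names are illustrative.)

    #include <stddef.h>

    /* Mirror of the store_constructor test: clear first when elements are
       missing or when 4 * zero_count >= 3 * count, i.e. >= 75% zeros.  */
    static int
    should_clear_first (size_t count, size_t zero_count, size_t array_len)
    {
      return count < array_len || 4 * zero_count >= 3 * count;
    }

    /* E.g. for {0, 0, 0, 5}: count = 4, zero_count = 3, and 12 >= 12, so
       the whole array is cleared and only the 5 is stored explicitly.  */
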
*/ - position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype), - size_int (BITS_PER_UNIT)); - position = size_binop (MULT_EXPR, - size_binop (MINUS_EXPR, index, - TYPE_MIN_VALUE (domain)), - position); + position + = convert (ssizetype, + fold (build (MINUS_EXPR, TREE_TYPE (index), + index, TYPE_MIN_VALUE (domain)))); + position = size_binop (MULT_EXPR, position, + convert (ssizetype, + TYPE_SIZE_UNIT (elttype))); + pos_rtx = expand_expr (position, 0, VOIDmode, 0); - addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx); - xtarget = change_address (target, mode, addr); + xtarget = offset_address (target, pos_rtx, + highest_pow2_factor (position)); + xtarget = adjust_address (xtarget, mode, 0); if (TREE_CODE (value) == CONSTRUCTOR) - store_constructor (value, xtarget, cleared); + store_constructor (value, xtarget, cleared, + bitsize / BITS_PER_UNIT); else store_expr (value, xtarget, 0); @@ -4438,50 +4729,57 @@ store_constructor (exp, target, cleared) index, integer_one_node), 0, 0); expand_end_loop (); emit_label (loop_end); - - /* Needed by stupid register allocation. to extend the - lifetime of pseudo-regs used by target past the end - of the loop. */ - emit_insn (gen_rtx_USE (GET_MODE (target), target)); } } - else if ((index != 0 && TREE_CODE (index) != INTEGER_CST) - || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST) + else if ((index != 0 && ! host_integerp (index, 0)) + || ! host_integerp (TYPE_SIZE (elttype), 1)) { - rtx pos_rtx, addr; tree position; if (index == 0) - index = size_int (i); + index = ssize_int (1); if (minelt) - index = size_binop (MINUS_EXPR, index, - TYPE_MIN_VALUE (domain)); - position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype), - size_int (BITS_PER_UNIT)); - position = size_binop (MULT_EXPR, index, position); - pos_rtx = expand_expr (position, 0, VOIDmode, 0); - addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx); - xtarget = change_address (target, mode, addr); + index = convert (ssizetype, + fold (build (MINUS_EXPR, index, + TYPE_MIN_VALUE (domain)))); + + position = size_binop (MULT_EXPR, index, + convert (ssizetype, + TYPE_SIZE_UNIT (elttype))); + xtarget = offset_address (target, + expand_expr (position, 0, VOIDmode, 0), + highest_pow2_factor (position)); + xtarget = adjust_address (xtarget, mode, 0); store_expr (value, xtarget, 0); } else { if (index != 0) - bitpos = ((TREE_INT_CST_LOW (index) - minelt) - * TREE_INT_CST_LOW (TYPE_SIZE (elttype))); + bitpos = ((tree_low_cst (index, 0) - minelt) + * tree_low_cst (TYPE_SIZE (elttype), 1)); else - bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))); - store_constructor_field (target, bitsize, bitpos, - mode, value, type, cleared); + bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); + + if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target) + && TYPE_NONALIASED_COMPONENT (type)) + { + target = copy_rtx (target); + MEM_KEEP_ALIAS_SET_P (target) = 1; + } + + store_constructor_field (target, bitsize, bitpos, mode, value, + type, cleared, get_alias_set (elttype)); + } } } - /* set constructor assignments */ + + /* Set constructor assignments. 
*/ else if (TREE_CODE (type) == SET_TYPE) { tree elt = CONSTRUCTOR_ELTS (exp); - int nbytes = int_size_in_bytes (type), nbits; + unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; tree domain = TYPE_DOMAIN (type); tree domain_min, domain_max, bitlength; @@ -4494,39 +4792,37 @@ store_constructor (exp, target, cleared) Also, if a large set has just a single range, it may also be better to first clear the whole set (using bzero/memset), and then set the bits we want. */ - + /* Check for all zeros. */ - if (elt == NULL_TREE) + if (elt == NULL_TREE && size > 0) { if (!cleared) - clear_storage (target, expr_size (exp), - TYPE_ALIGN (type) / BITS_PER_UNIT); + clear_storage (target, GEN_INT (size)); return; } domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); bitlength = size_binop (PLUS_EXPR, - size_binop (MINUS_EXPR, domain_max, domain_min), - size_one_node); + size_diffop (domain_max, domain_min), + ssize_int (1)); - if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST) - abort (); - nbits = TREE_INT_CST_LOW (bitlength); + nbits = tree_low_cst (bitlength, 1); /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that are "complicated" (more than one range), initialize (the - constant parts) by copying from a constant. */ + constant parts) by copying from a constant. */ if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) { - int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); + unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); char *bit_buffer = (char *) alloca (nbits); HOST_WIDE_INT word = 0; - int bit_pos = 0; - int ibit = 0; - int offset = 0; /* In bytes from beginning of set. */ + unsigned int bit_pos = 0; + unsigned int ibit = 0; + unsigned int offset = 0; /* In bytes from beginning of set. */ + elt = get_set_constructor_bits (exp, bit_buffer, nbits); for (;;) { @@ -4537,6 +4833,7 @@ store_constructor (exp, target, cleared) else word |= 1 << bit_pos; } + bit_pos++; ibit++; if (bit_pos >= set_word_size || ibit == nbits) { @@ -4544,20 +4841,19 @@ store_constructor (exp, target, cleared) { rtx datum = GEN_INT (word); rtx to_rtx; + /* The assumption here is that it is safe to use XEXP if the set is multi-word, but not if it's single-word. */ if (GET_CODE (target) == MEM) - { - to_rtx = plus_constant (XEXP (target, 0), offset); - to_rtx = change_address (target, mode, to_rtx); - } - else if (offset == 0) + to_rtx = adjust_address (target, mode, offset); + else if (offset == 0) to_rtx = target; else abort (); emit_move_insn (to_rtx, datum); } + if (ibit == nbits) break; word = 0; @@ -4567,40 +4863,38 @@ store_constructor (exp, target, cleared) } } else if (!cleared) - { - /* Don't bother clearing storage if the set is all ones. */ - if (TREE_CHAIN (elt) != NULL_TREE - || (TREE_PURPOSE (elt) == NULL_TREE - ? nbits != 1 - : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST - || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST - || (TREE_INT_CST_LOW (TREE_VALUE (elt)) - - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1 - != nbits)))) - clear_storage (target, expr_size (exp), - TYPE_ALIGN (type) / BITS_PER_UNIT); - } - + /* Don't bother clearing storage if the set is all ones. */ + if (TREE_CHAIN (elt) != NULL_TREE + || (TREE_PURPOSE (elt) == NULL_TREE + ? nbits != 1 + : ( ! host_integerp (TREE_VALUE (elt), 0) + || ! 
host_integerp (TREE_PURPOSE (elt), 0) + || (tree_low_cst (TREE_VALUE (elt), 0) + - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 + != (HOST_WIDE_INT) nbits)))) + clear_storage (target, expr_size (exp)); + for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) { - /* start of range of element or NULL */ + /* Start of range of element or NULL. */ tree startbit = TREE_PURPOSE (elt); - /* end of range of element, or element value */ + /* End of range of element, or element value. */ tree endbit = TREE_VALUE (elt); #ifdef TARGET_MEM_FUNCTIONS HOST_WIDE_INT startb, endb; #endif - rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; + rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; bitlength_rtx = expand_expr (bitlength, - NULL_RTX, MEM, EXPAND_CONST_ADDRESS); + NULL_RTX, MEM, EXPAND_CONST_ADDRESS); - /* handle non-range tuple element like [ expr ] */ + /* Handle non-range tuple element like [ expr ]. */ if (startbit == NULL_TREE) { startbit = save_expr (endbit); endbit = startbit; } + startbit = convert (sizetype, startbit); endbit = convert (sizetype, endbit); if (! integer_zerop (domain_min)) @@ -4608,18 +4902,21 @@ store_constructor (exp, target, cleared) startbit = size_binop (MINUS_EXPR, startbit, domain_min); endbit = size_binop (MINUS_EXPR, endbit, domain_min); } - startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, + startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, EXPAND_CONST_ADDRESS); - endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, + endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, EXPAND_CONST_ADDRESS); if (REG_P (target)) { - targetx = assign_stack_temp (GET_MODE (target), - GET_MODE_SIZE (GET_MODE (target)), - 0); + targetx + = assign_temp + ((build_qualified_type (type_for_mode (GET_MODE (target), 0), + TYPE_QUAL_CONST)), + 0, 1, 1); emit_move_insn (targetx, target); } + else if (GET_CODE (target) == MEM) targetx = target; else @@ -4634,7 +4931,7 @@ store_constructor (exp, target, cleared) && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) { - emit_library_call (memset_libfunc, 0, + emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3, plus_constant (XEXP (targetx, 0), startb / BITS_PER_UNIT), @@ -4645,13 +4942,12 @@ store_constructor (exp, target, cleared) } else #endif - { - emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"), - 0, VOIDmode, 4, XEXP (targetx, 0), Pmode, - bitlength_rtx, TYPE_MODE (sizetype), - startbit_rtx, TYPE_MODE (sizetype), - endbit_rtx, TYPE_MODE (sizetype)); - } + emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"), + LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0), + Pmode, bitlength_rtx, TYPE_MODE (sizetype), + startbit_rtx, TYPE_MODE (sizetype), + endbit_rtx, TYPE_MODE (sizetype)); + if (REG_P (target)) emit_move_insn (target, targetx); } @@ -4673,24 +4969,23 @@ store_constructor (exp, target, cleared) has mode VALUE_MODE if that is convenient to do. In this case, UNSIGNEDP must be nonzero if the value is an unsigned type. - ALIGN is the alignment that TARGET is known to have, measured in bytes. - TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. + TYPE is the type of the underlying object, ALIAS_SET is the alias set for the destination. This value will (in general) be different from that for TARGET, since TARGET is a reference to the containing structure. 
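
(When store_field, defined next, cannot address the field directly, it falls back on store_bit_field. The effect of that read-modify-write, using the same width_mask the function computes, can be sketched in plain C under the assumption that bitpos + bitsize fits in a single 64-bit word.)

    #include <stdint.h>

    /* Insert the low BITSIZE bits of VALUE at BITPOS within WORD;
       width_mask mirrors ((HOST_WIDE_INT) 1 << bitsize) - 1 below.  */
    static uint64_t
    store_bits (uint64_t word, uint64_t value, int bitpos, int bitsize)
    {
      uint64_t width_mask
        = bitsize < 64 ? (UINT64_C (1) << bitsize) - 1 : ~UINT64_C (0);

      word &= ~(width_mask << bitpos);          /* clear the old field */
      word |= (value & width_mask) << bitpos;   /* insert the new bits */
      return word;
    }
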
*/ static rtx -store_field (target, bitsize, bitpos, mode, exp, value_mode, - unsignedp, align, total_size, alias_set) +store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type, + alias_set) rtx target; - int bitsize, bitpos; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; enum machine_mode mode; tree exp; enum machine_mode value_mode; int unsignedp; - int align; - int total_size; + tree type; int alias_set; { HOST_WIDE_INT width_mask = 0; @@ -4698,7 +4993,11 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, if (TREE_CODE (exp) == ERROR_MARK) return const0_rtx; - if (bitsize < HOST_BITS_PER_WIDE_INT) + /* If we have nothing to store, do nothing unless the expression has + side-effects. */ + if (bitsize == 0) + return expand_expr (exp, const0_rtx, VOIDmode, 0); + else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT) width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; /* If we are storing into an unaligned field of an aligned union that is @@ -4715,27 +5014,33 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, if (mode == BLKmode && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG)) { - rtx object = assign_stack_temp (GET_MODE (target), - GET_MODE_SIZE (GET_MODE (target)), 0); - rtx blk_object = copy_rtx (object); - - MEM_SET_IN_STRUCT_P (object, 1); - MEM_SET_IN_STRUCT_P (blk_object, 1); - PUT_MODE (blk_object, BLKmode); + rtx object + = assign_temp + (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST), + 0, 1, 1); + rtx blk_object = adjust_address (object, BLKmode, 0); - if (bitsize != GET_MODE_BITSIZE (GET_MODE (target))) + if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) emit_move_insn (object, target); - store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, - align, total_size, alias_set); + store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type, + alias_set); - /* Even though we aren't returning target, we need to - give it the updated value. */ emit_move_insn (target, object); + /* We want to return the BLKmode version of the data. */ return blk_object; } + if (GET_CODE (target) == CONCAT) + { + /* We're storing into a struct containing a single __complex. */ + + if (bitpos != 0) + abort (); + return store_expr (exp, target, 0); + } + /* If the structure is in a register or if the component is a bit field, we cannot use addressing to access it. Use bit-field techniques or SUBREG to store in it. */ @@ -4748,9 +5053,15 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, || GET_CODE (target) == SUBREG /* If the field isn't aligned enough to store as an ordinary memref, store it as a bit field. */ - || (SLOW_UNALIGNED_ACCESS - && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)) - || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0)) + || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)) + && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode) + || bitpos % GET_MODE_ALIGNMENT (mode))) + /* If the RHS and field are a constant size and the size of the + RHS isn't the same size as the bitfield, we must use bitfield + operations. */ + || (bitsize >= 0 + && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST + && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) { rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); @@ -4759,7 +5070,7 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, low-order bits. However, if EXP's type is a record and this is big-endian machine, we want the upper BITSIZE bits. 
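
(The assign_temp sequence at the top of store_field -- copy the register to a stack temporary, store through a BLKmode view of it, copy back -- is the classic spill/poke/reload pattern. In miniature, and purely as an illustration:)

    #include <stdint.h>
    #include <string.h>

    /* Registers cannot be addressed byte-wise, so spill to memory, poke
       the bytes, and reload -- the object/blk_object dance above.  */
    static uint64_t
    poke_byte_in_reg (uint64_t reg, unsigned byteno, uint8_t value)
    {
      uint8_t tmp[sizeof reg];          /* the stack temporary             */

      memcpy (tmp, &reg, sizeof reg);   /* emit_move_insn (object, target) */
      tmp[byteno] = value;              /* store through the BLK view      */
      memcpy (&reg, tmp, sizeof reg);   /* emit_move_insn (target, object) */
      return reg;
    }
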
*/ if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT - && bitsize < GET_MODE_BITSIZE (GET_MODE (temp)) + && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, size_int (GET_MODE_BITSIZE (GET_MODE (temp)) @@ -4781,24 +5092,22 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, || bitpos % BITS_PER_UNIT != 0) abort (); - target = change_address (target, VOIDmode, - plus_constant (XEXP (target, 0), - bitpos / BITS_PER_UNIT)); - + target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); emit_block_move (target, temp, GEN_INT ((bitsize + BITS_PER_UNIT - 1) - / BITS_PER_UNIT), - 1); + / BITS_PER_UNIT)); return value_mode == VOIDmode ? const0_rtx : target; } /* Store the value in the bitfield. */ - store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size); + store_bit_field (target, bitsize, bitpos, mode, temp, + int_size_in_bytes (type)); + if (value_mode != VOIDmode) { - /* The caller wants an rtx for the value. */ - /* If possible, avoid refetching from the bitfield itself. */ + /* The caller wants an rtx for the value. + If possible, avoid refetching from the bitfield itself. */ if (width_mask != 0 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))) { @@ -4806,7 +5115,14 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, enum machine_mode tmode; if (unsignedp) - return expand_and (temp, GEN_INT (width_mask), NULL_RTX); + return expand_and (temp, + GEN_INT + (trunc_int_for_mode + (width_mask, + GET_MODE (temp) == VOIDmode + ? value_mode + : GET_MODE (temp))), NULL_RTX); + tmode = GET_MODE (temp); if (tmode == VOIDmode) tmode = value_mode; @@ -4814,16 +5130,17 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0); return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0); } + return extract_bit_field (target, bitsize, bitpos, unsignedp, - NULL_RTX, value_mode, 0, align, - total_size); + NULL_RTX, value_mode, VOIDmode, + int_size_in_bytes (type)); } return const0_rtx; } else { rtx addr = XEXP (target, 0); - rtx to_rtx; + rtx to_rtx = target; /* If a value is wanted, it must be the lhs; so make the address stable for multiple use. */ @@ -4835,24 +5152,26 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, && GET_CODE (XEXP (addr, 1)) == CONST_INT && (XEXP (addr, 0) == virtual_incoming_args_rtx || XEXP (addr, 0) == virtual_stack_vars_rtx))) - addr = copy_to_reg (addr); + to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr)); /* Now build a reference to just the desired component. */ - to_rtx = copy_rtx (change_address (target, mode, - plus_constant (addr, - (bitpos - / BITS_PER_UNIT)))); + to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); + + if (to_rtx == target) + to_rtx = copy_rtx (to_rtx); + MEM_SET_IN_STRUCT_P (to_rtx, 1); - MEM_ALIAS_SET (to_rtx) = alias_set; + if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) + set_mem_alias_set (to_rtx, alias_set); return store_expr (exp, to_rtx, value_mode != VOIDmode); } } /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, - or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or - ARRAY_REFs and find the ultimate containing object, which we return. + an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these + codes and find the ultimate containing object, which we return. 
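
(A concrete picture of the decomposition get_inner_reference performs, whose contract begins above: an access such as s.arr[i].f is peeled into a constant bit position plus a variable byte offset. The struct below is only an example layout, computed by hand with offsetof.)

    #include <stddef.h>
    #include <stdio.h>

    struct inner { int pad; short f; };
    struct outer { char c; struct inner arr[8]; };

    int
    main (void)
    {
      size_t i = 3;

      /* For s.arr[i].f: a variable offset of i * sizeof (struct inner)
         bytes, plus a constant bit position for the fixed fields.  */
      size_t var_offset = i * sizeof (struct inner);
      size_t const_bitpos = (offsetof (struct outer, arr)
                             + offsetof (struct inner, f)) * 8;

      printf ("offset = %zu bytes, bitpos = %zu bits\n",
              var_offset, const_bitpos);
      return 0;
    }
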
We set *PBITSIZE to the size in bits that we want, *PBITPOS to the bit position, and *PUNSIGNEDP to the signedness of the field. @@ -4860,9 +5179,6 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, giving the variable offset (in units) in *POFFSET. This offset is in addition to the bit position. If the position is not variable, we store 0 in *POFFSET. - We set *PALIGNMENT to the alignment in bytes of the address that will be - computed. This is the alignment of the thing we return if *POFFSET - is zero, but can be more less strictly aligned if *POFFSET is nonzero. If any of the extraction expressions is volatile, we store 1 in *PVOLATILEP. Otherwise we don't change that. @@ -4873,31 +5189,34 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, If the field describes a variable-sized object, *PMODE is set to VOIDmode and *PBITSIZE is set to -1. An access cannot be made in - this case, but the address of the object can be found. */ + this case, but the address of the object can be found. */ tree get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, - punsignedp, pvolatilep, palignment) + punsignedp, pvolatilep) tree exp; - int *pbitsize; - int *pbitpos; + HOST_WIDE_INT *pbitsize; + HOST_WIDE_INT *pbitpos; tree *poffset; enum machine_mode *pmode; int *punsignedp; int *pvolatilep; - int *palignment; { - tree orig_exp = exp; tree size_tree = 0; enum machine_mode mode = VOIDmode; - tree offset = integer_zero_node; - unsigned int alignment = BIGGEST_ALIGNMENT; + tree offset = size_zero_node; + tree bit_offset = bitsize_zero_node; + tree placeholder_ptr = 0; + tree tem; + /* First get the mode, signedness, and size. We do this from just the + outermost expression. */ if (TREE_CODE (exp) == COMPONENT_REF) { size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) mode = DECL_MODE (TREE_OPERAND (exp, 1)); + *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1)); } else if (TREE_CODE (exp) == BIT_FIELD_REF) @@ -4908,125 +5227,100 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, else { mode = TYPE_MODE (TREE_TYPE (exp)); + *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); + if (mode == BLKmode) size_tree = TYPE_SIZE (TREE_TYPE (exp)); - - *pbitsize = GET_MODE_BITSIZE (mode); - *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); + else + *pbitsize = GET_MODE_BITSIZE (mode); } - - if (size_tree) + + if (size_tree != 0) { - if (TREE_CODE (size_tree) != INTEGER_CST) + if (! host_integerp (size_tree, 1)) mode = BLKmode, *pbitsize = -1; else - *pbitsize = TREE_INT_CST_LOW (size_tree); + *pbitsize = tree_low_cst (size_tree, 1); } /* Compute cumulative bit-offset for nested component-refs and array-refs, and find the ultimate containing object. */ - - *pbitpos = 0; - while (1) { - if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF) + if (TREE_CODE (exp) == BIT_FIELD_REF) + bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); + else if (TREE_CODE (exp) == COMPONENT_REF) { - tree pos = (TREE_CODE (exp) == COMPONENT_REF - ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1)) - : TREE_OPERAND (exp, 2)); - tree constant = integer_zero_node, var = pos; + tree field = TREE_OPERAND (exp, 1); + tree this_offset = DECL_FIELD_OFFSET (field); /* If this field hasn't been filled in yet, don't go past it. This should only happen when folding expressions made during type construction. */ - if (pos == 0) + if (this_offset == 0) break; + else if (! 
TREE_CONSTANT (this_offset) + && contains_placeholder_p (this_offset)) + this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp); - /* Assume here that the offset is a multiple of a unit. - If not, there should be an explicitly added constant. */ - if (TREE_CODE (pos) == PLUS_EXPR - && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST) - constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0); - else if (TREE_CODE (pos) == INTEGER_CST) - constant = pos, var = integer_zero_node; + offset = size_binop (PLUS_EXPR, offset, this_offset); + bit_offset = size_binop (PLUS_EXPR, bit_offset, + DECL_FIELD_BIT_OFFSET (field)); - *pbitpos += TREE_INT_CST_LOW (constant); - offset = size_binop (PLUS_EXPR, offset, - size_binop (EXACT_DIV_EXPR, var, - size_int (BITS_PER_UNIT))); + /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ } - else if (TREE_CODE (exp) == ARRAY_REF) + else if (TREE_CODE (exp) == ARRAY_REF + || TREE_CODE (exp) == ARRAY_RANGE_REF) { - /* This code is based on the code in case ARRAY_REF in expand_expr - below. We assume here that the size of an array element is - always an integral multiple of BITS_PER_UNIT. */ - tree index = TREE_OPERAND (exp, 1); - tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); - tree low_bound - = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; - tree index_type = TREE_TYPE (index); - tree xindex; + tree array = TREE_OPERAND (exp, 0); + tree domain = TYPE_DOMAIN (TREE_TYPE (array)); + tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0); + tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array))); + + /* We assume all arrays have sizes that are a multiple of a byte. + First subtract the lower bound, if any, in the type of the + index, then convert to sizetype and multiply by the size of the + array element. */ + if (low_bound != 0 && ! integer_zerop (low_bound)) + index = fold (build (MINUS_EXPR, TREE_TYPE (index), + index, low_bound)); + + /* If the index has a self-referential type, pass it to a + WITH_RECORD_EXPR; if the component size is, pass our + component to one. */ + if (! TREE_CONSTANT (index) + && contains_placeholder_p (index)) + index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp); + if (! TREE_CONSTANT (unit_size) + && contains_placeholder_p (unit_size)) + unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array); - if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype)) - { - index = convert (type_for_size (TYPE_PRECISION (sizetype), 0), - index); - index_type = TREE_TYPE (index); - } - - /* Optimize the special-case of a zero lower bound. - - We convert the low_bound to sizetype to avoid some problems - with constant folding. (E.g. suppose the lower bound is 1, - and its mode is QI. Without the conversion, (ARRAY - +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) - +INDEX), which becomes (ARRAY+255+INDEX). Oops!) - - But sizetype isn't quite right either (especially if - the lowbound is negative). FIXME */ - - if (! 
integer_zerop (low_bound)) - index = fold (build (MINUS_EXPR, index_type, index, - convert (sizetype, low_bound))); - - if (TREE_CODE (index) == INTEGER_CST) - { - index = convert (sbitsizetype, index); - index_type = TREE_TYPE (index); - } + offset = size_binop (PLUS_EXPR, offset, + size_binop (MULT_EXPR, + convert (sizetype, index), + unit_size)); + } - xindex = fold (build (MULT_EXPR, sbitsizetype, index, - convert (sbitsizetype, - TYPE_SIZE (TREE_TYPE (exp))))); + else if (TREE_CODE (exp) == PLACEHOLDER_EXPR) + { + tree new = find_placeholder (exp, &placeholder_ptr); - if (TREE_CODE (xindex) == INTEGER_CST - && TREE_INT_CST_HIGH (xindex) == 0) - *pbitpos += TREE_INT_CST_LOW (xindex); + /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR. + We might have been called from tree optimization where we + haven't set up an object yet. */ + if (new == 0) + break; else - { - /* Either the bit offset calculated above is not constant, or - it overflowed. In either case, redo the multiplication - against the size in units. This is especially important - in the non-constant case to avoid a division at runtime. */ - xindex = fold (build (MULT_EXPR, ssizetype, index, - convert (ssizetype, - TYPE_SIZE_UNIT (TREE_TYPE (exp))))); - - if (contains_placeholder_p (xindex)) - xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp); - - offset = size_binop (PLUS_EXPR, offset, xindex); - } + exp = new; + + continue; } else if (TREE_CODE (exp) != NON_LVALUE_EXPR + && TREE_CODE (exp) != VIEW_CONVERT_EXPR && ! ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR) - && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE - && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) - != UNION_TYPE)) && (TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))) break; @@ -5035,65 +5329,54 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, if (TREE_THIS_VOLATILE (exp)) *pvolatilep = 1; - /* If the offset is non-constant already, then we can't assume any - alignment more than the alignment here. */ - if (! integer_zerop (offset)) - alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp))); - exp = TREE_OPERAND (exp, 0); } - if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd') - alignment = MIN (alignment, DECL_ALIGN (exp)); - else if (TREE_TYPE (exp) != 0) - alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp))); - - if (integer_zerop (offset)) - offset = 0; - - if (offset != 0 && contains_placeholder_p (offset)) - offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp); + /* If OFFSET is constant, see if we can return the whole thing as a + constant bit position. Otherwise, split it up. */ + if (host_integerp (offset, 0) + && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), + bitsize_unit_node)) + && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) + && host_integerp (tem, 0)) + *pbitpos = tree_low_cst (tem, 0), *poffset = 0; + else + *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; *pmode = mode; - *poffset = offset; - *palignment = alignment / BITS_PER_UNIT; return exp; } -/* Subroutine of expand_exp: compute memory_usage from modifier. */ -static enum memory_use_mode -get_memory_usage_from_modifier (modifier) - enum expand_modifier modifier; +/* Return 1 if T is an expression that get_inner_reference handles. 
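
(The tail of get_inner_reference, just above, decides whether the accumulated byte offset can be folded into the constant bit position. The same split, reduced to plain arithmetic and assuming BITS_PER_UNIT is 8; the struct and names are illustrative.)

    struct position { long bitpos; long var_offset; int offset_is_var; };

    /* Fold a constant byte offset into the bit position; otherwise
       report the constant and variable parts separately.  */
    static struct position
    split_position (long byte_offset, long bit_offset, int offset_is_constant)
    {
      struct position p;

      if (offset_is_constant)
        {
          p.bitpos = byte_offset * 8 + bit_offset;
          p.var_offset = 0;
          p.offset_is_var = 0;
        }
      else
        {
          p.bitpos = bit_offset;          /* only the residual bits */
          p.var_offset = byte_offset;     /* stays symbolic         */
          p.offset_is_var = 1;
        }
      return p;
    }
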
*/ + +int +handled_component_p (t) + tree t; { - switch (modifier) + switch (TREE_CODE (t)) { - case EXPAND_NORMAL: - case EXPAND_SUM: - return MEMORY_USE_RO; - break; - case EXPAND_MEMORY_USE_WO: - return MEMORY_USE_WO; - break; - case EXPAND_MEMORY_USE_RW: - return MEMORY_USE_RW; - break; - case EXPAND_MEMORY_USE_DONT: - /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into - MEMORY_USE_DONT, because they are modifiers to a call of - expand_expr in the ADDR_EXPR case of expand_expr. */ - case EXPAND_CONST_ADDRESS: - case EXPAND_INITIALIZER: - return MEMORY_USE_DONT; - case EXPAND_MEMORY_USE_BAD: + case BIT_FIELD_REF: + case COMPONENT_REF: + case ARRAY_REF: + case ARRAY_RANGE_REF: + case NON_LVALUE_EXPR: + case VIEW_CONVERT_EXPR: + return 1; + + case NOP_EXPR: + case CONVERT_EXPR: + return (TYPE_MODE (TREE_TYPE (t)) + == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0)))); + default: - abort (); + return 0; } } -/* Given an rtx VALUE that may contain additions and multiplications, - return an equivalent value that just refers to a register or memory. - This is done by generating instructions to perform the arithmetic - and returning a pseudo-register containing the value. +/* Given an rtx VALUE that may contain additions and multiplications, return + an equivalent value that just refers to a register, memory, or constant. + This is done by generating instructions to perform the arithmetic and + returning a pseudo-register containing the value. The returned value may be a REG, SUBREG, MEM or constant. */ @@ -5101,17 +5384,16 @@ rtx force_operand (value, target) rtx value, target; { - register optab binoptab = 0; + optab binoptab = 0; /* Use a temporary to force order of execution of calls to `force_operand'. */ rtx tmp; - register rtx op2; + rtx op2; /* Use subtarget as the target for operand 0 of a binary operation. */ - register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0); + rtx subtarget = get_subtarget (target); /* Check for a PIC address load. */ - if (flag_pic - && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS) + if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS) && XEXP (value, 0) == pic_offset_table_rtx && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF || GET_CODE (XEXP (value, 1)) == LABEL_REF @@ -5136,7 +5418,7 @@ force_operand (value, target) tmp = force_operand (XEXP (value, 0), subtarget); return expand_mult (GET_MODE (value), tmp, force_operand (op2, NULL_RTX), - target, 0); + target, 1); } if (binoptab) @@ -5170,7 +5452,7 @@ force_operand (value, target) force_operand (XEXP (XEXP (value, 0), 1), 0), target, 0, OPTAB_LIB_WIDEN); } - + tmp = force_operand (XEXP (value, 0), subtarget); return expand_binop (GET_MODE (value), binoptab, tmp, force_operand (op2, NULL_RTX), @@ -5178,67 +5460,25 @@ force_operand (value, target) /* We give UNSIGNEDP = 0 to expand_binop because the only operations we are expanding here are signed ones. */ } - return value; -} - -/* Subroutine of expand_expr: - save the non-copied parts (LIST) of an expr (LHS), and return a list - which can restore these values to their previous values, - should something modify their storage. 
*/ - -static tree -save_noncopied_parts (lhs, list) - tree lhs; - tree list; -{ - tree tail; - tree parts = 0; - - for (tail = list; tail; tail = TREE_CHAIN (tail)) - if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST) - parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail))); - else - { - tree part = TREE_VALUE (tail); - tree part_type = TREE_TYPE (part); - tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part); - rtx target = assign_temp (part_type, 0, 1, 1); - if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0))) - target = change_address (target, TYPE_MODE (part_type), NULL_RTX); - parts = tree_cons (to_be_saved, - build (RTL_EXPR, part_type, NULL_TREE, - (tree) target), - parts); - store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0); - } - return parts; -} - -/* Subroutine of expand_expr: - record the non-copied parts (LIST) of an expr (LHS), and return a list - which specifies the initial values of these parts. */ -static tree -init_noncopied_parts (lhs, list) - tree lhs; - tree list; -{ - tree tail; - tree parts = 0; +#ifdef INSN_SCHEDULING + /* On machines that have insn scheduling, we want all memory reference to be + explicit, so we need to deal with such paradoxical SUBREGs. */ + if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM + && (GET_MODE_SIZE (GET_MODE (value)) + > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) + value + = simplify_gen_subreg (GET_MODE (value), + force_reg (GET_MODE (SUBREG_REG (value)), + force_operand (SUBREG_REG (value), + NULL_RTX)), + GET_MODE (SUBREG_REG (value)), + SUBREG_BYTE (value)); +#endif - for (tail = list; tail; tail = TREE_CHAIN (tail)) - if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST) - parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail))); - else if (TREE_PURPOSE (tail)) - { - tree part = TREE_VALUE (tail); - tree part_type = TREE_TYPE (part); - tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part); - parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts); - } - return parts; + return value; } - + /* Subroutine of expand_expr: return nonzero iff there is no way that EXP can reference X, which is being modified. TOP_P is nonzero if this call is going to be used to determine whether we need a temporary @@ -5247,7 +5487,7 @@ init_noncopied_parts (lhs, list) It is always safe for this routine to return zero since it merely searches for optimization opportunities. */ -static int +int safe_from_p (x, exp, top_p) rtx x; tree exp; @@ -5255,10 +5495,7 @@ safe_from_p (x, exp, top_p) { rtx exp_rtl = 0; int i, nops; - static int save_expr_count; - static int save_expr_size = 0; - static tree *save_expr_rewritten; - static tree save_expr_trees[256]; + static tree save_expr_list; if (x == 0 /* If EXP has varying size, we MUST use a target since we currently @@ -5267,37 +5504,20 @@ safe_from_p (x, exp, top_p) So we assume here that something at a higher level has prevented a clash. This is somewhat bogus, but the best we can do. Only do this when X is BLKmode and when we are at the top level. 
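
(The question safe_from_p answers is the classic in-place-update hazard: may the target be reused while the right-hand side still reads it? The same decision appears in ordinary C when source and destination may overlap; an illustrative sketch, not compiler code -- comparing unrelated pointers this way is itself only for illustration.)

    #include <assert.h>
    #include <string.h>

    /* If dst may overlap src, storing directly would clobber bytes still
       to be read -- the analogue of safe_from_p returning 0, which
       forces the store to go through a temporary.  */
    static void
    assign_block (char *dst, const char *src, size_t n)
    {
      char tmp[64];

      assert (n <= sizeof tmp);
      if (dst < src + n && src < dst + n)
        {
          memcpy (tmp, src, n);     /* not safe: use a temporary */
          memcpy (dst, tmp, n);
        }
      else
        memcpy (dst, src, n);       /* safe: store directly */
    }
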
*/ - || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0 + || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) != INTEGER_CST) - && GET_MODE (x) == BLKmode)) + && GET_MODE (x) == BLKmode) + /* If X is in the outgoing argument area, it is always safe. */ + || (GET_CODE (x) == MEM + && (XEXP (x, 0) == virtual_outgoing_args_rtx + || (GET_CODE (XEXP (x, 0)) == PLUS + && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) return 1; - if (top_p && save_expr_size == 0) - { - int rtn; - - save_expr_count = 0; - save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]); - save_expr_rewritten = &save_expr_trees[0]; - - rtn = safe_from_p (x, exp, 1); - - for (i = 0; i < save_expr_count; ++i) - { - if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK) - abort (); - TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR); - } - - save_expr_size = 0; - - return rtn; - } - /* If this is a subreg of a hard register, declare it unsafe, otherwise, find the underlying pseudo. */ if (GET_CODE (x) == SUBREG) @@ -5307,17 +5527,35 @@ safe_from_p (x, exp, top_p) return 0; } - /* If X is a location in the outgoing argument area, it is always safe. */ - if (GET_CODE (x) == MEM - && (XEXP (x, 0) == virtual_outgoing_args_rtx - || (GET_CODE (XEXP (x, 0)) == PLUS - && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))) - return 1; + /* A SAVE_EXPR might appear many times in the expression passed to the + top-level safe_from_p call, and if it has a complex subexpression, + examining it multiple times could result in a combinatorial explosion. + E.g. on an Alpha running at least 200MHz, a Fortran test case compiled + with optimization took about 28 minutes to compile -- even though it was + only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE + and turn that off when we are done. We keep a list of the SAVE_EXPRs + we have processed. Note that the only test of top_p was above. */ + + if (top_p) + { + int rtn; + tree t; + + save_expr_list = 0; + + rtn = safe_from_p (x, exp, 0); + + for (t = save_expr_list; t != 0; t = TREE_CHAIN (t)) + TREE_PRIVATE (TREE_PURPOSE (t)) = 0; + return rtn; + } + + /* Now look at our tree code and possibly recurse. */ switch (TREE_CODE_CLASS (TREE_CODE (exp))) { case 'd': - exp_rtl = DECL_RTL (exp); + exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX; break; case 'c': @@ -5352,26 +5590,40 @@ safe_from_p (x, exp, top_p) switch (TREE_CODE (exp)) { case ADDR_EXPR: - return (staticp (TREE_OPERAND (exp, 0)) - || safe_from_p (x, TREE_OPERAND (exp, 0), 0) - || TREE_STATIC (exp)); + /* If the operand is static or we are static, we can't conflict. + Likewise if we don't conflict with the operand at all. */ + if (staticp (TREE_OPERAND (exp, 0)) + || TREE_STATIC (exp) + || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) + return 1; + + /* Otherwise, the only way this can conflict is if we are taking + the address of a DECL a that address if part of X, which is + very rare. 
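
(The TREE_PRIVATE marking introduced just below is the standard cure for exponential re-walks of a DAG that is traversed as if it were a tree: a shared SAVE_EXPR is examined once, not once per reference, and the marks are cleared again afterwards. A generic sketch with a hypothetical node type:)

    struct node { int visited; int nkids; struct node *kid[2]; };

    /* Walk a DAG that may share subtrees.  Without the visited flag a
       shared node is re-examined once per reference -- exponential in
       the nesting depth; with it, each node is seen exactly once.  */
    static int
    walk (struct node *n)
    {
      int i;

      if (n == 0 || n->visited)
        return 1;
      n->visited = 1;               /* cf. TREE_PRIVATE (exp) = 1 */
      for (i = 0; i < n->nkids; i++)
        if (! walk (n->kid[i]))
          return 0;
      return 1;
    }
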
*/ + exp = TREE_OPERAND (exp, 0); + if (DECL_P (exp)) + { + if (!DECL_RTL_SET_P (exp) + || GET_CODE (DECL_RTL (exp)) != MEM) + return 0; + else + exp_rtl = XEXP (DECL_RTL (exp), 0); + } + break; case INDIRECT_REF: - if (GET_CODE (x) == MEM) + if (GET_CODE (x) == MEM + && alias_sets_conflict_p (MEM_ALIAS_SET (x), + get_alias_set (exp))) return 0; break; case CALL_EXPR: - exp_rtl = CALL_EXPR_RTL (exp); - if (exp_rtl == 0) - { - /* Assume that the call will clobber all hard registers and - all of memory. */ - if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) - || GET_CODE (x) == MEM) - return 0; - } - + /* Assume that the call will clobber all hard registers and + all of memory. */ + if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) + || GET_CODE (x) == MEM) + return 0; break; case RTL_EXPR: @@ -5385,7 +5637,7 @@ safe_from_p (x, exp, top_p) break; case WITH_CLEANUP_EXPR: - exp_rtl = RTL_EXPR_RTL (exp); + exp_rtl = WITH_CLEANUP_EXPR_RTL (exp); break; case CLEANUP_POINT_EXPR: @@ -5396,37 +5648,20 @@ safe_from_p (x, exp, top_p) if (exp_rtl) break; - /* This SAVE_EXPR might appear many times in the top-level - safe_from_p() expression, and if it has a complex - subexpression, examining it multiple times could result - in a combinatorial explosion. E.g. on an Alpha - running at least 200MHz, a Fortran test case compiled with - optimization took about 28 minutes to compile -- even though - it was only a few lines long, and the complicated line causing - so much time to be spent in the earlier version of safe_from_p() - had only 293 or so unique nodes. - - So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember - where it is so we can turn it back in the top-level safe_from_p() - when we're done. */ - - /* For now, don't bother re-sizing the array. */ - if (save_expr_count >= save_expr_size) - return 0; - save_expr_rewritten[save_expr_count++] = exp; + /* If we've already scanned this, don't do it again. Otherwise, + show we've scanned it and record for clearing the flag if we're + going on. */ + if (TREE_PRIVATE (exp)) + return 1; - nops = tree_code_length[(int) SAVE_EXPR]; - for (i = 0; i < nops; i++) + TREE_PRIVATE (exp) = 1; + if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0)) { - tree operand = TREE_OPERAND (exp, i); - if (operand == NULL_TREE) - continue; - TREE_SET_CODE (exp, ERROR_MARK); - if (!safe_from_p (x, operand, 0)) - return 0; - TREE_SET_CODE (exp, SAVE_EXPR); + TREE_PRIVATE (exp) = 0; + return 0; } - TREE_SET_CODE (exp, ERROR_MARK); + + save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list); return 1; case BIND_EXPR: @@ -5435,9 +5670,9 @@ safe_from_p (x, exp, top_p) return safe_from_p (x, TREE_OPERAND (exp, 1), 0); case METHOD_CALL_EXPR: - /* This takes a rtx argument, but shouldn't appear here. */ + /* This takes an rtx argument, but shouldn't appear here. */ abort (); - + default: break; } @@ -5446,11 +5681,18 @@ safe_from_p (x, exp, top_p) if (exp_rtl) break; - nops = tree_code_length[(int) TREE_CODE (exp)]; + nops = first_rtl_op (TREE_CODE (exp)); for (i = 0; i < nops; i++) if (TREE_OPERAND (exp, i) != 0 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) return 0; + + /* If this is a language-specific tree code, it may require + special handling. */ + if ((unsigned int) TREE_CODE (exp) + >= (unsigned int) LAST_AND_UNUSED_TREE_CODE + && !(*lang_hooks.safe_from_p) (x, exp)) + return 0; } /* If we have an rtl, find any enclosed object. 
Then see if we conflict @@ -5466,32 +5708,17 @@ safe_from_p (x, exp, top_p) } /* If the rtl is X, then it is not safe. Otherwise, it is unless both - are memory and EXP is not readonly. */ + are memory and they conflict. */ return ! (rtx_equal_p (x, exp_rtl) || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM - && ! TREE_READONLY (exp))); + && true_dependence (exp_rtl, GET_MODE (x), x, + rtx_addr_varies_p))); } /* If we reach here, it is safe. */ return 1; } -/* Subroutine of expand_expr: return nonzero iff EXP is an - expression whose type is statically determinable. */ - -static int -fixed_type_p (exp) - tree exp; -{ - if (TREE_CODE (exp) == PARM_DECL - || TREE_CODE (exp) == VAR_DECL - || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR - || TREE_CODE (exp) == COMPONENT_REF - || TREE_CODE (exp) == ARRAY_REF) - return 1; - return 0; -} - /* Subroutine of expand_expr: return rtx if EXP is a variable or parameter; else return 0. */ @@ -5511,13 +5738,18 @@ var_rtx (exp) } #ifdef MAX_INTEGER_COMPUTATION_MODE + void check_max_integer_computation_mode (exp) - tree exp; + tree exp; { - enum tree_code code = TREE_CODE (exp); + enum tree_code code; enum machine_mode mode; + /* Strip any NOPs that don't change the mode. */ + STRIP_NOPS (exp); + code = TREE_CODE (exp); + /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */ if (code == NOP_EXPR && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) @@ -5532,7 +5764,7 @@ check_max_integer_computation_mode (exp) mode = TYPE_MODE (TREE_TYPE (exp)); if (GET_MODE_CLASS (mode) == MODE_INT && mode > MAX_INTEGER_COMPUTATION_MODE) - fatal ("unsupported wide integer operation"); + internal_error ("unsupported wide integer operation"); } /* Check operand of a unary op. */ @@ -5541,25 +5773,159 @@ check_max_integer_computation_mode (exp) mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); if (GET_MODE_CLASS (mode) == MODE_INT && mode > MAX_INTEGER_COMPUTATION_MODE) - fatal ("unsupported wide integer operation"); + internal_error ("unsupported wide integer operation"); } - + /* Check operands of a binary/comparison op. */ if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<') { mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); if (GET_MODE_CLASS (mode) == MODE_INT && mode > MAX_INTEGER_COMPUTATION_MODE) - fatal ("unsupported wide integer operation"); + internal_error ("unsupported wide integer operation"); mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))); if (GET_MODE_CLASS (mode) == MODE_INT && mode > MAX_INTEGER_COMPUTATION_MODE) - fatal ("unsupported wide integer operation"); + internal_error ("unsupported wide integer operation"); } } #endif + +/* Return the highest power of two that EXP is known to be a multiple of. + This is used in updating alignment of MEMs in array references. */ + +static HOST_WIDE_INT +highest_pow2_factor (exp) + tree exp; +{ + HOST_WIDE_INT c0, c1; + + switch (TREE_CODE (exp)) + { + case INTEGER_CST: + /* If the integer is expressible in a HOST_WIDE_INT, we can find the + lowest bit that's a one. If the result is zero, return + BIGGEST_ALIGNMENT. We need to handle this case since we can find it + in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overflows, + we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any + later ICE. */ + if (TREE_CONSTANT_OVERFLOW (exp) + || integer_zerop (exp)) + return BIGGEST_ALIGNMENT; + else if (host_integerp (exp, 0)) + { + c0 = tree_low_cst (exp, 0); + c0 = c0 < 0 ? 
- c0 : c0; + return c0 & -c0; + } + break; + + case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: + c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); + c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); + return MIN (c0, c1); + + case MULT_EXPR: + c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); + c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); + return c0 * c1; + + case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: + case CEIL_DIV_EXPR: + if (integer_pow2p (TREE_OPERAND (exp, 1)) + && host_integerp (TREE_OPERAND (exp, 1), 1)) + { + c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); + c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); + return MAX (1, c0 / c1); + } + break; + + case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR: + case SAVE_EXPR: case WITH_RECORD_EXPR: + return highest_pow2_factor (TREE_OPERAND (exp, 0)); + + case COMPOUND_EXPR: + return highest_pow2_factor (TREE_OPERAND (exp, 1)); + + case COND_EXPR: + c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); + c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); + return MIN (c0, c1); + + default: + break; + } + + return 1; +} + +/* Return an object on the placeholder list that matches EXP, a + PLACEHOLDER_EXPR. An object "matches" if it is of the type of the + PLACEHOLDER_EXPR or a pointer type to it. For further information, see + tree.def. If no such object is found, return 0. If PLIST is nonzero, it + is a location which initially points to a starting location in the + placeholder list (zero means start of the list) and where a pointer into + the placeholder list at which the object is found is placed. */ +tree +find_placeholder (exp, plist) + tree exp; + tree *plist; +{ + tree type = TREE_TYPE (exp); + tree placeholder_expr; + + for (placeholder_expr + = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list; + placeholder_expr != 0; + placeholder_expr = TREE_CHAIN (placeholder_expr)) + { + tree need_type = TYPE_MAIN_VARIANT (type); + tree elt; + + /* Find the outermost reference that is of the type we want. If none, + see if any object has a type that is a pointer to the type we + want. */ + for (elt = TREE_PURPOSE (placeholder_expr); elt != 0; + elt = ((TREE_CODE (elt) == COMPOUND_EXPR + || TREE_CODE (elt) == COND_EXPR) + ? TREE_OPERAND (elt, 1) + : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' + || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' + || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' + || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') + ? TREE_OPERAND (elt, 0) : 0)) + if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) + { + if (plist) + *plist = placeholder_expr; + return elt; + } + + for (elt = TREE_PURPOSE (placeholder_expr); elt != 0; + elt + = ((TREE_CODE (elt) == COMPOUND_EXPR + || TREE_CODE (elt) == COND_EXPR) + ? TREE_OPERAND (elt, 1) + : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' + || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' + || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' + || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') + ? TREE_OPERAND (elt, 0) : 0)) + if (POINTER_TYPE_P (TREE_TYPE (elt)) + && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) + == need_type)) + { + if (plist) + *plist = placeholder_expr; + return build1 (INDIRECT_REF, need_type, elt); + } + } + + return 0; +} /* expand_expr: generate code for computing expression EXP. An rtx for the computed value is returned. The value is never null. 
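
(The INTEGER_CST case of highest_pow2_factor, a little above, rests on the two's-complement identity that n & -n isolates the lowest set bit -- for nonzero n, exactly the largest power of two dividing it:)

    #include <stdio.h>

    /* Largest power of two dividing n, via the same normalization and
       bit trick as the c0 code above.  */
    static long
    lowest_set_bit (long n)
    {
      n = n < 0 ? -n : n;
      return n & -n;
    }

    int
    main (void)
    {
      printf ("%ld %ld %ld\n", lowest_set_bit (40),
              lowest_set_bit (96), lowest_set_bit (7));  /* 8 32 1 */
      return 0;
    }
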
@@ -5598,28 +5964,23 @@ check_max_integer_computation_mode (exp) rtx expand_expr (exp, target, tmode, modifier) - register tree exp; + tree exp; rtx target; enum machine_mode tmode; enum expand_modifier modifier; { - /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. - This is static so it will be accessible to our recursive callees. */ - static tree placeholder_list = 0; - register rtx op0, op1, temp; + rtx op0, op1, temp; tree type = TREE_TYPE (exp); int unsignedp = TREE_UNSIGNED (type); - register enum machine_mode mode; - register enum tree_code code = TREE_CODE (exp); + enum machine_mode mode; + enum tree_code code = TREE_CODE (exp); optab this_optab; rtx subtarget, original_target; int ignore; tree context; - /* Used by check-memory-usage to make modifier read only. */ - enum expand_modifier ro_modifier; - /* Handle ERROR_MARK before anybody tries to access its type. */ - if (TREE_CODE (exp) == ERROR_MARK) + /* Handle ERROR_MARK before anybody tries to access its type. */ + if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK) { op0 = CONST0_RTX (tmode); if (op0 != 0) @@ -5629,30 +5990,14 @@ expand_expr (exp, target, tmode, modifier) mode = TYPE_MODE (type); /* Use subtarget as the target for operand 0 of a binary operation. */ - subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0); + subtarget = get_subtarget (target); original_target = target; ignore = (target == const0_rtx || ((code == NON_LVALUE_EXPR || code == NOP_EXPR || code == CONVERT_EXPR || code == REFERENCE_EXPR - || code == COND_EXPR) + || code == COND_EXPR || code == VIEW_CONVERT_EXPR) && TREE_CODE (type) == VOID_TYPE)); - /* Make a read-only version of the modifier. */ - if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM - || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER) - ro_modifier = modifier; - else - ro_modifier = EXPAND_NORMAL; - - /* Don't use hard regs as subtargets, because the combiner - can only handle pseudo regs. */ - if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER) - subtarget = 0; - /* Avoid subtargets inside loops, - since they hide some invariant expressions. */ - if (preserve_subexpressions_p ()) - subtarget = 0; - /* If we are going to ignore this result, we need only do something if there is a side-effect somewhere in the expression. If there is, short-circuit the most common cases here. Note that we must @@ -5664,66 +6009,87 @@ expand_expr (exp, target, tmode, modifier) if (! TREE_SIDE_EFFECTS (exp)) return const0_rtx; - /* Ensure we reference a volatile object even if value is ignored. */ + /* Ensure we reference a volatile object even if value is ignored, but + don't do this if all we are doing is taking its address. 
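
(The ignore handling computed above, and the short-circuits that follow, have a direct source-level reading: when an expression's value is unused, only the subexpressions with side effects need to be expanded. For instance:)

    extern int f (void);

    static void
    example (int x, int y)
    {
      (void) (f () + x * y);   /* the call is emitted; x * y is dropped */
      (void) (x + y);          /* no side effects: nothing is emitted   */
    }
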
*/ if (TREE_THIS_VOLATILE (exp) && TREE_CODE (exp) != FUNCTION_DECL - && mode != VOIDmode && mode != BLKmode) + && mode != VOIDmode && mode != BLKmode + && modifier != EXPAND_CONST_ADDRESS) { - temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier); + temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); if (GET_CODE (temp) == MEM) temp = copy_to_reg (temp); return const0_rtx; } - if (TREE_CODE_CLASS (code) == '1') - return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, - VOIDmode, ro_modifier); - else if (TREE_CODE_CLASS (code) == '2' - || TREE_CODE_CLASS (code) == '<') + if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF + || code == INDIRECT_REF || code == BUFFER_REF) + return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, + modifier); + + else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<' + || code == ARRAY_REF || code == ARRAY_RANGE_REF) { - expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier); - expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier); + expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); + expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); return const0_rtx; } else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR) && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1))) /* If the second operand has no side effects, just evaluate the first. */ - return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, - VOIDmode, ro_modifier); + return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, + modifier); + else if (code == BIT_FIELD_REF) + { + expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); + expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); + expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); + return const0_rtx; + } target = 0; } #ifdef MAX_INTEGER_COMPUTATION_MODE + /* Only check stuff here if the mode we want is different from the mode + of the expression; if it's the same, check_max_integer_computation_mode + will handle it. Do we really need to check this stuff at all? 
*/ + if (target + && GET_MODE (target) != mode && TREE_CODE (exp) != INTEGER_CST && TREE_CODE (exp) != PARM_DECL && TREE_CODE (exp) != ARRAY_REF + && TREE_CODE (exp) != ARRAY_RANGE_REF && TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != BIT_FIELD_REF && TREE_CODE (exp) != INDIRECT_REF && TREE_CODE (exp) != CALL_EXPR - && TREE_CODE (exp) != VAR_DECL) + && TREE_CODE (exp) != VAR_DECL + && TREE_CODE (exp) != RTL_EXPR) { enum machine_mode mode = GET_MODE (target); if (GET_MODE_CLASS (mode) == MODE_INT && mode > MAX_INTEGER_COMPUTATION_MODE) - fatal ("unsupported wide integer operation"); + internal_error ("unsupported wide integer operation"); } - if (TREE_CODE (exp) != INTEGER_CST + if (tmode != mode + && TREE_CODE (exp) != INTEGER_CST && TREE_CODE (exp) != PARM_DECL && TREE_CODE (exp) != ARRAY_REF + && TREE_CODE (exp) != ARRAY_RANGE_REF && TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != BIT_FIELD_REF && TREE_CODE (exp) != INDIRECT_REF && TREE_CODE (exp) != VAR_DECL && TREE_CODE (exp) != CALL_EXPR + && TREE_CODE (exp) != RTL_EXPR && GET_MODE_CLASS (tmode) == MODE_INT && tmode > MAX_INTEGER_COMPUTATION_MODE) - fatal ("unsupported wide integer operation"); + internal_error ("unsupported wide integer operation"); check_max_integer_computation_mode (exp); #endif @@ -5731,10 +6097,12 @@ expand_expr (exp, target, tmode, modifier) /* If we will do cse, generate all results into pseudo registers since 1) that allows cse to find more things and 2) otherwise cse could produce an insn the machine - cannot support. */ + cannot support. An exception is a CONSTRUCTOR into a multi-word + MEM: that's much more likely to be most efficient into the MEM. */ if (! cse_not_expected && mode != BLKmode && target - && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)) + && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER) + && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)) target = subtarget; switch (code) { @@ -5747,15 +6115,9 @@ expand_expr (exp, target, tmode, modifier) && function != inline_function_decl && function != 0) { struct function *p = find_function_data (function); - /* Allocate in the memory associated with the function - that the label is in. */ - push_obstacks (p->function_obstack, - p->function_maybepermanent_obstack); - - p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode, - label_rtx (exp), - p->forced_labels); - pop_obstacks (); + p->expr->x_forced_labels + = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp), + p->expr->x_forced_labels); } else { @@ -5764,6 +6126,7 @@ expand_expr (exp, target, tmode, modifier) label_rtx (exp), forced_labels); } + temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, label_rtx (exp))); if (function != current_function_decl @@ -5784,38 +6147,22 @@ expand_expr (exp, target, tmode, modifier) case VAR_DECL: /* If a static var's type was incomplete when the decl was written, but the type is complete now, lay out the decl now. */ - if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0 + if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) { - push_obstacks_nochange (); - end_temporary_allocation (); - layout_decl (exp, 0); - PUT_MODE (DECL_RTL (exp), DECL_MODE (exp)); - pop_obstacks (); - } - - /* Although static-storage variables start off initialized, according to - ANSI C, a memcpy could overwrite them with uninitialized values. So - we check them too. 
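
(The VAR_DECL case above -- laying out a static decl whose type was incomplete when first seen -- corresponds to an ordinary C tentative definition:)

    /* At the extern declaration DECL_SIZE is still zero; once the
       defining declaration completes the type, layout_decl can run.  */
    extern int a[];              /* incomplete array type */
    int a[16];                   /* now complete          */

    int
    first_element (void)
    {
      return a[0];
    }
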
This also lets us check for read-only variables - accessed via a non-const declaration, in case it won't be detected - any other way (e.g., in an embedded system or OS kernel without - memory protection). + rtx value = DECL_RTL_IF_SET (exp); - Aggregates are not checked here; they're handled elsewhere. */ - if (current_function_check_memory_usage && code == VAR_DECL - && GET_CODE (DECL_RTL (exp)) == MEM - && ! AGGREGATE_TYPE_P (TREE_TYPE (exp))) - { - enum memory_use_mode memory_usage; - memory_usage = get_memory_usage_from_modifier (modifier); + layout_decl (exp, 0); - if (memory_usage != MEMORY_USE_DONT) - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - XEXP (DECL_RTL (exp), 0), Pmode, - GEN_INT (int_size_in_bytes (type)), - TYPE_MODE (sizetype), - GEN_INT (memory_usage), - TYPE_MODE (integer_type_node)); + /* If the RTL was already set, update its mode and memory + attributes. */ + if (value != 0) + { + PUT_MODE (value, DECL_MODE (exp)); + SET_DECL_RTL (exp, 0); + set_mem_attributes (value, exp, 1); + SET_DECL_RTL (exp, value); + } } /* ... fall through ... */ @@ -5862,11 +6209,13 @@ expand_expr (exp, target, tmode, modifier) abort (); addr = XEXP (DECL_RTL (exp), 0); if (GET_CODE (addr) == MEM) - addr = gen_rtx_MEM (Pmode, - fix_lexical_addr (XEXP (addr, 0), exp)); + addr + = replace_equiv_address (addr, + fix_lexical_addr (XEXP (addr, 0), exp)); else addr = fix_lexical_addr (addr, exp); - temp = change_address (DECL_RTL (exp), mode, addr); + + temp = replace_equiv_address (DECL_RTL (exp), addr); } /* This is the case of an array whose size is to be determined @@ -5875,8 +6224,7 @@ expand_expr (exp, target, tmode, modifier) else if (GET_CODE (DECL_RTL (exp)) == MEM && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG) - temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)), - XEXP (DECL_RTL (exp), 0)); + temp = validize_mem (DECL_RTL (exp)); /* If DECL_RTL is memory, we are in the normal case and either the address is not valid or it is not a register and -fforce-addr @@ -5890,16 +6238,15 @@ expand_expr (exp, target, tmode, modifier) XEXP (DECL_RTL (exp), 0)) || (flag_force_addr && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG))) - temp = change_address (DECL_RTL (exp), VOIDmode, - copy_rtx (XEXP (DECL_RTL (exp), 0))); + temp = replace_equiv_address (DECL_RTL (exp), + copy_rtx (XEXP (DECL_RTL (exp), 0))); /* If we got something, return it. But first, set the alignment - the address is a register. */ + if the address is a register. */ if (temp != 0) { if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG) - mark_reg_pointer (XEXP (temp, 0), - DECL_ALIGN (exp) / BITS_PER_UNIT); + mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); return temp; } @@ -5909,7 +6256,7 @@ expand_expr (exp, target, tmode, modifier) but mark it so that we know that it was already extended. */ if (GET_CODE (DECL_RTL (exp)) == REG - && GET_MODE (DECL_RTL (exp)) != mode) + && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) { /* Get the signedness used for this variable. Ensure we get the same mode we got when the variable was declared. 
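   A worked illustration may help (hypothetical source, not part of the patch; GET and USE are invented names). On a target whose PROMOTE_MODE widens HImode locals to SImode:

       int use (void)
       {
         short s = get ();   // DECL_RTL (s) is an SImode pseudo
         return s;           // read expands to (subreg:HI (reg:SI N) 0),
                             // marked SUBREG_PROMOTED_VAR_P
       }

   Because the SUBREG is marked as promoted, the widening back to int can reuse the register instead of emitting a fresh sign-extension.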
*/ @@ -5917,7 +6264,7 @@ expand_expr (exp, target, tmode, modifier) != promote_mode (type, DECL_MODE (exp), &unsignedp, 0)) abort (); - temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0); + temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); SUBREG_PROMOTED_VAR_P (temp) = 1; SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp; return temp; @@ -5927,17 +6274,15 @@ expand_expr (exp, target, tmode, modifier) case INTEGER_CST: return immed_double_const (TREE_INT_CST_LOW (exp), - TREE_INT_CST_HIGH (exp), - mode); + TREE_INT_CST_HIGH (exp), mode); case CONST_DECL: - return expand_expr (DECL_INITIAL (exp), target, VOIDmode, - EXPAND_MEMORY_USE_BAD); + return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0); case REAL_CST: /* If optimized, generate immediate CONST_DOUBLE - which will be turned into memory by reload if necessary. - + which will be turned into memory by reload if necessary. + We used to force a register so that loop.c could see it. But this does not allow gen_* patterns to perform optimizations with the constants. It also produces two insns in cases like "x = 1.0;". @@ -5950,7 +6295,7 @@ expand_expr (exp, target, tmode, modifier) case COMPLEX_CST: case STRING_CST: if (! TREE_CST_RTL (exp)) - output_constant_def (exp); + output_constant_def (exp, 1); /* TREE_CST_RTL probably contains a constant address. On RISC machines where a constant address isn't valid, @@ -5962,20 +6307,20 @@ expand_expr (exp, target, tmode, modifier) && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)) || (flag_force_addr && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG))) - return change_address (TREE_CST_RTL (exp), VOIDmode, - copy_rtx (XEXP (TREE_CST_RTL (exp), 0))); + return replace_equiv_address (TREE_CST_RTL (exp), + copy_rtx (XEXP (TREE_CST_RTL (exp), 0))); return TREE_CST_RTL (exp); case EXPR_WITH_FILE_LOCATION: { rtx to_return; - char *saved_input_filename = input_filename; + const char *saved_input_filename = input_filename; int saved_lineno = lineno; input_filename = EXPR_WFL_FILENAME (exp); lineno = EXPR_WFL_LINENO (exp); if (EXPR_WFL_EMIT_LINE_NOTE (exp)) emit_line_note (input_filename, lineno); - /* Possibly avoid switching back and force here */ + /* Possibly avoid switching back and forth here. 
*/ to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier); input_filename = saved_input_filename; lineno = saved_lineno; @@ -6012,15 +6357,19 @@ expand_expr (exp, target, tmode, modifier) } if (temp == 0 || GET_CODE (temp) != MEM) abort (); - return change_address (temp, mode, - fix_lexical_addr (XEXP (temp, 0), exp)); + return + replace_equiv_address (temp, + fix_lexical_addr (XEXP (temp, 0), exp)); } if (SAVE_EXPR_RTL (exp) == 0) { if (mode == VOIDmode) temp = const0_rtx; else - temp = assign_temp (type, 3, 0, 0); + temp = assign_temp (build_qualified_type (type, + (TYPE_QUALS (type) + | TYPE_QUAL_CONST)), + 3, 0, 0); SAVE_EXPR_RTL (exp) = temp; if (!optimize && GET_CODE (temp) == REG) @@ -6035,14 +6384,13 @@ expand_expr (exp, target, tmode, modifier) if (GET_CODE (temp) == REG && GET_MODE (temp) != mode) { - temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0); + temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp)); SUBREG_PROMOTED_VAR_P (temp) = 1; SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp; } if (temp == const0_rtx) - expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, - EXPAND_MEMORY_USE_BAD); + expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); else store_expr (TREE_OPERAND (exp, 0), temp, 0); @@ -6058,7 +6406,7 @@ expand_expr (exp, target, tmode, modifier) { /* Compute the signedness and make the proper SUBREG. */ promote_mode (type, mode, &unsignedp, 0); - temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0); + temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp)); SUBREG_PROMOTED_VAR_P (temp) = 1; SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp; return temp; @@ -6076,66 +6424,17 @@ expand_expr (exp, target, tmode, modifier) case PLACEHOLDER_EXPR: { - tree placeholder_expr; - - /* If there is an object on the head of the placeholder list, - see if some object in it of type TYPE or a pointer to it. For - further information, see tree.def. */ - for (placeholder_expr = placeholder_list; - placeholder_expr != 0; - placeholder_expr = TREE_CHAIN (placeholder_expr)) - { - tree need_type = TYPE_MAIN_VARIANT (type); - tree object = 0; - tree old_list = placeholder_list; - tree elt; - - /* Find the outermost reference that is of the type we want. - If none, see if any object has a type that is a pointer to - the type we want. */ - for (elt = TREE_PURPOSE (placeholder_expr); - elt != 0 && object == 0; - elt - = ((TREE_CODE (elt) == COMPOUND_EXPR - || TREE_CODE (elt) == COND_EXPR) - ? TREE_OPERAND (elt, 1) - : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' - || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' - || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' - || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') - ? TREE_OPERAND (elt, 0) : 0)) - if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) - object = elt; - - for (elt = TREE_PURPOSE (placeholder_expr); - elt != 0 && object == 0; - elt - = ((TREE_CODE (elt) == COMPOUND_EXPR - || TREE_CODE (elt) == COND_EXPR) - ? TREE_OPERAND (elt, 1) - : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' - || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' - || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' - || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') - ? TREE_OPERAND (elt, 0) : 0)) - if (POINTER_TYPE_P (TREE_TYPE (elt)) - && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) - == need_type)) - object = build1 (INDIRECT_REF, need_type, elt); - - if (object != 0) - { - /* Expand this object skipping the list entries before - it was found in case it is also a PLACEHOLDER_EXPR. - In that case, we want to translate it using subsequent - entries. 
*/ - placeholder_list = TREE_CHAIN (placeholder_expr); - temp = expand_expr (object, original_target, tmode, - ro_modifier); - placeholder_list = old_list; - return temp; - } - } + tree old_list = placeholder_list; + tree placeholder_expr = 0; + + exp = find_placeholder (exp, &placeholder_expr); + if (exp == 0) + abort (); + + placeholder_list = TREE_CHAIN (placeholder_expr); + temp = expand_expr (exp, original_target, tmode, modifier); + placeholder_list = old_list; + return temp; } /* We can't find the object or there was a missing WITH_RECORD_EXPR. */ @@ -6146,8 +6445,8 @@ expand_expr (exp, target, tmode, modifier) and pop the list. */ placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE, placeholder_list); - target = expand_expr (TREE_OPERAND (exp, 0), original_target, - tmode, ro_modifier); + target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode, + modifier); placeholder_list = TREE_CHAIN (placeholder_list); return target; @@ -6159,13 +6458,15 @@ expand_expr (exp, target, tmode, modifier) return const0_rtx; case EXIT_EXPR: - expand_exit_loop_if_false (NULL_PTR, + expand_exit_loop_if_false (NULL, invert_truthvalue (TREE_OPERAND (exp, 0))); return const0_rtx; case LABELED_BLOCK_EXPR: if (LABELED_BLOCK_BODY (exp)) - expand_expr_stmt (LABELED_BLOCK_BODY (exp)); + expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1); + /* Should perhaps use expand_label, but this is simpler and safer. */ + do_pending_stack_adjust (); emit_label (label_rtx (LABELED_BLOCK_LABEL (exp))); return const0_rtx; @@ -6178,7 +6479,7 @@ expand_expr (exp, target, tmode, modifier) case LOOP_EXPR: push_temp_slots (); expand_start_loop (1); - expand_expr_stmt (TREE_OPERAND (exp, 0)); + expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1); expand_end_loop (); pop_temp_slots (); @@ -6191,7 +6492,7 @@ expand_expr (exp, target, tmode, modifier) /* Need to open a binding contour here because if there are any cleanups they must be contained here. */ - expand_start_bindings (0); + expand_start_bindings (2); /* Mark the corresponding BLOCK for output in its proper place. */ if (TREE_OPERAND (exp, 2) != 0 @@ -6201,7 +6502,7 @@ expand_expr (exp, target, tmode, modifier) /* If VARS have not yet been expanded, expand them now. */ while (vars) { - if (DECL_RTL (vars) == 0) + if (!DECL_RTL_SET_P (vars)) { vars_need_expansion = 1; expand_decl (vars); @@ -6210,7 +6511,7 @@ expand_expr (exp, target, tmode, modifier) vars = TREE_CHAIN (vars); } - temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier); + temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier); expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0); @@ -6235,9 +6536,10 @@ expand_expr (exp, target, tmode, modifier) if (ignore) { tree elt; + for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) - expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, - EXPAND_MEMORY_USE_BAD); + expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0); + return const0_rtx; } @@ -6252,108 +6554,68 @@ expand_expr (exp, target, tmode, modifier) && ((mode == BLKmode && ! (target != 0 && safe_from_p (target, exp, 1))) || TREE_ADDRESSABLE (exp) - || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST - && (!MOVE_BY_PIECES_P - (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT, - TYPE_ALIGN (type) / BITS_PER_UNIT)) + || (host_integerp (TYPE_SIZE_UNIT (type), 1) + && (! MOVE_BY_PIECES_P + (tree_low_cst (TYPE_SIZE_UNIT (type), 1), + TYPE_ALIGN (type))) && ! 
mostly_zeros_p (exp)))) || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp))) { - rtx constructor = output_constant_def (exp); + rtx constructor = output_constant_def (exp, 1); + if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER - && modifier != EXPAND_SUM - && (! memory_address_p (GET_MODE (constructor), - XEXP (constructor, 0)) - || (flag_force_addr - && GET_CODE (XEXP (constructor, 0)) != REG))) - constructor = change_address (constructor, VOIDmode, - XEXP (constructor, 0)); + && modifier != EXPAND_SUM) + constructor = validize_mem (constructor); + return constructor; } - else { /* Handle calls that pass values in multiple non-contiguous locations. The Irix 6 ABI has examples of this. */ if (target == 0 || ! safe_from_p (target, exp, 1) || GET_CODE (target) == PARALLEL) - { - if (mode != BLKmode && ! TREE_ADDRESSABLE (exp)) - target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); - else - target = assign_temp (type, 0, 1, 1); - } - - if (TREE_READONLY (exp)) - { - if (GET_CODE (target) == MEM) - target = copy_rtx (target); - - RTX_UNCHANGING_P (target) = 1; - } - - store_constructor (exp, target, 0); + target + = assign_temp (build_qualified_type (type, + (TYPE_QUALS (type) + | (TREE_READONLY (exp) + * TYPE_QUAL_CONST))), + 0, TREE_ADDRESSABLE (exp), 1); + + store_constructor (exp, target, 0, + int_size_in_bytes (TREE_TYPE (exp))); return target; } case INDIRECT_REF: { tree exp1 = TREE_OPERAND (exp, 0); - tree exp2; tree index; - tree string = string_constant (exp1, &index); - int i; - + tree string = string_constant (exp1, &index); + /* Try to optimize reads from const strings. */ if (string && TREE_CODE (string) == STRING_CST && TREE_CODE (index) == INTEGER_CST - && !TREE_INT_CST_HIGH (index) - && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string) + && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0 && GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1 - && modifier != EXPAND_MEMORY_USE_WO) - return GEN_INT (TREE_STRING_POINTER (string)[i]); + && modifier != EXPAND_WRITE) + return + GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]); op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); op0 = memory_address (mode, op0); - - if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp))) - { - enum memory_use_mode memory_usage; - memory_usage = get_memory_usage_from_modifier (modifier); - - if (memory_usage != MEMORY_USE_DONT) - { - in_check_memory_usage = 1; - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - op0, Pmode, - GEN_INT (int_size_in_bytes (type)), - TYPE_MODE (sizetype), - GEN_INT (memory_usage), - TYPE_MODE (integer_type_node)); - in_check_memory_usage = 0; - } - } - temp = gen_rtx_MEM (mode, op0); + set_mem_attributes (temp, exp, 0); + + /* If we are writing to this object and its type is a record with + readonly fields, we must mark it as readonly so it will + conflict with readonly references to those fields. 
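   A hypothetical C fragment showing the conflict being preserved (S, G and P are invented names):

       struct s { const int ro; int rw; };

       int g (struct s *p)
       {
         int before = p->ro;   // load of a readonly field
         p->rw = 1;            // store through *p; marking the MEM
                               // RTX_UNCHANGING_P keeps it from being
                               // reordered past the load above
         return before;
       }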
*/ + if (modifier == EXPAND_WRITE && readonly_fields_p (type)) + RTX_UNCHANGING_P (temp) = 1; - if (AGGREGATE_TYPE_P (TREE_TYPE (exp)) - || (TREE_CODE (exp1) == ADDR_EXPR - && (exp2 = TREE_OPERAND (exp1, 0)) - && AGGREGATE_TYPE_P (TREE_TYPE (exp2)))) - MEM_SET_IN_STRUCT_P (temp, 1); - - MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile; - MEM_ALIAS_SET (temp) = get_alias_set (exp); - - /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY - here, because, in C and C++, the fact that a location is accessed - through a pointer to const does not mean that the value there can - never change. Languages where it can never change should - also set TREE_STATIC. */ - RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp); return temp; } @@ -6365,8 +6627,7 @@ expand_expr (exp, target, tmode, modifier) tree array = TREE_OPERAND (exp, 0); tree domain = TYPE_DOMAIN (TREE_TYPE (array)); tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; - tree index = TREE_OPERAND (exp, 1); - tree index_type = TREE_TYPE (index); + tree index = convert (sizetype, TREE_OPERAND (exp, 1)); HOST_WIDE_INT i; /* Optimize the special-case of a zero lower bound. @@ -6375,50 +6636,52 @@ expand_expr (exp, target, tmode, modifier) with constant folding. (E.g. suppose the lower bound is 1, and its mode is QI. Without the conversion, (ARRAY +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) - +INDEX), which becomes (ARRAY+255+INDEX). Oops!) - - But sizetype isn't quite right either (especially if - the lowbound is negative). FIXME */ + +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ if (! integer_zerop (low_bound)) - index = fold (build (MINUS_EXPR, index_type, index, - convert (sizetype, low_bound))); + index = size_diffop (index, convert (sizetype, low_bound)); /* Fold an expression like: "foo"[2]. This is not done in fold so it won't happen inside &. Don't fold if this is for wide characters since it's too difficult to do correctly and this is a very rare case. */ - if (TREE_CODE (array) == STRING_CST + if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER + && TREE_CODE (array) == STRING_CST && TREE_CODE (index) == INTEGER_CST - && !TREE_INT_CST_HIGH (index) - && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array) + && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0 && GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1) - return GEN_INT (TREE_STRING_POINTER (array)[i]); + return + GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]); /* If this is a constant index into a constant array, just get the value from the array. Handle both the cases when we have an explicit constructor and when our operand is a variable that was declared const. */ - if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)) + if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER + && TREE_CODE (array) == CONSTRUCTOR && ! 
TREE_SIDE_EFFECTS (array) + && TREE_CODE (index) == INTEGER_CST + && 0 > compare_tree_int (index, + list_length (CONSTRUCTOR_ELTS + (TREE_OPERAND (exp, 0))))) { - if (TREE_CODE (index) == INTEGER_CST - && TREE_INT_CST_HIGH (index) == 0) - { - tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); - - i = TREE_INT_CST_LOW (index); - while (elem && i--) - elem = TREE_CHAIN (elem); - if (elem) - return expand_expr (fold (TREE_VALUE (elem)), target, - tmode, ro_modifier); - } + tree elem; + + for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), + i = TREE_INT_CST_LOW (index); + elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem)) + ; + + if (elem) + return expand_expr (fold (TREE_VALUE (elem)), target, tmode, + modifier); } - + else if (optimize >= 1 + && modifier != EXPAND_CONST_ADDRESS + && modifier != EXPAND_INITIALIZER && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK) @@ -6427,38 +6690,46 @@ expand_expr (exp, target, tmode, modifier) { tree init = DECL_INITIAL (array); - i = TREE_INT_CST_LOW (index); if (TREE_CODE (init) == CONSTRUCTOR) { - tree elem = CONSTRUCTOR_ELTS (init); + tree elem; + + for (elem = CONSTRUCTOR_ELTS (init); + (elem + && !tree_int_cst_equal (TREE_PURPOSE (elem), index)); + elem = TREE_CHAIN (elem)) + ; - while (elem - && !tree_int_cst_equal (TREE_PURPOSE (elem), index)) - elem = TREE_CHAIN (elem); - if (elem) + if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem))) return expand_expr (fold (TREE_VALUE (elem)), target, - tmode, ro_modifier); + tmode, modifier); } else if (TREE_CODE (init) == STRING_CST - && TREE_INT_CST_HIGH (index) == 0 - && (TREE_INT_CST_LOW (index) - < TREE_STRING_LENGTH (init))) - return (GEN_INT - (TREE_STRING_POINTER - (init)[TREE_INT_CST_LOW (index)])); + && 0 > compare_tree_int (index, + TREE_STRING_LENGTH (init))) + { + tree type = TREE_TYPE (TREE_TYPE (init)); + enum machine_mode mode = TYPE_MODE (type); + + if (GET_MODE_CLASS (mode) == MODE_INT + && GET_MODE_SIZE (mode) == 1) + return (GEN_INT + (TREE_STRING_POINTER + (init)[TREE_INT_CST_LOW (index)])); + } } } } - - /* ... fall through ... */ + /* Fall through. */ case COMPONENT_REF: case BIT_FIELD_REF: + case ARRAY_RANGE_REF: /* If the operand is a CONSTRUCTOR, we can just extract the appropriate field if it is present. Don't do this if we have already written the data since we want to refer to that copy and varasm.c assumes that's what we'll do. 
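   For illustration (a hypothetical GNU C constructor expression; PT and F are invented names), when the front end hands us a bare CONSTRUCTOR that was never written out:

       struct pt { int x, y; };

       int f (void)
       {
         return ((struct pt) { 1, 2 }).x;   // the COMPONENT_REF of the
                                            // CONSTRUCTOR expands directly
                                            // to the constant 1
       }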
*/ - if (code != ARRAY_REF + if (code == COMPONENT_REF && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0) { @@ -6480,10 +6751,11 @@ expand_expr (exp, target, tmode, modifier) && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt))) <= HOST_BITS_PER_WIDE_INT)))) { - op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier); + op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier); if (DECL_BIT_FIELD (TREE_PURPOSE (elt))) { - int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt)); + HOST_WIDE_INT bitsize + = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt))); if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt)))) { @@ -6511,14 +6783,12 @@ expand_expr (exp, target, tmode, modifier) { enum machine_mode mode1; - int bitsize; - int bitpos; + HOST_WIDE_INT bitsize, bitpos; tree offset; int volatilep = 0; - int alignment; tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode1, &unsignedp, &volatilep, - &alignment); + &mode1, &unsignedp, &volatilep); + rtx orig_op0; /* If we got back the original object, something is wrong. Perhaps we are evaluating an expression too early. In any event, don't @@ -6529,22 +6799,25 @@ expand_expr (exp, target, tmode, modifier) /* If TEM's type is a union of variable size, pass TARGET to the inner computation, since it will need a temporary and TARGET is known to have to do. This occurs in unchecked conversion in Ada. */ - - op0 = expand_expr (tem, - (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE - && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) - != INTEGER_CST) - ? target : NULL_RTX), - VOIDmode, - modifier == EXPAND_INITIALIZER - ? modifier : EXPAND_NORMAL); + + orig_op0 = op0 + = expand_expr (tem, + (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE + && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) + != INTEGER_CST) + ? target : NULL_RTX), + VOIDmode, + (modifier == EXPAND_INITIALIZER + || modifier == EXPAND_CONST_ADDRESS) + ? modifier : EXPAND_NORMAL); /* If this is a constant, put it into a register if it is a - legitimate constant and memory if it isn't. */ + legitimate constant and OFFSET is 0 and memory if it isn't. */ if (CONSTANT_P (op0)) { enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem)); - if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)) + if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) + && offset == 0) op0 = force_reg (mode, op0); else op0 = validize_mem (force_const_mem (mode, op0)); @@ -6554,77 +6827,77 @@ expand_expr (exp, target, tmode, modifier) { rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); + /* If this object is in a register, put it into memory. + This case can't occur in C, but can in Ada if we have + unchecked conversion of an expression from a scalar type to + an array or record type. */ + if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG + || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF) + { + /* If the operand is a SAVE_EXPR, we can deal with this by + forcing the SAVE_EXPR into memory. 
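   Rendered as C-like pseudocode, the Ada situation is roughly this (hypothetical sketch; as noted above, C itself cannot produce it, since taking the address would already have made R addressable):

       long r = f ();                           // value held in a register
       unsigned char *view = (unsigned char *) &r;
       ... view[i] ...                          // unchecked-conversion view:
                                                // R must first be spilled so
                                                // the variable offset I can
                                                // be added to its address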
*/ + if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR) + { + put_var_into_stack (TREE_OPERAND (exp, 0)); + op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0)); + } + else + { + tree nt + = build_qualified_type (TREE_TYPE (tem), + (TYPE_QUALS (TREE_TYPE (tem)) + | TYPE_QUAL_CONST)); + rtx memloc = assign_temp (nt, 1, 1, 1); + + emit_move_insn (memloc, op0); + op0 = memloc; + } + } + if (GET_CODE (op0) != MEM) abort (); if (GET_MODE (offset_rtx) != ptr_mode) - { + offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); + #ifdef POINTERS_EXTEND_UNSIGNED - offset_rtx = convert_memory_address (ptr_mode, offset_rtx); -#else - offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); + if (GET_MODE (offset_rtx) != Pmode) + offset_rtx = convert_memory_address (Pmode, offset_rtx); #endif - } - /* A constant address in TO_RTX can have VOIDmode, we must not try + /* A constant address in OP0 can have VOIDmode, we must not try to call force_reg for that case. Avoid that case. */ if (GET_CODE (op0) == MEM && GET_MODE (op0) == BLKmode && GET_MODE (XEXP (op0, 0)) != VOIDmode - && bitsize - && (bitpos % bitsize) == 0 + && bitsize != 0 + && (bitpos % bitsize) == 0 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 - && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1)) + && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1)) { - rtx temp = change_address (op0, mode1, - plus_constant (XEXP (op0, 0), - (bitpos / - BITS_PER_UNIT))); + rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); + if (GET_CODE (XEXP (temp, 0)) == REG) op0 = temp; else - op0 = change_address (op0, mode1, - force_reg (GET_MODE (XEXP (temp, 0)), - XEXP (temp, 0))); + op0 = (replace_equiv_address + (op0, + force_reg (GET_MODE (XEXP (temp, 0)), + XEXP (temp, 0)))); bitpos = 0; } - - op0 = change_address (op0, VOIDmode, - gen_rtx_PLUS (ptr_mode, XEXP (op0, 0), - force_reg (ptr_mode, offset_rtx))); + op0 = offset_address (op0, offset_rtx, + highest_pow2_factor (offset)); } /* Don't forget about volatility even if this is a bitfield. */ if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0)) { - op0 = copy_rtx (op0); - MEM_VOLATILE_P (op0) = 1; - } - - /* Check the access. */ - if (current_function_check_memory_usage && GET_CODE (op0) == MEM) - { - enum memory_use_mode memory_usage; - memory_usage = get_memory_usage_from_modifier (modifier); + if (op0 == orig_op0) + op0 = copy_rtx (op0); - if (memory_usage != MEMORY_USE_DONT) - { - rtx to; - int size; - - to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT)); - size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1; - - /* Check the access right of the pointer. */ - if (size > BITS_PER_UNIT) - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - to, Pmode, - GEN_INT (size / BITS_PER_UNIT), - TYPE_MODE (sizetype), - GEN_INT (memory_usage), - TYPE_MODE (integer_type_node)); - } + MEM_VOLATILE_P (op0) = 1; } /* In cases where an aligned union has an unaligned object @@ -6632,25 +6905,36 @@ expand_expr (exp, target, tmode, modifier) an integer-mode (e.g., SImode) object. Handle this case by doing the extract into an object as wide as the field (which we know to be the width of a basic mode), then - storing into memory, and changing the mode to BLKmode. - If we ultimately want the address (EXPAND_CONST_ADDRESS or - EXPAND_INITIALIZER), then we must not copy to a temporary. */ + storing into memory, and changing the mode to BLKmode. 
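   A hypothetical C-level trigger for the bit-field path, using GNU C packed layout (P, GET_I and Q are invented names):

       struct __attribute__ ((packed)) p { char c; int i; };

       int get_i (struct p *q)
       {
         return q->i;   // I sits at offset 1; on a STRICT_ALIGNMENT target
                        // it is fetched with extract_bit_field rather than
                        // one aligned SImode load
       }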
*/ if (mode1 == VOIDmode || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG - || (modifier != EXPAND_CONST_ADDRESS - && modifier != EXPAND_INITIALIZER - && ((mode1 != BLKmode && ! direct_load[(int) mode1] - && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT - && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) - /* If the field isn't aligned enough to fetch as a memref, - fetch it as a bit field. */ - || (SLOW_UNALIGNED_ACCESS - && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode)) - || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))) + || (mode1 != BLKmode && ! direct_load[(int) mode1] + && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT + && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT + && modifier != EXPAND_CONST_ADDRESS + && modifier != EXPAND_INITIALIZER) + /* If the field isn't aligned enough to fetch as a memref, + fetch it as a bit field. */ + || (mode1 != BLKmode + && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)) + && ((TYPE_ALIGN (TREE_TYPE (tem)) + < GET_MODE_ALIGNMENT (mode)) + || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))) + /* If the type and the field are a constant size and the + size of the type isn't the same size as the bitfield, + we must use bitfield operations. */ + || (bitsize >= 0 + && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) + == INTEGER_CST) + && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), + bitsize))) { enum machine_mode ext_mode = mode; - if (ext_mode == BLKmode) + if (ext_mode == BLKmode + && ! (target != 0 && GET_CODE (op0) == MEM + && GET_CODE (target) == MEM + && bitpos % BITS_PER_UNIT == 0)) ext_mode = mode_for_size (bitsize, MODE_INT, 1); if (ext_mode == BLKmode) @@ -6662,28 +6946,24 @@ expand_expr (exp, target, tmode, modifier) || bitpos % BITS_PER_UNIT != 0) abort (); - op0 = change_address (op0, VOIDmode, - plus_constant (XEXP (op0, 0), - bitpos / BITS_PER_UNIT)); + op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT); if (target == 0) target = assign_temp (type, 0, 1, 1); emit_block_move (target, op0, GEN_INT ((bitsize + BITS_PER_UNIT - 1) - / BITS_PER_UNIT), - 1); - + / BITS_PER_UNIT)); + return target; } op0 = validize_mem (op0); if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG) - mark_reg_pointer (XEXP (op0, 0), alignment); + mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, target, ext_mode, ext_mode, - alignment, int_size_in_bytes (TREE_TYPE (tem))); /* If the result is a record type and BITSIZE is narrower than @@ -6691,7 +6971,7 @@ expand_expr (exp, target, tmode, modifier) machine, we must put the field into the high-order bits. */ if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT - && bitsize < GET_MODE_BITSIZE (GET_MODE (op0))) + && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0))) op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, size_int (GET_MODE_BITSIZE (GET_MODE (op0)) - bitsize), @@ -6699,13 +6979,14 @@ expand_expr (exp, target, tmode, modifier) if (mode == BLKmode) { - rtx new = assign_stack_temp (ext_mode, - bitsize / BITS_PER_UNIT, 0); + rtx new = assign_temp (build_qualified_type + (type_for_mode (ext_mode, 0), + TYPE_QUAL_CONST), 0, 1, 1); emit_move_insn (new, op0); op0 = copy_rtx (new); PUT_MODE (op0, BLKmode); - MEM_SET_IN_STRUCT_P (op0, 1); + set_mem_attributes (op0, exp, 1); } return op0; @@ -6719,20 +7000,17 @@ expand_expr (exp, target, tmode, modifier) /* Get a reference to just this component. 
*/ if (modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) - op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0), - (bitpos / BITS_PER_UNIT))); + op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); else - op0 = change_address (op0, mode1, - plus_constant (XEXP (op0, 0), - (bitpos / BITS_PER_UNIT))); + op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); - if (GET_CODE (op0) == MEM) - MEM_ALIAS_SET (op0) = get_alias_set (exp); + if (op0 == orig_op0) + op0 = copy_rtx (op0); + set_mem_attributes (op0, exp, 0); if (GET_CODE (XEXP (op0, 0)) == REG) - mark_reg_pointer (XEXP (op0, 0), alignment); + mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); - MEM_SET_IN_STRUCT_P (op0, 1); MEM_VOLATILE_P (op0) |= volatilep; if (mode == mode1 || mode1 == BLKmode || mode1 == tmode || modifier == EXPAND_CONST_ADDRESS @@ -6745,6 +7023,43 @@ expand_expr (exp, target, tmode, modifier) return target; } + case VTABLE_REF: + { + rtx insn, before = get_last_insn (), vtbl_ref; + + /* Evaluate the interior expression. */ + subtarget = expand_expr (TREE_OPERAND (exp, 0), target, + tmode, modifier); + + /* Get or create an instruction off which to hang a note. */ + if (REG_P (subtarget)) + { + target = subtarget; + insn = get_last_insn (); + if (insn == before) + abort (); + if (! INSN_P (insn)) + insn = prev_nonnote_insn (insn); + } + else + { + target = gen_reg_rtx (GET_MODE (subtarget)); + insn = emit_move_insn (target, subtarget); + } + + /* Collect the data for the note. */ + vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0); + vtbl_ref = plus_constant (vtbl_ref, + tree_low_cst (TREE_OPERAND (exp, 2), 0)); + /* Discard the initial CONST that was added. */ + vtbl_ref = XEXP (vtbl_ref, 0); + + REG_NOTES (insn) + = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn)); + + return target; + } + /* Intended for a reference to a buffer of a file-object in Pascal. But it's not certain that a special tree code will really be necessary for these. INDIRECT_REF might work for them. */ @@ -6777,8 +7092,6 @@ expand_expr (exp, target, tmode, modifier) rtx rlow; rtx diff, quo, rem, addr, bit, result; - preexpand_calls (exp); - /* If domain is empty, answer is no. Likewise if index is constant and out of bounds. */ if (((TREE_CODE (set_high_bound) == INTEGER_CST @@ -6803,23 +7116,19 @@ expand_expr (exp, target, tmode, modifier) if (! (GET_CODE (index_val) == CONST_INT && GET_CODE (lo_r) == CONST_INT)) - { - emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX, - GET_MODE (index_val), iunsignedp, 0, op1); - } + emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX, + GET_MODE (index_val), iunsignedp, op1); if (! (GET_CODE (index_val) == CONST_INT && GET_CODE (hi_r) == CONST_INT)) - { - emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX, - GET_MODE (index_val), iunsignedp, 0, op1); - } + emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX, + GET_MODE (index_val), iunsignedp, op1); /* Calculate the element number of bit zero in the first word of the set. */ if (GET_CODE (lo_r) == CONST_INT) rlow = GEN_INT (INTVAL (lo_r) - & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT)); + & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)); else rlow = expand_binop (index_mode, and_optab, lo_r, GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)), @@ -6838,7 +7147,7 @@ expand_expr (exp, target, tmode, modifier) setaddr, NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN)); - /* Extract the bit we want to examine */ + /* Extract the bit we want to examine. 
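   The sequence built here amounts to the following sketch (hypothetical helper; the real code computes the quotient and remainder shown as QUO and REM above, after rounding the lower bound via RLOW):

       static int
       in_set (const unsigned char *set, long index, long set_low)
       {
         long pos = index - set_low;               // bit number in the set
         return (set[pos / 8] >> (pos % 8)) & 1;   // pick the byte, then
                                                   // the bit
       }

   with the two emit_cmp_and_jump_insns calls having already routed out-of-range indexes to the "answer is no" label.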
*/ bit = expand_shift (RSHIFT_EXPR, byte_mode, gen_rtx_MEM (byte_mode, addr), make_tree (TREE_TYPE (index), rem), @@ -6859,26 +7168,26 @@ expand_expr (exp, target, tmode, modifier) } case WITH_CLEANUP_EXPR: - if (RTL_EXPR_RTL (exp) == 0) + if (WITH_CLEANUP_EXPR_RTL (exp) == 0) { - RTL_EXPR_RTL (exp) - = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier); - expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2)); + WITH_CLEANUP_EXPR_RTL (exp) + = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); + expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1)); /* That's it for this cleanup. */ - TREE_OPERAND (exp, 2) = 0; + TREE_OPERAND (exp, 1) = 0; } - return RTL_EXPR_RTL (exp); + return WITH_CLEANUP_EXPR_RTL (exp); case CLEANUP_POINT_EXPR: { /* Start a new binding layer that will keep track of all cleanup actions to be performed. */ - expand_start_bindings (0); + expand_start_bindings (2); target_temp_slot_level = temp_slot_level; - op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier); + op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); /* If we're going to use this value, load it up now. */ if (! ignore) op0 = force_not_mem (op0); @@ -6893,12 +7202,13 @@ expand_expr (exp, target, tmode, modifier) && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL) && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) - return expand_builtin (exp, target, subtarget, tmode, ignore); - - /* If this call was expanded already by preexpand_calls, - just return the result we got. */ - if (CALL_EXPR_RTL (exp) != 0) - return CALL_EXPR_RTL (exp); + { + if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) + == BUILT_IN_FRONTEND) + return (*lang_expand_expr) (exp, original_target, tmode, modifier); + else + return expand_builtin (exp, target, subtarget, tmode, ignore); + } return expand_call (exp, target, ignore); @@ -6906,29 +7216,42 @@ expand_expr (exp, target, tmode, modifier) case NOP_EXPR: case CONVERT_EXPR: case REFERENCE_EXPR: + if (TREE_OPERAND (exp, 0) == error_mark_node) + return const0_rtx; + if (TREE_CODE (type) == UNION_TYPE) { tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0)); - if (target == 0) + + /* If both input and output are BLKmode, this conversion isn't doing + anything except possibly changing memory attribute. */ + if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode) { - if (mode != BLKmode) - target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); - else - target = assign_temp (type, 0, 1, 1); + rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode, + modifier); + + result = copy_rtx (result); + set_mem_attributes (result, exp, 0); + return result; } + if (target == 0) + target = assign_temp (type, 0, 1, 1); + if (GET_CODE (target) == MEM) /* Store data into beginning of memory target. */ store_expr (TREE_OPERAND (exp, 0), - change_address (target, TYPE_MODE (valtype), 0), 0); + adjust_address (target, TYPE_MODE (valtype), 0), 0); else if (GET_CODE (target) == REG) /* Store this field into a union of the proper type. 
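   The conversion handled in this arm is GNU C's cast-to-union extension, for example (hypothetical names U and WRAP):

       union u { int i; double d; };

       union u wrap (int x)
       {
         return (union u) x;   // X is stored into the I member at
                               // offset 0 of the union target
       }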
*/ - store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0, - TYPE_MODE (valtype), TREE_OPERAND (exp, 0), - VOIDmode, 0, 1, - int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))), - 0); + store_field (target, + MIN ((int_size_in_bytes (TREE_TYPE + (TREE_OPERAND (exp, 0))) + * BITS_PER_UNIT), + (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), + 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), + VOIDmode, 0, type, 0); else abort (); @@ -6939,7 +7262,7 @@ expand_expr (exp, target, tmode, modifier) if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) { op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, - ro_modifier); + modifier); /* If the signedness of the conversion differs and OP0 is a promoted SUBREG, clear that indication since we now @@ -6973,11 +7296,85 @@ expand_expr (exp, target, tmode, modifier) TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); return target; + case VIEW_CONVERT_EXPR: + op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); + + /* If the input and output modes are both the same, we are done. + Otherwise, if neither mode is BLKmode and both are within a word, we + can use gen_lowpart. If neither is true, make sure the operand is + in memory and convert the MEM to the new mode. */ + if (TYPE_MODE (type) == GET_MODE (op0)) + ; + else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode + && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD + && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD) + op0 = gen_lowpart (TYPE_MODE (type), op0); + else if (GET_CODE (op0) != MEM) + { + /* If the operand is not a MEM, force it into memory. Since we + are going to be changing the mode of the MEM, don't call + force_const_mem for constants because we don't allow pool + constants to change mode. */ + tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); + + if (TREE_ADDRESSABLE (exp)) + abort (); + + if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) + target + = assign_stack_temp_for_type + (TYPE_MODE (inner_type), + GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type); + + emit_move_insn (target, op0); + op0 = target; + } + + /* At this point, OP0 is in the correct mode. If the output type is such + that the operand is known to be aligned, indicate that it is. + Otherwise, we need only be concerned about alignment for non-BLKmode + results. */ + if (GET_CODE (op0) == MEM) + { + op0 = copy_rtx (op0); + + if (TYPE_ALIGN_OK (type)) + set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type))); + else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT + && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) + { + tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); + HOST_WIDE_INT temp_size + = MAX (int_size_in_bytes (inner_type), + (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type))); + rtx new = assign_stack_temp_for_type (TYPE_MODE (type), + temp_size, 0, type); + rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); + + if (TREE_ADDRESSABLE (exp)) + abort (); + + if (GET_MODE (op0) == BLKmode) + emit_block_move (new_with_op0_mode, op0, + GEN_INT (GET_MODE_SIZE (TYPE_MODE (type)))); + else + emit_move_insn (new_with_op0_mode, op0); + + op0 = new; + } + + op0 = adjust_address (op0, TYPE_MODE (type), 0); + } + + return op0; + case PLUS_EXPR: /* We come here from MINUS_EXPR when the second operand is a constant. */ plus_expr: - this_optab = add_optab; + this_optab = ! unsignedp && flag_trapv + && (GET_MODE_CLASS (mode) == MODE_INT) + ? 
addv_optab : add_optab; /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and something else, make sure we add the register to the constant and @@ -7012,15 +7409,25 @@ expand_expr (exp, target, tmode, modifier) If this is an EXPAND_SUM call, always return the sum. */ if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER - || mode == ptr_mode) + || (mode == ptr_mode && (unsignedp || ! flag_trapv))) { if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT && TREE_CONSTANT (TREE_OPERAND (exp, 1))) { + rtx constant_part; + op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, EXPAND_SUM); - op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0))); + /* Use immed_double_const to ensure that the constant is + truncated according to the mode of OP1, then sign extended + to a HOST_WIDE_INT. Using the constant directly can result + in non-canonical RTL in a 64x32 cross compile. */ + constant_part + = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)), + (HOST_WIDE_INT) 0, + TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))); + op1 = plus_constant (op1, INTVAL (constant_part)); if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) op1 = force_operand (op1, target); return op1; @@ -7030,6 +7437,8 @@ expand_expr (exp, target, tmode, modifier) && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT && TREE_CONSTANT (TREE_OPERAND (exp, 0))) { + rtx constant_part; + op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM); if (! CONSTANT_P (op0)) @@ -7042,7 +7451,15 @@ expand_expr (exp, target, tmode, modifier) goto binop2; goto both_summands; } - op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))); + /* Use immed_double_const to ensure that the constant is + truncated according to the mode of OP1, then sign extended + to a HOST_WIDE_INT. Using the constant directly can result + in non-canonical RTL in a 64x32 cross compile. */ + constant_part + = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)), + (HOST_WIDE_INT) 0, + TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))); + op0 = plus_constant (op0, INTVAL (constant_part)); if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) op0 = force_operand (op0, target); return op0; @@ -7057,12 +7474,11 @@ expand_expr (exp, target, tmode, modifier) || mode != ptr_mode) goto binop; - preexpand_calls (exp); if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0; - op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier); - op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier); + op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier); + op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier); both_summands: /* Make sure any term that's a sum with a constant comes last. */ @@ -7093,7 +7509,7 @@ expand_expr (exp, target, tmode, modifier) op0 = eliminate_constant_term (op0, &constant_term); /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so - their sum should be a constant. Form it into OP1, since the + their sum should be a constant. Form it into OP1, since the result we want will then be OP0 + OP1. 
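   At the source level the reassociation corresponds to (hypothetical example):

       int f (int x, int y)
       {
         return (x + 3) + (y + 5);   // constant terms are peeled off and
                                     // combined, leaving (x + y) + 8 with
                                     // a single trailing constant
       }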
*/ temp = simplify_binary_operation (PLUS, mode, constant_term, @@ -7121,10 +7537,10 @@ expand_expr (exp, target, tmode, modifier) && really_constant_p (TREE_OPERAND (exp, 0)) && really_constant_p (TREE_OPERAND (exp, 1))) { - rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, - VOIDmode, ro_modifier); - rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, - VOIDmode, ro_modifier); + rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, + modifier); + rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, + modifier); /* If the last operand is a CONST_INT, use plus_constant of the negated constant. Else make the MINUS. */ @@ -7139,38 +7555,29 @@ expand_expr (exp, target, tmode, modifier) tree negated = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (exp, 1))); - /* Deal with the case where we can't negate the constant - in TYPE. */ if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated)) - { - tree newtype = signed_type (type); - tree newop0 = convert (newtype, TREE_OPERAND (exp, 0)); - tree newop1 = convert (newtype, TREE_OPERAND (exp, 1)); - tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1)); - - if (! TREE_OVERFLOW (newneg)) - return expand_expr (convert (type, - build (PLUS_EXPR, newtype, - newop0, newneg)), - target, tmode, ro_modifier); - } + /* If we can't negate the constant in TYPE, leave it alone and + expand_binop will negate it for us. We used to try to do it + here in the signed version of TYPE, but that doesn't work + on POINTER_TYPEs. */; else { exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated); goto plus_expr; } } - this_optab = sub_optab; + this_optab = ! unsignedp && flag_trapv + && (GET_MODE_CLASS(mode) == MODE_INT) + ? subv_optab : sub_optab; goto binop; case MULT_EXPR: - preexpand_calls (exp); /* If first operand is constant, swap them. Thus the following special case checks need only check the second operand. */ if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) { - register tree t1 = TREE_OPERAND (exp, 0); + tree t1 = TREE_OPERAND (exp, 0); TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1); TREE_OPERAND (exp, 1) = t1; } @@ -7188,19 +7595,23 @@ expand_expr (exp, target, tmode, modifier) /* Apply distributive law if OP0 is x+c. */ if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT) - return gen_rtx_PLUS (mode, - gen_rtx_MULT (mode, XEXP (op0, 0), - GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))), - GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) - * INTVAL (XEXP (op0, 1)))); + return + gen_rtx_PLUS + (mode, + gen_rtx_MULT + (mode, XEXP (op0, 0), + GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))), + GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) + * INTVAL (XEXP (op0, 1)))); if (GET_CODE (op0) != REG) op0 = force_operand (op0, NULL_RTX); if (GET_CODE (op0) != REG) op0 = copy_to_mode_reg (mode, op0); - return gen_rtx_MULT (mode, op0, - GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))); + return + gen_rtx_MULT (mode, op0, + GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))); } if (! 
safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) @@ -7259,8 +7670,10 @@ expand_expr (exp, target, tmode, modifier) op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), NULL_RTX, VOIDmode, 0); if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) - op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, - VOIDmode, 0); + op1 = convert_modes (innermode, mode, + expand_expr (TREE_OPERAND (exp, 1), + NULL_RTX, VOIDmode, 0), + unsignedp); else op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), NULL_RTX, VOIDmode, 0); @@ -7285,7 +7698,6 @@ expand_expr (exp, target, tmode, modifier) case CEIL_DIV_EXPR: case ROUND_DIV_EXPR: case EXACT_DIV_EXPR: - preexpand_calls (exp); if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0; /* Possible optimization: compute the dividend with EXPAND_SUM op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); return expand_divmod (0, code, mode, op0, op1, target, unsignedp); case RDIV_EXPR: - this_optab = flodiv_optab; + /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving + an expensive divide. If not, combine will rebuild the original + computation. */ + if (flag_unsafe_math_optimizations && optimize && !optimize_size + && !real_onep (TREE_OPERAND (exp, 0))) + return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0), + build (RDIV_EXPR, type, + build_real (type, dconst1), + TREE_OPERAND (exp, 1))), + target, tmode, unsignedp); + this_optab = sdiv_optab; goto binop; case TRUNC_MOD_EXPR: case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR: case ROUND_MOD_EXPR: - preexpand_calls (exp); if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0; op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); @@ -7337,7 +7758,10 @@ expand_expr (exp, target, tmode, modifier) case NEGATE_EXPR: op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); - temp = expand_unop (mode, neg_optab, op0, target, 0); + temp = expand_unop (mode, + ! unsignedp && flag_trapv + && (GET_MODE_CLASS(mode) == MODE_INT) + ? negv_optab : neg_optab, op0, target, 0); if (temp == 0) abort (); return temp; @@ -7355,7 +7779,7 @@ expand_expr (exp, target, tmode, modifier) if (TREE_UNSIGNED (type)) return op0; - return expand_abs (mode, op0, target, + return expand_abs (mode, op0, target, unsignedp, safe_from_p (target, TREE_OPERAND (exp, 0), 1)); case MAX_EXPR: @@ -7384,7 +7808,7 @@ expand_expr (exp, target, tmode, modifier) /* At this point, a MEM target is no longer useful; we will get better code without it. */ - + if (GET_CODE (target) == MEM) target = gen_reg_rtx (mode); @@ -7395,7 +7819,8 @@ expand_expr (exp, target, tmode, modifier) /* If this mode is an integer too wide to compare properly, compare word by word. Rely on cse to optimize constant cases. */ - if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode)) + if (GET_MODE_CLASS (mode) == MODE_INT + && ! can_compare_p (GE, mode, ccp_jump)) { if (code == MAX_EXPR) do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL_RTX, op0); else do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL_RTX, op0); - emit_move_insn (target, op1); } else { - if (code == MAX_EXPR) - temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))) - ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0) - : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0)); - else - temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))) - ? 
compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0) - : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0)); - if (temp == const0_rtx) - emit_move_insn (target, op1); - else if (temp != const_true_rtx) - { - if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0) - emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0)); - else - abort (); - emit_move_insn (target, op1); - } + int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))); + do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE, + unsignedp, mode, NULL_RTX, NULL_RTX, + op0); } + emit_move_insn (target, op1); emit_label (op0); return target; @@ -7475,7 +7886,6 @@ expand_expr (exp, target, tmode, modifier) case RSHIFT_EXPR: case LROTATE_EXPR: case RROTATE_EXPR: - preexpand_calls (exp); if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0; op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); @@ -7490,7 +7900,13 @@ expand_expr (exp, target, tmode, modifier) case GE_EXPR: case EQ_EXPR: case NE_EXPR: - preexpand_calls (exp); + case UNORDERED_EXPR: + case ORDERED_EXPR: + case UNLT_EXPR: + case UNLE_EXPR: + case UNGT_EXPR: + case UNGE_EXPR: + case UNEQ_EXPR: temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0); if (temp != 0) return temp; @@ -7510,7 +7926,7 @@ expand_expr (exp, target, tmode, modifier) op1 = gen_label_rtx (); emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, - GET_MODE (temp), unsignedp, 0, op1); + GET_MODE (temp), unsignedp, op1); emit_move_insn (temp, const1_rtx); emit_label (op1); return temp; @@ -7567,21 +7983,21 @@ expand_expr (exp, target, tmode, modifier) && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0)))) { - tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0); - tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0); - - if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2' - && operand_equal_p (false, TREE_OPERAND (true, 0), 0)) - || (TREE_CODE_CLASS (TREE_CODE (false)) == '2' - && operand_equal_p (true, TREE_OPERAND (false, 0), 0)) - || (TREE_CODE_CLASS (TREE_CODE (true)) == '1' - && operand_equal_p (false, TREE_OPERAND (true, 0), 0)) - || (TREE_CODE_CLASS (TREE_CODE (false)) == '1' - && operand_equal_p (true, TREE_OPERAND (false, 0), 0))) + tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0); + tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0); + + if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2' + && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) + || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2' + && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)) + || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1' + && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) + || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1' + && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))) return expand_expr (build1 (NOP_EXPR, type, - build (COND_EXPR, TREE_TYPE (true), + build (COND_EXPR, TREE_TYPE (iftrue), TREE_OPERAND (exp, 0), - true, false)), + iftrue, iffalse)), target, tmode, modifier); } @@ -7606,11 +8022,11 @@ expand_expr (exp, target, tmode, modifier) if (ignore) { expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, - ro_modifier); + modifier); return const0_rtx; } - op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier); + op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier); if (GET_MODE (op0) == mode) return op0; @@ -7645,7 +8061,7 @@ expand_expr (exp, target, tmode, modifier) /* If we are not to produce a result, we have 
no target. Otherwise, if a target was specified use it; it will not be used as an - intermediate target unless it is safe. If no target, use a + intermediate target unless it is safe. If no target, use a temporary. */ if (ignore) @@ -7661,8 +8077,8 @@ expand_expr (exp, target, tmode, modifier) || GET_CODE (original_target) == REG || TREE_ADDRESSABLE (type)) #endif - && ! (GET_CODE (original_target) == MEM - && MEM_VOLATILE_P (original_target))) + && (GET_CODE (original_target) != MEM + || TREE_ADDRESSABLE (type))) temp = original_target; else if (TREE_ADDRESSABLE (type)) abort (); @@ -7683,10 +8099,14 @@ expand_expr (exp, target, tmode, modifier) && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') { rtx result; - optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab - : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab - : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab - : xor_optab); + optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR + ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) + ? addv_optab : add_optab) + : TREE_CODE (binary_op) == MINUS_EXPR + ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) + ? subv_optab : sub_optab) + : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab + : xor_optab); /* If we had X ? A : A + 1, do this as A + (X == 0). @@ -7723,7 +8143,7 @@ expand_expr (exp, target, tmode, modifier) TREE_OPERAND (exp, 0) = invert_truthvalue (TREE_OPERAND (exp, 0)); } - + do_pending_stack_adjust (); NO_DEFER_POP; op0 = gen_label_rtx (); @@ -7780,7 +8200,8 @@ expand_expr (exp, target, tmode, modifier) || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR) && safe_from_p (temp, TREE_OPERAND (exp, 2), 1)) { - if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER) + if (GET_CODE (temp) == REG + && REGNO (temp) < FIRST_PSEUDO_REGISTER) temp = gen_reg_rtx (mode); store_expr (TREE_OPERAND (exp, 1), temp, 0); jumpif (TREE_OPERAND (exp, 0), op0); @@ -7798,7 +8219,8 @@ expand_expr (exp, target, tmode, modifier) || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR) && safe_from_p (temp, TREE_OPERAND (exp, 1), 1)) { - if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER) + if (GET_CODE (temp) == REG + && REGNO (temp) < FIRST_PSEUDO_REGISTER) temp = gen_reg_rtx (mode); store_expr (TREE_OPERAND (exp, 2), temp, 0); jumpifnot (TREE_OPERAND (exp, 0), op0); @@ -7813,7 +8235,11 @@ expand_expr (exp, target, tmode, modifier) jumpifnot (TREE_OPERAND (exp, 0), op0); start_cleanup_deferral (); - if (temp != 0) + + /* One branch of the cond can be void, if it never returns. For + example A ? throw : E */ + if (temp != 0 + && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node) store_expr (TREE_OPERAND (exp, 1), temp, 0); else expand_expr (TREE_OPERAND (exp, 1), @@ -7824,7 +8250,8 @@ expand_expr (exp, target, tmode, modifier) emit_barrier (); emit_label (op0); start_cleanup_deferral (); - if (temp != 0) + if (temp != 0 + && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node) store_expr (TREE_OPERAND (exp, 2), temp, 0); else expand_expr (TREE_OPERAND (exp, 2), @@ -7861,9 +8288,14 @@ expand_expr (exp, target, tmode, modifier) if (! ignore) target = original_target; + /* Set this here so that if we get a target that refers to a + register variable that's already been used, put_reg_into_stack + knows that it should fix up those uses. 
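   A hypothetical illustration of the slot mechanism (front ends such as C++ emit TARGET_EXPRs for initializations of this shape; S and MAKE_S are invented names):

       struct S s = make_s ();   // the front end can pass the variable S
                                 // itself as the slot, so MAKE_S constructs
                                 // in place and no extra copy is emitted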
*/ + TREE_USED (slot) = 1; + if (target == 0) { - if (DECL_RTL (slot) != 0) + if (DECL_RTL_SET_P (slot)) { target = DECL_RTL (slot); /* If we have already expanded the slot, so don't do @@ -7876,12 +8308,9 @@ expand_expr (exp, target, tmode, modifier) target = assign_temp (type, 2, 0, 1); /* All temp slots at this level must not conflict. */ preserve_temp_slots (target); - DECL_RTL (slot) = target; + SET_DECL_RTL (slot, target); if (TREE_ADDRESSABLE (slot)) - { - TREE_ADDRESSABLE (slot) = 0; - mark_addressable (slot); - } + put_var_into_stack (slot); /* Since SLOT is not known to the called function to belong to its stack frame, we must build an explicit @@ -7905,24 +8334,21 @@ expand_expr (exp, target, tmode, modifier) /* If we have already assigned it space, use that space, not target that we were passed in, as our target parameter is only a hint. */ - if (DECL_RTL (slot) != 0) - { - target = DECL_RTL (slot); - /* If we have already expanded the slot, so don't do + if (DECL_RTL_SET_P (slot)) + { + target = DECL_RTL (slot); + /* If we have already expanded the slot, so don't do it again. (mrs) */ - if (TREE_OPERAND (exp, 1) == NULL_TREE) - return target; + if (TREE_OPERAND (exp, 1) == NULL_TREE) + return target; } else { - DECL_RTL (slot) = target; + SET_DECL_RTL (slot, target); /* If we must have an addressable slot, then make sure that the RTL that we just stored in slot is OK. */ if (TREE_ADDRESSABLE (slot)) - { - TREE_ADDRESSABLE (slot) = 0; - mark_addressable (slot); - } + put_var_into_stack (slot); } } @@ -7930,11 +8356,10 @@ expand_expr (exp, target, tmode, modifier) /* Mark it as expanded. */ TREE_OPERAND (exp, 1) = NULL_TREE; - TREE_USED (slot) = 1; store_expr (exp1, target, 0); expand_decl_cleanup (NULL_TREE, cleanups); - + return target; } @@ -7942,45 +8367,26 @@ expand_expr (exp, target, tmode, modifier) { tree lhs = TREE_OPERAND (exp, 0); tree rhs = TREE_OPERAND (exp, 1); - tree noncopied_parts = 0; - tree lhs_type = TREE_TYPE (lhs); temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0); - if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs)) - noncopied_parts = init_noncopied_parts (stabilize_reference (lhs), - TYPE_NONCOPIED_PARTS (lhs_type)); - while (noncopied_parts != 0) - { - expand_assignment (TREE_VALUE (noncopied_parts), - TREE_PURPOSE (noncopied_parts), 0, 0); - noncopied_parts = TREE_CHAIN (noncopied_parts); - } return temp; } case MODIFY_EXPR: { /* If lhs is complex, expand calls in rhs before computing it. - That's so we don't compute a pointer and save it over a call. - If lhs is simple, compute it first so we can give it as a - target if the rhs is just a call. This avoids an extra temp and copy - and that prevents a partial-subsumption which makes bad code. - Actually we could treat component_ref's of vars like vars. */ + That's so we don't compute a pointer and save it over a + call. If lhs is simple, compute it first so we can give it + as a target if the rhs is just a call. This avoids an + extra temp and copy and that prevents a partial-subsumption + which makes bad code. Actually we could treat + component_ref's of vars like vars. */ tree lhs = TREE_OPERAND (exp, 0); tree rhs = TREE_OPERAND (exp, 1); - tree noncopied_parts = 0; - tree lhs_type = TREE_TYPE (lhs); temp = 0; - if (TREE_CODE (lhs) != VAR_DECL - && TREE_CODE (lhs) != RESULT_DECL - && TREE_CODE (lhs) != PARM_DECL - && ! 
(TREE_CODE (lhs) == INDIRECT_REF - && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0))))) - preexpand_calls (exp); - /* Check for |= or &= of a bitfield of size one into another bitfield of size 1. In this case, (unless we need the result of the assignment) we can do this more efficiently with a @@ -7995,8 +8401,8 @@ expand_expr (exp, target, tmode, modifier) || TREE_CODE (rhs) == BIT_AND_EXPR) && TREE_OPERAND (rhs, 0) == lhs && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF - && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1 - && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1) + && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) + && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) { rtx label = gen_label_rtx (); @@ -8013,18 +8419,8 @@ expand_expr (exp, target, tmode, modifier) return const0_rtx; } - if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 - && ! (fixed_type_p (lhs) && fixed_type_p (rhs))) - noncopied_parts = save_noncopied_parts (stabilize_reference (lhs), - TYPE_NONCOPIED_PARTS (lhs_type)); - temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0); - while (noncopied_parts != 0) - { - expand_assignment (TREE_PURPOSE (noncopied_parts), - TREE_VALUE (noncopied_parts), 0, 0); - noncopied_parts = TREE_CHAIN (noncopied_parts); - } + return temp; } @@ -8045,10 +8441,6 @@ expand_expr (exp, target, tmode, modifier) return expand_increment (exp, ! ignore, ignore); case ADDR_EXPR: - /* If nonzero, TEMP will be set to the address of something that might - be a MEM corresponding to a stack slot. */ - temp = 0; - /* Are we taking the address of a nested function? */ if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL && decl_function_context (TREE_OPERAND (exp, 0)) != 0 @@ -8062,6 +8454,14 @@ expand_expr (exp, target, tmode, modifier) return a zero. */ else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) return const0_rtx; + /* If we are taking the address of a constant and are at the + top level, we have to use output_constant_def since we can't + call force_const_mem at top level. */ + else if (cfun == 0 + && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR + || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) + == 'c'))) + op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0); else { /* We make sure to pass const0_rtx down if we came in with @@ -8077,63 +8477,107 @@ expand_expr (exp, target, tmode, modifier) if (ignore) return op0; - op0 = protect_from_queue (op0, 0); + /* Pass 1 for MODIFY, so that protect_from_queue doesn't get + clever and returns a REG when given a MEM. */ + op0 = protect_from_queue (op0, 1); - /* We would like the object in memory. If it is a constant, - we can have it be statically allocated into memory. For - a non-constant (REG, SUBREG or CONCAT), we need to allocate some - memory and store the value into it. */ + /* We would like the object in memory. If it is a constant, we can + have it be statically allocated into memory. For a non-constant, + we need to allocate some memory and store the value into it. 
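
[Editor's note] The integer_onep tests above replace raw TREE_INT_CST_LOW peeks when spotting `|=` or `&=` between two one-bit bitfields; the assignment is then emitted as a conditional jump plus a constant store rather than a read-modify-write of the destination bit. Roughly, in source terms (a sketch; the struct and field names are invented):

    #include <assert.h>

    struct flags { unsigned a : 1, b : 1; };

    static void or_bit (struct flags *p)   /* p->a |= p->b */
    {
      if (p->b)        /* jumpif on the one-bit source field   */
        p->a = 1;      /* store the constant; p->a never read  */
    }

    int main (void)
    {
      struct flags f = { 0, 1 };
      or_bit (&f);
      assert (f.a == 1);
      return 0;
    }
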
*/ if (CONSTANT_P (op0)) op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), op0); - else if (GET_CODE (op0) == MEM) - { - mark_temp_addr_taken (op0); - temp = XEXP (op0, 0); - } - else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG - || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF) + || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF + || GET_CODE (op0) == PARALLEL) { - /* If this object is in a register, it must be not - be BLKmode. */ - tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); - rtx memloc = assign_temp (inner_type, 1, 1, 1); - - mark_temp_addr_taken (memloc); - emit_move_insn (memloc, op0); - op0 = memloc; + /* If the operand is a SAVE_EXPR, we can deal with this by + forcing the SAVE_EXPR into memory. */ + if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR) + { + put_var_into_stack (TREE_OPERAND (exp, 0)); + op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0)); + } + else + { + /* If this object is in a register, it can't be BLKmode. */ + tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); + rtx memloc = assign_temp (inner_type, 1, 1, 1); + + if (GET_CODE (op0) == PARALLEL) + /* Handle calls that pass values in multiple + non-contiguous locations. The Irix 6 ABI has examples + of this. */ + emit_group_store (memloc, op0, + int_size_in_bytes (inner_type)); + else + emit_move_insn (memloc, op0); + + op0 = memloc; + } } if (GET_CODE (op0) != MEM) abort (); - + + mark_temp_addr_taken (op0); if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) { - temp = XEXP (op0, 0); + op0 = XEXP (op0, 0); #ifdef POINTERS_EXTEND_UNSIGNED - if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode + if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode && mode == ptr_mode) - temp = convert_memory_address (ptr_mode, temp); + op0 = convert_memory_address (ptr_mode, op0); #endif - return temp; + return op0; + } + + /* If OP0 is not aligned as least as much as the type requires, we + need to make a temporary, copy OP0 to it, and take the address of + the temporary. We want to use the alignment of the type, not of + the operand. Note that this is incorrect for FUNCTION_TYPE, but + the test for BLKmode means that can't happen. The test for + BLKmode is because we never make mis-aligned MEMs with + non-BLKmode. + + We don't need to do this at all if the machine doesn't have + strict alignment. */ + if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode + && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) + > MEM_ALIGN (op0)) + && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT) + { + tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); + rtx new + = assign_stack_temp_for_type + (TYPE_MODE (inner_type), + MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0)) + : int_size_in_bytes (inner_type), + 1, build_qualified_type (inner_type, + (TYPE_QUALS (inner_type) + | TYPE_QUAL_CONST))); + + if (TYPE_ALIGN_OK (inner_type)) + abort (); + + emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0))); + op0 = new; } op0 = force_operand (XEXP (op0, 0), target); } - if (flag_force_addr && GET_CODE (op0) != REG) + if (flag_force_addr + && GET_CODE (op0) != REG + && modifier != EXPAND_CONST_ADDRESS + && modifier != EXPAND_INITIALIZER + && modifier != EXPAND_SUM) op0 = force_reg (Pmode, op0); if (GET_CODE (op0) == REG && ! REG_USERVAR_P (op0)) - mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT); - - /* If we might have had a temp slot, add an equivalent address - for it. 
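
[Editor's note] The new STRICT_ALIGNMENT block above covers taking the address of a BLKmode object whose actual MEM_ALIGN falls short of TYPE_ALIGN: the object is copied into a suitably aligned, const-qualified stack temporary and the temporary's address is handed out instead. A user-level analogue of why that copy is needed (hypothetical helper; the packed struct stands in for the under-aligned MEM):

    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    struct __attribute__ ((packed)) pkt { char tag; int value; };

    /* On a strict-alignment target, &p->value is not a valid int *.
       Copy into an aligned temporary and take that address instead.  */
    static int *aligned_value (const struct pkt *p, int *tmp)
    {
      memcpy (tmp, (const char *) p + offsetof (struct pkt, value),
              sizeof *tmp);
      return tmp;
    }

    int main (void)
    {
      struct pkt p = { 'x', 42 };
      int tmp;
      printf ("%d\n", *aligned_value (&p, &tmp));
      return 0;
    }
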
*/ - if (temp != 0) - update_temp_slot_address (temp, op0); + mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type))); #ifdef POINTERS_EXTEND_UNSIGNED if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode @@ -8183,7 +8627,7 @@ expand_expr (exp, target, tmode, modifier) case REALPART_EXPR: op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); return gen_realpart (mode, op0); - + case IMAGPART_EXPR: op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); return gen_imagpart (mode, op0); @@ -8193,12 +8637,12 @@ expand_expr (exp, target, tmode, modifier) enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); rtx imag_t; rtx insns; - - op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); + + op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); if (! target) target = gen_reg_rtx (mode); - + start_sequence (); /* Store the realpart and the negated imagpart to target. */ @@ -8206,15 +8650,18 @@ expand_expr (exp, target, tmode, modifier) gen_realpart (partmode, op0)); imag_t = gen_imagpart (partmode, target); - temp = expand_unop (partmode, neg_optab, - gen_imagpart (partmode, op0), imag_t, 0); + temp = expand_unop (partmode, + ! unsignedp && flag_trapv + && (GET_MODE_CLASS(partmode) == MODE_INT) + ? negv_optab : neg_optab, + gen_imagpart (partmode, op0), imag_t, 0); if (temp != imag_t) emit_move_insn (imag_t, temp); insns = get_insns (); end_sequence (); - /* Conjugate should appear as a single unit + /* Conjugate should appear as a single unit If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS, each with a separate pseudo as destination. It's not correct for flow to treat them as a unit. */ @@ -8234,7 +8681,7 @@ expand_expr (exp, target, tmode, modifier) op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); - expand_eh_region_end (handler); + expand_eh_region_end_cleanup (handler); return op0; } @@ -8252,7 +8699,7 @@ expand_expr (exp, target, tmode, modifier) /* Start a new binding layer that will keep track of all cleanup actions to be performed. */ - expand_start_bindings (0); + expand_start_bindings (2); target_temp_slot_level = temp_slot_level; @@ -8269,30 +8716,28 @@ expand_expr (exp, target, tmode, modifier) return op0; } - case GOTO_SUBROUTINE_EXPR: + case GOTO_SUBROUTINE_EXPR: { rtx subr = (rtx) TREE_OPERAND (exp, 0); rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1); rtx return_address = gen_label_rtx (); - emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address)); + emit_move_insn (return_link, + gen_rtx_LABEL_REF (Pmode, return_address)); emit_jump (subr); emit_label (return_address); return const0_rtx; } - case POPDCC_EXPR: - { - rtx dcc = get_dynamic_cleanup_chain (); - emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc))); - return const0_rtx; - } + case VA_ARG_EXPR: + return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type); - case POPDHC_EXPR: - { - rtx dhc = get_dynamic_handler_chain (); - emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc))); - return const0_rtx; - } + case EXC_PTR_EXPR: + return get_exception_pointer (cfun); + + case FDESC_EXPR: + /* Function descriptors are not valid except for as + initialization constants, and should not be expanded. */ + abort (); default: return (*lang_expand_expr) (exp, original_target, tmode, modifier); @@ -8301,7 +8746,6 @@ expand_expr (exp, target, tmode, modifier) /* Here to do an ordinary binary operator, generating an instruction from the optab already placed in `this_optab'. */ binop: - preexpand_calls (exp); if (! 
safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0; op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); @@ -8313,82 +8757,13 @@ expand_expr (exp, target, tmode, modifier) abort (); return temp; } - - -/* Return the alignment in bits of EXP, a pointer valued expression. - But don't return more than MAX_ALIGN no matter what. - The alignment returned is, by default, the alignment of the thing that - EXP points to (if it is not a POINTER_TYPE, 0 is returned). - - Otherwise, look at the expression to see if we can do better, i.e., if the - expression is actually pointing at an object whose alignment is tighter. */ - -static int -get_pointer_alignment (exp, max_align) - tree exp; - unsigned max_align; -{ - unsigned align, inner; - - if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) - return 0; - - align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); - align = MIN (align, max_align); +/* Return the tree node if a ARG corresponds to a string constant or zero + if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset + in bytes within the string that ARG is accessing. The type of the + offset will be `sizetype'. */ - while (1) - { - switch (TREE_CODE (exp)) - { - case NOP_EXPR: - case CONVERT_EXPR: - case NON_LVALUE_EXPR: - exp = TREE_OPERAND (exp, 0); - if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) - return align; - inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); - align = MIN (inner, max_align); - break; - - case PLUS_EXPR: - /* If sum of pointer + int, restrict our maximum alignment to that - imposed by the integer. If not, we can't do any better than - ALIGN. */ - if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST) - return align; - - while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT) - & (max_align - 1)) - != 0) - max_align >>= 1; - - exp = TREE_OPERAND (exp, 0); - break; - - case ADDR_EXPR: - /* See what we are pointing at and look at its alignment. */ - exp = TREE_OPERAND (exp, 0); - if (TREE_CODE (exp) == FUNCTION_DECL) - align = FUNCTION_BOUNDARY; - else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd') - align = DECL_ALIGN (exp); -#ifdef CONSTANT_ALIGNMENT - else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c') - align = CONSTANT_ALIGNMENT (exp, align); -#endif - return MIN (align, max_align); - - default: - return align; - } - } -} - -/* Return the tree node and offset if a given argument corresponds to - a string constant. */ - -static tree +tree string_constant (arg, ptr_offset) tree arg; tree *ptr_offset; @@ -8398,7 +8773,7 @@ string_constant (arg, ptr_offset) if (TREE_CODE (arg) == ADDR_EXPR && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) { - *ptr_offset = integer_zero_node; + *ptr_offset = size_zero_node; return TREE_OPERAND (arg, 0); } else if (TREE_CODE (arg) == PLUS_EXPR) @@ -8412,1902 +8787,19 @@ string_constant (arg, ptr_offset) if (TREE_CODE (arg0) == ADDR_EXPR && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST) { - *ptr_offset = arg1; + *ptr_offset = convert (sizetype, arg1); return TREE_OPERAND (arg0, 0); } else if (TREE_CODE (arg1) == ADDR_EXPR && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST) { - *ptr_offset = arg0; + *ptr_offset = convert (sizetype, arg0); return TREE_OPERAND (arg1, 0); } } return 0; } - -/* Compute the length of a C string. TREE_STRING_LENGTH is not the right - way, because it could contain a zero byte in the middle. - TREE_STRING_LENGTH is the size of the character array, not the string. 
- - Unfortunately, string_constant can't access the values of const char - arrays with initializers, so neither can we do so here. */ - -static tree -c_strlen (src) - tree src; -{ - tree offset_node; - int offset, max; - char *ptr; - - src = string_constant (src, &offset_node); - if (src == 0) - return 0; - max = TREE_STRING_LENGTH (src); - ptr = TREE_STRING_POINTER (src); - if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) - { - /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't - compute the offset to the following null if we don't know where to - start searching for it. */ - int i; - for (i = 0; i < max; i++) - if (ptr[i] == 0) - return 0; - /* We don't know the starting offset, but we do know that the string - has no internal zero bytes. We can assume that the offset falls - within the bounds of the string; otherwise, the programmer deserves - what he gets. Subtract the offset from the length of the string, - and return that. */ - /* This would perhaps not be valid if we were dealing with named - arrays in addition to literal string constants. */ - return size_binop (MINUS_EXPR, size_int (max), offset_node); - } - - /* We have a known offset into the string. Start searching there for - a null character. */ - if (offset_node == 0) - offset = 0; - else - { - /* Did we get a long long offset? If so, punt. */ - if (TREE_INT_CST_HIGH (offset_node) != 0) - return 0; - offset = TREE_INT_CST_LOW (offset_node); - } - /* If the offset is known to be out of bounds, warn, and call strlen at - runtime. */ - if (offset < 0 || offset > max) - { - warning ("offset outside bounds of constant string"); - return 0; - } - /* Use strlen to search for the first zero byte. Since any strings - constructed with build_string will have nulls appended, we win even - if we get handed something like (char[4])"abcd". - - Since OFFSET is our starting index into the string, no further - calculation is needed. */ - return size_int (strlen (ptr + offset)); -} - -rtx -expand_builtin_return_addr (fndecl_code, count, tem) - enum built_in_function fndecl_code; - int count; - rtx tem; -{ - int i; - - /* Some machines need special handling before we can access - arbitrary frames. For example, on the sparc, we must first flush - all register windows to the stack. */ -#ifdef SETUP_FRAME_ADDRESSES - if (count > 0) - SETUP_FRAME_ADDRESSES (); -#endif - - /* On the sparc, the return address is not in the frame, it is in a - register. There is no way to access it off of the current frame - pointer, but it can be accessed off the previous frame pointer by - reading the value from the register window save area. */ -#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME - if (fndecl_code == BUILT_IN_RETURN_ADDRESS) - count--; -#endif - - /* Scan back COUNT frames to the specified frame. */ - for (i = 0; i < count; i++) - { - /* Assume the dynamic chain pointer is in the word that the - frame address points to, unless otherwise specified. */ -#ifdef DYNAMIC_CHAIN_ADDRESS - tem = DYNAMIC_CHAIN_ADDRESS (tem); -#endif - tem = memory_address (Pmode, tem); - tem = copy_to_reg (gen_rtx_MEM (Pmode, tem)); - } - - /* For __builtin_frame_address, return what we've got. */ - if (fndecl_code == BUILT_IN_FRAME_ADDRESS) - return tem; - - /* For __builtin_return_address, Get the return address from that - frame. 
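
[Editor's note] Back to the string helpers above: string_constant recognizes exactly two tree shapes (the address of a STRING_CST, and a PLUS_EXPR adding an offset to one), and the removed c_strlen built its compile-time folding on top of that: a known offset yields the tail length directly, while an unknown offset succeeds only when the string has no interior NUL. The observable effect, sketched from the source level:

    #include <stdio.h>
    #include <string.h>

    int main (void)
    {
      const char *whole = "example";       /* ADDR_EXPR of a STRING_CST, offset 0 */
      const char *tail = "example" + 3;    /* the PLUS_EXPR shape, offset 3       */

      /* known string and known offset: foldable to the constant 3 */
      printf ("%s %s %zu\n", whole, tail, strlen ("hello" + 2));

      /* an interior NUL plus a non-constant offset defeats the folding */
      {
        volatile int off = 5;
        printf ("%zu\n", strlen ("foo\0bar" + off));   /* run time: 2 */
      }
      return 0;
    }
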
*/ -#ifdef RETURN_ADDR_RTX - tem = RETURN_ADDR_RTX (count, tem); -#else - tem = memory_address (Pmode, - plus_constant (tem, GET_MODE_SIZE (Pmode))); - tem = gen_rtx_MEM (Pmode, tem); -#endif - return tem; -} - -/* Construct the leading half of a __builtin_setjmp call. Control will - return to RECEIVER_LABEL. This is used directly by sjlj exception - handling code. */ - -void -expand_builtin_setjmp_setup (buf_addr, receiver_label) - rtx buf_addr; - rtx receiver_label; -{ - enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); - rtx stack_save; - -#ifdef POINTERS_EXTEND_UNSIGNED - buf_addr = convert_memory_address (Pmode, buf_addr); -#endif - - buf_addr = force_reg (Pmode, buf_addr); - - emit_queue (); - - /* We store the frame pointer and the address of receiver_label in - the buffer and use the rest of it for the stack save area, which - is machine-dependent. */ - -#ifndef BUILTIN_SETJMP_FRAME_VALUE -#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx -#endif - - emit_move_insn (gen_rtx_MEM (Pmode, buf_addr), - BUILTIN_SETJMP_FRAME_VALUE); - emit_move_insn (validize_mem - (gen_rtx_MEM (Pmode, - plus_constant (buf_addr, - GET_MODE_SIZE (Pmode)))), - force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label))); - - stack_save = gen_rtx_MEM (sa_mode, - plus_constant (buf_addr, - 2 * GET_MODE_SIZE (Pmode))); - emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX); - - /* If there is further processing to do, do it. */ -#ifdef HAVE_builtin_setjmp_setup - if (HAVE_builtin_setjmp_setup) - emit_insn (gen_builtin_setjmp_setup (buf_addr)); -#endif - - /* Tell optimize_save_area_alloca that extra work is going to - need to go on during alloca. */ - current_function_calls_setjmp = 1; - - /* Set this so all the registers get saved in our frame; we need to be - able to copy the saved values for any registers from frames we unwind. */ - current_function_has_nonlocal_label = 1; -} - -/* Construct the trailing part of a __builtin_setjmp call. - This is used directly by sjlj exception handling code. */ - -void -expand_builtin_setjmp_receiver (receiver_label) - rtx receiver_label ATTRIBUTE_UNUSED; -{ - /* Clobber the FP when we get here, so we have to make sure it's - marked as used by this function. */ - emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); - - /* Mark the static chain as clobbered here so life information - doesn't get messed up for it. */ - emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx)); - - /* Now put in the code to restore the frame pointer, and argument - pointer, if needed. The code below is from expand_end_bindings - in stmt.c; see detailed documentation there. */ -#ifdef HAVE_nonlocal_goto - if (! HAVE_nonlocal_goto) -#endif - emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx); - -#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM - if (fixed_regs[ARG_POINTER_REGNUM]) - { -#ifdef ELIMINABLE_REGS - size_t i; - static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS; - - for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++) - if (elim_regs[i].from == ARG_POINTER_REGNUM - && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM) - break; - - if (i == sizeof elim_regs / sizeof elim_regs [0]) -#endif - { - /* Now restore our arg pointer from the address at which it - was saved in our stack frame. - If there hasn't be space allocated for it yet, make - some now. 
*/ - if (arg_pointer_save_area == 0) - arg_pointer_save_area - = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); - emit_move_insn (virtual_incoming_args_rtx, - copy_to_reg (arg_pointer_save_area)); - } - } -#endif - -#ifdef HAVE_builtin_setjmp_receiver - if (HAVE_builtin_setjmp_receiver) - emit_insn (gen_builtin_setjmp_receiver (receiver_label)); - else -#endif -#ifdef HAVE_nonlocal_goto_receiver - if (HAVE_nonlocal_goto_receiver) - emit_insn (gen_nonlocal_goto_receiver ()); - else -#endif - { - ; /* Nothing */ - } - - /* @@@ This is a kludge. Not all machine descriptions define a blockage - insn, but we must not allow the code we just generated to be reordered - by scheduling. Specifically, the update of the frame pointer must - happen immediately, not later. So emit an ASM_INPUT to act as blockage - insn. */ - emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); -} - - -/* __builtin_setjmp is passed a pointer to an array of five words (not - all will be used on all machines). It operates similarly to the C - library function of the same name, but is more efficient. Much of - the code below (and for longjmp) is copied from the handling of - non-local gotos. - - NOTE: This is intended for use by GNAT and the exception handling - scheme in the compiler and will only work in the method used by - them. */ - -static rtx -expand_builtin_setjmp (arglist, target) - tree arglist; - rtx target; -{ - rtx buf_addr, next_lab, cont_lab; - - if (arglist == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) - return NULL_RTX; - - if (target == 0 || GET_CODE (target) != REG - || REGNO (target) < FIRST_PSEUDO_REGISTER) - target = gen_reg_rtx (TYPE_MODE (integer_type_node)); - - buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); - - next_lab = gen_label_rtx (); - cont_lab = gen_label_rtx (); - - expand_builtin_setjmp_setup (buf_addr, next_lab); - - /* Set TARGET to zero and branch to the continue label. */ - emit_move_insn (target, const0_rtx); - emit_jump_insn (gen_jump (cont_lab)); - emit_barrier (); - emit_label (next_lab); - - expand_builtin_setjmp_receiver (next_lab); - - /* Set TARGET to one. */ - emit_move_insn (target, const1_rtx); - emit_label (cont_lab); - - /* Tell flow about the strange goings on. Putting `next_lab' on - `nonlocal_goto_handler_labels' to indicates that function - calls may traverse the arc back to this label. */ - - current_function_has_nonlocal_label = 1; - nonlocal_goto_handler_labels - = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels); - - return target; -} - -void -expand_builtin_longjmp (buf_addr, value) - rtx buf_addr, value; -{ - rtx fp, lab, stack; - enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); - -#ifdef POINTERS_EXTEND_UNSIGNED - buf_addr = convert_memory_address (Pmode, buf_addr); -#endif - buf_addr = force_reg (Pmode, buf_addr); - - /* We used to store value in static_chain_rtx, but that fails if pointers - are smaller than integers. We instead require that the user must pass - a second argument of 1, because that is what builtin_setjmp will - return. This also makes EH slightly more efficient, since we are no - longer copying around a value that we don't care about. 
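
[Editor's note] Taken together, the removed expanders implement the contract spelled out in the comments above: a five-word buffer holding the frame pointer, the receiver-label address, and a machine-dependent stack save area; a return of 0 on the direct path and 1 on re-entry; and a hard requirement that the longjmp value be exactly 1. Usage of the documented GNU builtin pair, for reference (user code, not GCC-internal):

    #include <stdio.h>

    static void *buf[5];             /* the five-word buffer described above */

    static void bail_out (void)
    {
      __builtin_longjmp (buf, 1);    /* a value other than 1 is rejected */
    }

    int main (void)
    {
      if (__builtin_setjmp (buf) == 0)
        {
          puts ("direct return: 0");
          bail_out ();
        }
      puts ("via longjmp: 1");
      return 0;
    }
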
*/ - if (value != const1_rtx) - abort (); - -#ifdef HAVE_builtin_longjmp - if (HAVE_builtin_longjmp) - emit_insn (gen_builtin_longjmp (buf_addr)); - else -#endif - { - fp = gen_rtx_MEM (Pmode, buf_addr); - lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr, - GET_MODE_SIZE (Pmode))); - - stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr, - 2 * GET_MODE_SIZE (Pmode))); - - /* Pick up FP, label, and SP from the block and jump. This code is - from expand_goto in stmt.c; see there for detailed comments. */ -#if HAVE_nonlocal_goto - if (HAVE_nonlocal_goto) - /* We have to pass a value to the nonlocal_goto pattern that will - get copied into the static_chain pointer, but it does not matter - what that value is, because builtin_setjmp does not use it. */ - emit_insn (gen_nonlocal_goto (value, fp, stack, lab)); - else -#endif - { - lab = copy_to_reg (lab); - - emit_move_insn (hard_frame_pointer_rtx, fp); - emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX); - - emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); - emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx)); - emit_indirect_jump (lab); - } - } -} - -static rtx -get_memory_rtx (exp) - tree exp; -{ - rtx mem; - int is_aggregate; - - mem = gen_rtx_MEM (BLKmode, - memory_address (BLKmode, - expand_expr (exp, NULL_RTX, - ptr_mode, EXPAND_SUM))); - - RTX_UNCHANGING_P (mem) = TREE_READONLY (exp); - - /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P - if the value is the address of a structure or if the expression is - cast to a pointer to structure type. */ - is_aggregate = 0; - - while (TREE_CODE (exp) == NOP_EXPR) - { - tree cast_type = TREE_TYPE (exp); - if (TREE_CODE (cast_type) == POINTER_TYPE - && AGGREGATE_TYPE_P (TREE_TYPE (cast_type))) - { - is_aggregate = 1; - break; - } - exp = TREE_OPERAND (exp, 0); - } - - if (is_aggregate == 0) - { - tree type; - - if (TREE_CODE (exp) == ADDR_EXPR) - /* If this is the address of an object, check whether the - object is an array. */ - type = TREE_TYPE (TREE_OPERAND (exp, 0)); - else - type = TREE_TYPE (TREE_TYPE (exp)); - is_aggregate = AGGREGATE_TYPE_P (type); - } - - MEM_SET_IN_STRUCT_P (mem, is_aggregate); - return mem; -} - - -/* Expand an expression EXP that calls a built-in function, - with result going to TARGET if that's convenient - (and in mode MODE if that's convenient). - SUBTARGET may be used as the target for computing one of EXP's operands. - IGNORE is nonzero if the value is to be ignored. */ - -#define CALLED_AS_BUILT_IN(NODE) \ - (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10)) - -static rtx -expand_builtin (exp, target, subtarget, mode, ignore) - tree exp; - rtx target; - rtx subtarget; - enum machine_mode mode; - int ignore; -{ - tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); - tree arglist = TREE_OPERAND (exp, 1); - rtx op0; - rtx lab1, insns; - enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); - optab builtin_optab; - - switch (DECL_FUNCTION_CODE (fndecl)) - { - case BUILT_IN_ABS: - case BUILT_IN_LABS: - case BUILT_IN_FABS: - /* build_function_call changes these into ABS_EXPR. */ - abort (); - - case BUILT_IN_SIN: - case BUILT_IN_COS: - /* Treat these like sqrt, but only if the user asks for them. */ - if (! flag_fast_math) - break; - case BUILT_IN_FSQRT: - /* If not optimizing, call the library function. */ - if (! optimize) - break; - - if (arglist == 0 - /* Arg could be wrong type if user redeclared this fcn wrong. 
*/ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE) - break; - - /* Stabilize and compute the argument. */ - if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL - && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL) - { - exp = copy_node (exp); - arglist = copy_node (arglist); - TREE_OPERAND (exp, 1) = arglist; - TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist)); - } - op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); - - /* Make a suitable register to place result in. */ - target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); - - emit_queue (); - start_sequence (); - - switch (DECL_FUNCTION_CODE (fndecl)) - { - case BUILT_IN_SIN: - builtin_optab = sin_optab; break; - case BUILT_IN_COS: - builtin_optab = cos_optab; break; - case BUILT_IN_FSQRT: - builtin_optab = sqrt_optab; break; - default: - abort (); - } - - /* Compute into TARGET. - Set TARGET to wherever the result comes back. */ - target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), - builtin_optab, op0, target, 0); - - /* If we were unable to expand via the builtin, stop the - sequence (without outputting the insns) and break, causing - a call to the library function. */ - if (target == 0) - { - end_sequence (); - break; - } - - /* Check the results by default. But if flag_fast_math is turned on, - then assume sqrt will always be called with valid arguments. */ - - if (flag_errno_math && ! flag_fast_math) - { - /* Don't define the builtin FP instructions - if your machine is not IEEE. */ - if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT) - abort (); - - lab1 = gen_label_rtx (); - - /* Test the result; if it is NaN, set errno=EDOM because - the argument was not in the domain. */ - emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target), - 0, 0, lab1); - -#ifdef TARGET_EDOM - { -#ifdef GEN_ERRNO_RTX - rtx errno_rtx = GEN_ERRNO_RTX; -#else - rtx errno_rtx - = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno")); -#endif - - emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM)); - } -#else - /* We can't set errno=EDOM directly; let the library call do it. - Pop the arguments right away in case the call gets deleted. */ - NO_DEFER_POP; - expand_call (exp, target, 0); - OK_DEFER_POP; -#endif - - emit_label (lab1); - } - - /* Output the entire sequence. */ - insns = get_insns (); - end_sequence (); - emit_insns (insns); - - return target; - - case BUILT_IN_FMOD: - break; - - /* __builtin_apply_args returns block of memory allocated on - the stack into which is stored the arg pointer, structure - value address, static chain, and all the registers that might - possibly be used in performing a function call. The code is - moved to the start of the function so the incoming values are - saved. */ - case BUILT_IN_APPLY_ARGS: - /* Don't do __builtin_apply_args more than once in a function. - Save the result of the first call and reuse it. */ - if (apply_args_value != 0) - return apply_args_value; - { - /* When this function is called, it means that registers must be - saved on entry to this function. So we migrate the - call to the first insn of this function. */ - rtx temp; - rtx seq; - - start_sequence (); - temp = expand_builtin_apply_args (); - seq = get_insns (); - end_sequence (); - - apply_args_value = temp; - - /* Put the sequence after the NOTE that starts the function. - If this is inside a SEQUENCE, make the outer-level insn - chain current, so the code is placed at the start of the - function. 
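
[Editor's note] With flag_errno_math set, the FSQRT path above appends a self-comparison after the open-coded instruction: only NaN fails `x == x`, and a NaN result of sqrt means the argument was outside the domain, so errno becomes EDOM. The emitted check corresponds to this sketch (sqrt spelled as the libm call here; link with -lm):

    #include <errno.h>
    #include <math.h>
    #include <stdio.h>

    static double checked_sqrt (double x)
    {
      double r = sqrt (x);   /* stands in for the open-coded sqrt insn */
      if (r != r)            /* EQ self-compare: true only for NaN     */
        errno = EDOM;        /* the TARGET_EDOM store in the hunk above */
      return r;
    }

    int main (void)
    {
      errno = 0;
      checked_sqrt (-1.0);
      printf ("errno == EDOM: %d\n", errno == EDOM);   /* 1 */
      return 0;
    }
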
*/ - push_topmost_sequence (); - emit_insns_before (seq, NEXT_INSN (get_insns ())); - pop_topmost_sequence (); - return temp; - } - - /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes - FUNCTION with a copy of the parameters described by - ARGUMENTS, and ARGSIZE. It returns a block of memory - allocated on the stack into which is stored all the registers - that might possibly be used for returning the result of a - function. ARGUMENTS is the value returned by - __builtin_apply_args. ARGSIZE is the number of bytes of - arguments that must be copied. ??? How should this value be - computed? We'll also need a safe worst case value for varargs - functions. */ - case BUILT_IN_APPLY: - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))) - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) - return const0_rtx; - else - { - int i; - tree t; - rtx ops[3]; - - for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++) - ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0); - - return expand_builtin_apply (ops[0], ops[1], ops[2]); - } - - /* __builtin_return (RESULT) causes the function to return the - value described by RESULT. RESULT is address of the block of - memory returned by __builtin_apply. */ - case BUILT_IN_RETURN: - if (arglist - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE) - expand_builtin_return (expand_expr (TREE_VALUE (arglist), - NULL_RTX, VOIDmode, 0)); - return const0_rtx; - - case BUILT_IN_SAVEREGS: - /* Don't do __builtin_saveregs more than once in a function. - Save the result of the first call and reuse it. */ - if (saveregs_value != 0) - return saveregs_value; - { - /* When this function is called, it means that registers must be - saved on entry to this function. So we migrate the - call to the first insn of this function. */ - rtx temp; - rtx seq; - - /* Now really call the function. `expand_call' does not call - expand_builtin, so there is no danger of infinite recursion here. */ - start_sequence (); - -#ifdef EXPAND_BUILTIN_SAVEREGS - /* Do whatever the machine needs done in this case. */ - temp = EXPAND_BUILTIN_SAVEREGS (arglist); -#else - /* The register where the function returns its value - is likely to have something else in it, such as an argument. - So preserve that register around the call. */ - - if (value_mode != VOIDmode) - { - rtx valreg = hard_libcall_value (value_mode); - rtx saved_valreg = gen_reg_rtx (value_mode); - - emit_move_insn (saved_valreg, valreg); - temp = expand_call (exp, target, ignore); - emit_move_insn (valreg, saved_valreg); - } - else - /* Generate the call, putting the value in a pseudo. */ - temp = expand_call (exp, target, ignore); -#endif - - seq = get_insns (); - end_sequence (); - - saveregs_value = temp; - - /* Put the sequence after the NOTE that starts the function. - If this is inside a SEQUENCE, make the outer-level insn - chain current, so the code is placed at the start of the - function. */ - push_topmost_sequence (); - emit_insns_before (seq, NEXT_INSN (get_insns ())); - pop_topmost_sequence (); - return temp; - } - - /* __builtin_args_info (N) returns word N of the arg space info - for the current function. 
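
[Editor's note] BUILT_IN_APPLY_ARGS, BUILT_IN_APPLY and BUILT_IN_RETURN above implement untyped call forwarding; as the ??? comment admits, there is no portable way to derive ARGSIZE, so callers supply a worst-case guess. The classic forwarding wrapper built from the three builtins (the 64-byte argument-block size is such a guess):

    #include <stdio.h>

    static int add (int a, int b) { return a + b; }

    static int forward (int a, int b)
    {
      void *args = __builtin_apply_args ();              /* save our registers  */
      void *ret = __builtin_apply ((void (*) ()) add,    /* re-invoke with them */
                                   args, 64);
      __builtin_return (ret);                            /* hand back add's value */
    }

    int main (void)
    {
      printf ("%d\n", forward (2, 3));   /* 5 */
      return 0;
    }
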
The number and meanings of words - is controlled by the definition of CUMULATIVE_ARGS. */ - case BUILT_IN_ARGS_INFO: - { - int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); - int *word_ptr = (int *) &current_function_args_info; -#if 0 - /* These are used by the code below that is if 0'ed away */ - int i; - tree type, elts, result; -#endif - - if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) - fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d", - __FILE__, __LINE__); - - if (arglist != 0) - { - tree arg = TREE_VALUE (arglist); - if (TREE_CODE (arg) != INTEGER_CST) - error ("argument of `__builtin_args_info' must be constant"); - else - { - int wordnum = TREE_INT_CST_LOW (arg); - - if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) - error ("argument of `__builtin_args_info' out of range"); - else - return GEN_INT (word_ptr[wordnum]); - } - } - else - error ("missing argument in `__builtin_args_info'"); - - return const0_rtx; - -#if 0 - for (i = 0; i < nwords; i++) - elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); - - type = build_array_type (integer_type_node, - build_index_type (build_int_2 (nwords, 0))); - result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); - TREE_CONSTANT (result) = 1; - TREE_STATIC (result) = 1; - result = build (INDIRECT_REF, build_pointer_type (type), result); - TREE_CONSTANT (result) = 1; - return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD); -#endif - } - - /* Return the address of the first anonymous stack arg. */ - case BUILT_IN_NEXT_ARG: - { - tree fntype = TREE_TYPE (current_function_decl); - - if ((TYPE_ARG_TYPES (fntype) == 0 - || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) - == void_type_node)) - && ! current_function_varargs) - { - error ("`va_start' used in function with fixed args"); - return const0_rtx; - } - - if (arglist) - { - tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl)); - tree arg = TREE_VALUE (arglist); - - /* Strip off all nops for the sake of the comparison. This - is not quite the same as STRIP_NOPS. It does more. - We must also strip off INDIRECT_EXPR for C++ reference - parameters. */ - while (TREE_CODE (arg) == NOP_EXPR - || TREE_CODE (arg) == CONVERT_EXPR - || TREE_CODE (arg) == NON_LVALUE_EXPR - || TREE_CODE (arg) == INDIRECT_REF) - arg = TREE_OPERAND (arg, 0); - if (arg != last_parm) - warning ("second parameter of `va_start' not last named argument"); - } - else if (! current_function_varargs) - /* Evidently an out of date version of <stdarg.h>; can't validate - va_start's second argument, but can still work as intended.
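
[Editor's note] The NEXT_ARG case above is the engine behind va_start: it rejects fixed-argument functions outright and warns unless the second argument names the last declared parameter, after stripping NOPs and the INDIRECT_REF that C++ reference parameters introduce. Conforming usage, for contrast:

    #include <stdarg.h>
    #include <stdio.h>

    static int sum (int count, ...)
    {
      va_list ap;
      int i, s = 0;

      va_start (ap, count);        /* `count` is the last named parameter */
      for (i = 0; i < count; i++)
        s += va_arg (ap, int);
      va_end (ap);
      return s;
    }

    int main (void)
    {
      printf ("%d\n", sum (3, 1, 2, 3));   /* 6 */
      return 0;
    }
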
*/ - warning ("`__builtin_next_arg' called without an argument"); - } - - return expand_binop (Pmode, add_optab, - current_function_internal_arg_pointer, - current_function_arg_offset_rtx, - NULL_RTX, 0, OPTAB_LIB_WIDEN); - - case BUILT_IN_CLASSIFY_TYPE: - if (arglist != 0) - { - tree type = TREE_TYPE (TREE_VALUE (arglist)); - enum tree_code code = TREE_CODE (type); - if (code == VOID_TYPE) - return GEN_INT (void_type_class); - if (code == INTEGER_TYPE) - return GEN_INT (integer_type_class); - if (code == CHAR_TYPE) - return GEN_INT (char_type_class); - if (code == ENUMERAL_TYPE) - return GEN_INT (enumeral_type_class); - if (code == BOOLEAN_TYPE) - return GEN_INT (boolean_type_class); - if (code == POINTER_TYPE) - return GEN_INT (pointer_type_class); - if (code == REFERENCE_TYPE) - return GEN_INT (reference_type_class); - if (code == OFFSET_TYPE) - return GEN_INT (offset_type_class); - if (code == REAL_TYPE) - return GEN_INT (real_type_class); - if (code == COMPLEX_TYPE) - return GEN_INT (complex_type_class); - if (code == FUNCTION_TYPE) - return GEN_INT (function_type_class); - if (code == METHOD_TYPE) - return GEN_INT (method_type_class); - if (code == RECORD_TYPE) - return GEN_INT (record_type_class); - if (code == UNION_TYPE || code == QUAL_UNION_TYPE) - return GEN_INT (union_type_class); - if (code == ARRAY_TYPE) - { - if (TYPE_STRING_FLAG (type)) - return GEN_INT (string_type_class); - else - return GEN_INT (array_type_class); - } - if (code == SET_TYPE) - return GEN_INT (set_type_class); - if (code == FILE_TYPE) - return GEN_INT (file_type_class); - if (code == LANG_TYPE) - return GEN_INT (lang_type_class); - } - return GEN_INT (no_type_class); - - case BUILT_IN_CONSTANT_P: - if (arglist == 0) - return const0_rtx; - else - { - tree arg = TREE_VALUE (arglist); - rtx tmp; - - /* We return 1 for a numeric type that's known to be a constant - value at compile-time or for an aggregate type that's a - literal constant. */ - STRIP_NOPS (arg); - - /* If we know this is a constant, emit the constant of one. */ - if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c' - || (TREE_CODE (arg) == CONSTRUCTOR - && TREE_CONSTANT (arg)) - || (TREE_CODE (arg) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)) - return const1_rtx; - - /* If we aren't going to be running CSE or this expression - has side effects, show we don't know it to be a constant. - Likewise if it's a pointer or aggregate type since in those - case we only want literals, since those are only optimized - when generating RTL, not later. */ - if (TREE_SIDE_EFFECTS (arg) || cse_not_expected - || AGGREGATE_TYPE_P (TREE_TYPE (arg)) - || POINTER_TYPE_P (TREE_TYPE (arg))) - return const0_rtx; - - /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a - chance to see if it can deduce whether ARG is constant. */ - - tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0); - tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp); - return tmp; - } - - case BUILT_IN_FRAME_ADDRESS: - /* The argument must be a nonnegative integer constant. - It counts the number of frames to scan up the stack. - The value is the address of that frame. */ - case BUILT_IN_RETURN_ADDRESS: - /* The argument must be a nonnegative integer constant. - It counts the number of frames to scan up the stack. - The value is the return address saved in that frame. */ - if (arglist == 0) - /* Warning about missing arg was already issued. 
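
[Editor's note] BUILT_IN_CONSTANT_P above folds to 1 for literals, constant CONSTRUCTORs and string addresses, folds to 0 for expressions with side effects or of pointer/aggregate type, and otherwise defers the question to CSE through a CONSTANT_P_RTX. Hence the optimization-dependent results:

    #include <stdio.h>

    int main (void)
    {
      int x = 42;
      printf ("%d\n", __builtin_constant_p (42));      /* 1: literal            */
      printf ("%d\n", __builtin_constant_p ("abc"));   /* 1: ADDR of STRING_CST */
      printf ("%d\n", __builtin_constant_p (x));       /* 0 at -O0; CSE may yet
                                                          prove 1 when optimizing */
      return 0;
    }
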
*/ - return const0_rtx; - else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST - || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0) - { - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) - error ("invalid arg to `__builtin_frame_address'"); - else - error ("invalid arg to `__builtin_return_address'"); - return const0_rtx; - } - else - { - rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), - TREE_INT_CST_LOW (TREE_VALUE (arglist)), - hard_frame_pointer_rtx); - - /* Some ports cannot access arbitrary stack frames. */ - if (tem == NULL) - { - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) - warning ("unsupported arg to `__builtin_frame_address'"); - else - warning ("unsupported arg to `__builtin_return_address'"); - return const0_rtx; - } - - /* For __builtin_frame_address, return what we've got. */ - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) - return tem; - - if (GET_CODE (tem) != REG - && ! CONSTANT_P (tem)) - tem = copy_to_mode_reg (Pmode, tem); - return tem; - } - - /* Returns the address of the area where the structure is returned. - 0 otherwise. */ - case BUILT_IN_AGGREGATE_INCOMING_ADDRESS: - if (arglist != 0 - || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))) - || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM) - return const0_rtx; - else - return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); - - case BUILT_IN_ALLOCA: - if (arglist == 0 - /* Arg could be non-integer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) - break; - - /* Compute the argument. */ - op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); - - /* Allocate the desired space. */ - return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); - - case BUILT_IN_FFS: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - if (arglist == 0 - /* Arg could be non-integer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) - break; - - /* Compute the argument. */ - op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); - /* Compute ffs, into TARGET if possible. - Set TARGET to wherever the result comes back. */ - target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), - ffs_optab, op0, target, 1); - if (target == 0) - abort (); - return target; - - case BUILT_IN_STRLEN: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) - break; - else - { - tree src = TREE_VALUE (arglist); - tree len = c_strlen (src); - - int align - = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - - rtx result, src_rtx, char_rtx; - enum machine_mode insn_mode = value_mode, char_mode; - enum insn_code icode; - - /* If the length is known, just return it. */ - if (len != 0) - return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD); - - /* If SRC is not a pointer type, don't do this operation inline. */ - if (align == 0) - break; - - /* Call a function if we can't compute strlen in the right mode. 
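
[Editor's note] Stepping back to the FFS case above: it expands through ffs_optab whenever optimization is on or the call arrived spelled __builtin_ffs (per CALLED_AS_BUILT_IN). Its contract, demonstrated:

    #include <stdio.h>

    int main (void)
    {
      /* one-based index of the least significant set bit; 0 for 0 */
      printf ("%d %d %d\n",
              __builtin_ffs (0),     /* 0                  */
              __builtin_ffs (1),     /* 1                  */
              __builtin_ffs (40));   /* 4: 40 == 0b101000  */
      return 0;
    }
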
*/ - - while (insn_mode != VOIDmode) - { - icode = strlen_optab->handlers[(int) insn_mode].insn_code; - if (icode != CODE_FOR_nothing) - break; - - insn_mode = GET_MODE_WIDER_MODE (insn_mode); - } - if (insn_mode == VOIDmode) - break; - - /* Make a place to write the result of the instruction. */ - result = target; - if (! (result != 0 - && GET_CODE (result) == REG - && GET_MODE (result) == insn_mode - && REGNO (result) >= FIRST_PSEUDO_REGISTER)) - result = gen_reg_rtx (insn_mode); - - /* Make sure the operands are acceptable to the predicates. */ - - if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode)) - result = gen_reg_rtx (insn_mode); - src_rtx = memory_address (BLKmode, - expand_expr (src, NULL_RTX, ptr_mode, - EXPAND_NORMAL)); - - if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode)) - src_rtx = copy_to_mode_reg (Pmode, src_rtx); - - /* Check the string is readable and has an end. */ - if (current_function_check_memory_usage) - emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2, - src_rtx, Pmode, - GEN_INT (MEMORY_USE_RO), - TYPE_MODE (integer_type_node)); - - char_rtx = const0_rtx; - char_mode = insn_operand_mode[(int)icode][2]; - if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode)) - char_rtx = copy_to_mode_reg (char_mode, char_rtx); - - emit_insn (GEN_FCN (icode) (result, - gen_rtx_MEM (BLKmode, src_rtx), - char_rtx, GEN_INT (align))); - - /* Return the value in the proper mode for this function. */ - if (GET_MODE (result) == value_mode) - return result; - else if (target != 0) - { - convert_move (target, result, 0); - return target; - } - else - return convert_to_mode (value_mode, result, 0); - } - - case BUILT_IN_STRCPY: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) - break; - else - { - tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist))); - - if (len == 0) - break; - - len = size_binop (PLUS_EXPR, len, integer_one_node); - - chainon (arglist, build_tree_list (NULL_TREE, len)); - } - - /* Drops in. */ - case BUILT_IN_MEMCPY: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) - != POINTER_TYPE) - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || (TREE_CODE (TREE_TYPE (TREE_VALUE - (TREE_CHAIN (TREE_CHAIN (arglist))))) - != INTEGER_TYPE)) - break; - else - { - tree dest = TREE_VALUE (arglist); - tree src = TREE_VALUE (TREE_CHAIN (arglist)); - tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); - - int src_align - = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - int dest_align - = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - rtx dest_mem, src_mem, dest_addr, len_rtx; - - /* If either SRC or DEST is not a pointer type, don't do - this operation in-line. 
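
[Editor's note] The STRCPY case above never copies on its own: once c_strlen pins down the source length, it appends len + 1 to the argument list and falls into the MEMCPY case ("Drops in"). The transformation in source terms:

    #include <stdio.h>
    #include <string.h>

    int main (void)
    {
      char dst[8];

      /* strcpy (dst, "abc") with a known source length becomes the
         equivalent of a four-byte memcpy, trailing NUL included */
      memcpy (dst, "abc", strlen ("abc") + 1);
      puts (dst);
      return 0;
    }
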
*/ - if (src_align == 0 || dest_align == 0) - { - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY) - TREE_CHAIN (TREE_CHAIN (arglist)) = 0; - break; - } - - dest_mem = get_memory_rtx (dest); - src_mem = get_memory_rtx (src); - len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); - - /* Just copy the rights of SRC to the rights of DEST. */ - if (current_function_check_memory_usage) - emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, - XEXP (dest_mem, 0), Pmode, - XEXP (src_mem, 0), Pmode, - len_rtx, TYPE_MODE (sizetype)); - - /* Copy word part most expediently. */ - dest_addr - = emit_block_move (dest_mem, src_mem, len_rtx, - MIN (src_align, dest_align)); - - if (dest_addr == 0) - dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); - - return dest_addr; - } - - case BUILT_IN_MEMSET: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) - != INTEGER_TYPE) - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || (INTEGER_TYPE - != (TREE_CODE (TREE_TYPE - (TREE_VALUE - (TREE_CHAIN (TREE_CHAIN (arglist)))))))) - break; - else - { - tree dest = TREE_VALUE (arglist); - tree val = TREE_VALUE (TREE_CHAIN (arglist)); - tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); - - int dest_align - = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - rtx dest_mem, dest_addr, len_rtx; - - /* If DEST is not a pointer type, don't do this - operation in-line. */ - if (dest_align == 0) - break; - - /* If the arguments have side-effects, then we can only evaluate - them at most once. The following code evaluates them twice if - they are not constants because we break out to expand_call - in that case. They can't be constants if they have side-effects - so we can check for that first. Alternatively, we could call - save_expr to make multiple evaluation safe. */ - if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len)) - break; - - /* If VAL is not 0, don't do this operation in-line. */ - if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx) - break; - - /* If LEN does not expand to a constant, don't do this - operation in-line. */ - len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); - if (GET_CODE (len_rtx) != CONST_INT) - break; - - dest_mem = get_memory_rtx (dest); - - /* Just check DST is writable and mark it as readable. */ - if (current_function_check_memory_usage) - emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, - XEXP (dest_mem, 0), Pmode, - len_rtx, TYPE_MODE (sizetype), - GEN_INT (MEMORY_USE_WO), - TYPE_MODE (integer_type_node)); - - - dest_addr = clear_storage (dest_mem, len_rtx, dest_align); - - if (dest_addr == 0) - dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); - - return dest_addr; - } - -/* These comparison functions need an instruction that returns an actual - index. An ordinary compare that just sets the condition codes - is not enough. */ -#ifdef HAVE_cmpstrsi - case BUILT_IN_STRCMP: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - /* If we need to check memory accesses, call the library function. */ - if (current_function_check_memory_usage) - break; - - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. 
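
[Editor's note] Note how narrow the inline MEMSET case above is: the fill value must expand to constant zero and the length to a CONST_INT, because the whole job is handed to clear_storage; a nonzero fill or a variable length stays a library call. Equivalently, at the source level:

    #include <stdio.h>
    #include <string.h>

    static char buf[64];

    int main (void)
    {
      int c = 0xff;
      size_t n = 16;

      memset (buf, 0, sizeof buf);   /* zero value, constant length: clear_storage */
      memset (buf, c, n);            /* nonzero or variable length: library call   */
      printf ("%d\n", (int) (unsigned char) buf[0]);   /* 255 */
      return 0;
    }
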
*/ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) - break; - else if (!HAVE_cmpstrsi) - break; - { - tree arg1 = TREE_VALUE (arglist); - tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); - tree len, len2; - - len = c_strlen (arg1); - if (len) - len = size_binop (PLUS_EXPR, integer_one_node, len); - len2 = c_strlen (arg2); - if (len2) - len2 = size_binop (PLUS_EXPR, integer_one_node, len2); - - /* If we don't have a constant length for the first, use the length - of the second, if we know it. We don't require a constant for - this case; some cost analysis could be done if both are available - but neither is constant. For now, assume they're equally cheap. - - If both strings have constant lengths, use the smaller. This - could arise if optimization results in strcpy being called with - two fixed strings, or if the code was machine-generated. We should - add some code to the `memcmp' handler below to deal with such - situations, someday. */ - if (!len || TREE_CODE (len) != INTEGER_CST) - { - if (len2) - len = len2; - else if (len == 0) - break; - } - else if (len2 && TREE_CODE (len2) == INTEGER_CST) - { - if (tree_int_cst_lt (len2, len)) - len = len2; - } - - chainon (arglist, build_tree_list (NULL_TREE, len)); - } - - /* Drops in. */ - case BUILT_IN_MEMCMP: - /* If not optimizing, call the library function. */ - if (!optimize && ! CALLED_AS_BUILT_IN (fndecl)) - break; - - /* If we need to check memory accesses, call the library function. */ - if (current_function_check_memory_usage) - break; - - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) - break; - else if (!HAVE_cmpstrsi) - break; - { - tree arg1 = TREE_VALUE (arglist); - tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); - tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); - rtx result; - - int arg1_align - = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - int arg2_align - = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - enum machine_mode insn_mode - = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0]; - - /* If we don't have POINTER_TYPE, call the function. */ - if (arg1_align == 0 || arg2_align == 0) - { - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP) - TREE_CHAIN (TREE_CHAIN (arglist)) = 0; - break; - } - - /* Make a place to write the result of the instruction. */ - result = target; - if (! (result != 0 - && GET_CODE (result) == REG && GET_MODE (result) == insn_mode - && REGNO (result) >= FIRST_PSEUDO_REGISTER)) - result = gen_reg_rtx (insn_mode); - - emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1), - get_memory_rtx (arg2), - expand_expr (len, NULL_RTX, VOIDmode, 0), - GEN_INT (MIN (arg1_align, arg2_align)))); - - /* Return the value in the proper mode for this function. 
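
[Editor's note] The length-selection policy above means a strcmp of two constant strings is bounded by the shorter length plus its NUL: any difference either appears inside that window or is decided by the terminator, so the sign of the bounded memcmp agrees with strcmp. An illustrative assertion:

    #include <assert.h>
    #include <string.h>

    int main (void)
    {
      /* strlen ("apple") + 1 == 6 bounds the comparison; the sign matches */
      assert ((strcmp ("apple", "apricot") < 0)
              == (memcmp ("apple", "apricot", 6) < 0));
      return 0;
    }
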
*/ - mode = TYPE_MODE (TREE_TYPE (exp)); - if (GET_MODE (result) == mode) - return result; - else if (target != 0) - { - convert_move (target, result, 0); - return target; - } - else - return convert_to_mode (mode, result, 0); - } -#else - case BUILT_IN_STRCMP: - case BUILT_IN_MEMCMP: - break; -#endif - - case BUILT_IN_SETJMP: - target = expand_builtin_setjmp (arglist, target); - if (target) - return target; - break; - - /* __builtin_longjmp is passed a pointer to an array of five words. - It's similar to the C library longjmp function but works with - __builtin_setjmp above. */ - case BUILT_IN_LONGJMP: - if (arglist == 0 || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) - break; - else - { - rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, - VOIDmode, 0); - rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), - NULL_RTX, VOIDmode, 0); - - if (value != const1_rtx) - { - error ("__builtin_longjmp second argument must be 1"); - return const0_rtx; - } - - expand_builtin_longjmp (buf_addr, value); - return const0_rtx; - } - - case BUILT_IN_TRAP: -#ifdef HAVE_trap - if (HAVE_trap) - emit_insn (gen_trap ()); - else -#endif - error ("__builtin_trap not supported by this target"); - emit_barrier (); - return const0_rtx; - - /* Various hooks for the DWARF 2 __throw routine. */ - case BUILT_IN_UNWIND_INIT: - expand_builtin_unwind_init (); - return const0_rtx; - case BUILT_IN_DWARF_CFA: - return virtual_cfa_rtx; -#ifdef DWARF2_UNWIND_INFO - case BUILT_IN_DWARF_FP_REGNUM: - return expand_builtin_dwarf_fp_regnum (); - case BUILT_IN_DWARF_REG_SIZE: - return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target); -#endif - case BUILT_IN_FROB_RETURN_ADDR: - return expand_builtin_frob_return_addr (TREE_VALUE (arglist)); - case BUILT_IN_EXTRACT_RETURN_ADDR: - return expand_builtin_extract_return_addr (TREE_VALUE (arglist)); - case BUILT_IN_EH_RETURN: - expand_builtin_eh_return (TREE_VALUE (arglist), - TREE_VALUE (TREE_CHAIN (arglist)), - TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)))); - return const0_rtx; - - default: /* just do library call, if unknown builtin */ - error ("built-in function `%s' not currently supported", - IDENTIFIER_POINTER (DECL_NAME (fndecl))); - } - - /* The switch statement above can drop through to cause the function - to be called normally. */ - - return expand_call (exp, target, ignore); -} - -/* Built-in functions to perform an untyped call and return. */ - -/* For each register that may be used for calling a function, this - gives a mode used to copy the register's value. VOIDmode indicates - the register is not used for calling a function. If the machine - has register windows, this gives only the outbound registers. - INCOMING_REGNO gives the corresponding inbound register. */ -static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER]; - -/* For each register that may be used for returning values, this gives - a mode used to copy the register's value. VOIDmode indicates the - register is not used for returning values. If the machine has - register windows, this gives only the outbound registers. - INCOMING_REGNO gives the corresponding inbound register. */ -static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER]; - -/* For each register that may be used for calling a function, this - gives the offset of that register into the block returned by - __builtin_apply_args. 0 indicates that the register is not - used for calling a function. 
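
[Editor's note] BUILT_IN_TRAP above emits the target's trap instruction when one exists and diagnoses the builtin otherwise, then adds a barrier since control never continues past it. Guarded usage:

    #include <stdio.h>

    int main (void)
    {
      volatile int corrupted = 0;

      if (corrupted)
        __builtin_trap ();        /* the target's trap insn; never falls through */

      puts ("no trap taken");
      return 0;
    }
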
*/ -static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER]; - -/* Return the offset of register REGNO into the block returned by - __builtin_apply_args. This is not declared static, since it is - needed in objc-act.c. */ - -int -apply_args_register_offset (regno) - int regno; -{ - apply_args_size (); - - /* Arguments are always put in outgoing registers (in the argument - block) if such make sense. */ -#ifdef OUTGOING_REGNO - regno = OUTGOING_REGNO(regno); -#endif - return apply_args_reg_offset[regno]; -} - -/* Return the size required for the block returned by __builtin_apply_args, - and initialize apply_args_mode. */ - -static int -apply_args_size () -{ - static int size = -1; - int align, regno; - enum machine_mode mode; - - /* The values computed by this function never change. */ - if (size < 0) - { - /* The first value is the incoming arg-pointer. */ - size = GET_MODE_SIZE (Pmode); - - /* The second value is the structure value address unless this is - passed as an "invisible" first argument. */ - if (struct_value_rtx) - size += GET_MODE_SIZE (Pmode); - - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if (FUNCTION_ARG_REGNO_P (regno)) - { - /* Search for the proper mode for copying this register's - value. I'm not sure this is right, but it works so far. */ - enum machine_mode best_mode = VOIDmode; - - for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); - mode != VOIDmode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode) - && HARD_REGNO_NREGS (regno, mode) == 1) - best_mode = mode; - - if (best_mode == VOIDmode) - for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); - mode != VOIDmode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode) - && (mov_optab->handlers[(int) mode].insn_code - != CODE_FOR_nothing)) - best_mode = mode; - - mode = best_mode; - if (mode == VOIDmode) - abort (); - - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - apply_args_reg_offset[regno] = size; - size += GET_MODE_SIZE (mode); - apply_args_mode[regno] = mode; - } - else - { - apply_args_mode[regno] = VOIDmode; - apply_args_reg_offset[regno] = 0; - } - } - return size; -} - -/* Return the size required for the block returned by __builtin_apply, - and initialize apply_result_mode. */ - -static int -apply_result_size () -{ - static int size = -1; - int align, regno; - enum machine_mode mode; - - /* The values computed by this function never change. */ - if (size < 0) - { - size = 0; - - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if (FUNCTION_VALUE_REGNO_P (regno)) - { - /* Search for the proper mode for copying this register's - value. I'm not sure this is right, but it works so far. 
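
   The offset arithmetic above rounds the running size up to each
   register's natural alignment before assigning its slot.  A
   self-contained sketch of that layout rule (hypothetical helper,
   mirroring the CEIL-based rounding used by the code above):

	int
	round_up (int size, int align)
	{
	  return ((size + align - 1) / align) * align;
	}

   so a 4-byte slot placed after 6 bytes of block starts at
   round_up (6, 4), which is offset 8, and the running size then
   advances by the slot's width.
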
*/ - enum machine_mode best_mode = VOIDmode; - - for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); - mode != TImode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode)) - best_mode = mode; - - if (best_mode == VOIDmode) - for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); - mode != VOIDmode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode) - && (mov_optab->handlers[(int) mode].insn_code - != CODE_FOR_nothing)) - best_mode = mode; - - mode = best_mode; - if (mode == VOIDmode) - abort (); - - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - size += GET_MODE_SIZE (mode); - apply_result_mode[regno] = mode; - } - else - apply_result_mode[regno] = VOIDmode; - - /* Allow targets that use untyped_call and untyped_return to override - the size so that machine-specific information can be stored here. */ -#ifdef APPLY_RESULT_SIZE - size = APPLY_RESULT_SIZE; -#endif - } - return size; -} - -#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) -/* Create a vector describing the result block RESULT. If SAVEP is true, - the result block is used to save the values; otherwise it is used to - restore the values. */ - -static rtx -result_vector (savep, result) - int savep; - rtx result; -{ - int regno, size, align, nelts; - enum machine_mode mode; - rtx reg, mem; - rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); - - size = nelts = 0; - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_result_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)); - mem = change_address (result, mode, - plus_constant (XEXP (result, 0), size)); - savevec[nelts++] = (savep - ? gen_rtx_SET (VOIDmode, mem, reg) - : gen_rtx_SET (VOIDmode, reg, mem)); - size += GET_MODE_SIZE (mode); - } - return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec)); -} -#endif /* HAVE_untyped_call or HAVE_untyped_return */ - -/* Save the state required to perform an untyped call with the same - arguments as were passed to the current function. */ - -static rtx -expand_builtin_apply_args () -{ - rtx registers; - int size, align, regno; - enum machine_mode mode; - - /* Create a block where the arg-pointer, structure value address, - and argument registers can be saved. */ - registers = assign_stack_local (BLKmode, apply_args_size (), -1); - - /* Walk past the arg-pointer and structure value address. */ - size = GET_MODE_SIZE (Pmode); - if (struct_value_rtx) - size += GET_MODE_SIZE (Pmode); - - /* Save each register used in calling a function to the block. */ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_args_mode[regno]) != VOIDmode) - { - rtx tem; - - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - - tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)); - -#ifdef STACK_REGS - /* For reg-stack.c's stack register household. - Compare with a similar piece of code in function.c. */ - - emit_insn (gen_rtx_USE (mode, tem)); -#endif - - emit_move_insn (change_address (registers, mode, - plus_constant (XEXP (registers, 0), - size)), - tem); - size += GET_MODE_SIZE (mode); - } - - /* Save the arg pointer to the block. 
*/ - emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)), - copy_to_reg (virtual_incoming_args_rtx)); - size = GET_MODE_SIZE (Pmode); - - /* Save the structure value address unless this is passed as an - "invisible" first argument. */ - if (struct_value_incoming_rtx) - { - emit_move_insn (change_address (registers, Pmode, - plus_constant (XEXP (registers, 0), - size)), - copy_to_reg (struct_value_incoming_rtx)); - size += GET_MODE_SIZE (Pmode); - } - - /* Return the address of the block. */ - return copy_addr_to_reg (XEXP (registers, 0)); -} - -/* Perform an untyped call and save the state required to perform an - untyped return of whatever value was returned by the given function. */ - -static rtx -expand_builtin_apply (function, arguments, argsize) - rtx function, arguments, argsize; -{ - int size, align, regno; - enum machine_mode mode; - rtx incoming_args, result, reg, dest, call_insn; - rtx old_stack_level = 0; - rtx call_fusage = 0; - - /* Create a block where the return registers can be saved. */ - result = assign_stack_local (BLKmode, apply_result_size (), -1); - - /* ??? The argsize value should be adjusted here. */ - - /* Fetch the arg pointer from the ARGUMENTS block. */ - incoming_args = gen_reg_rtx (Pmode); - emit_move_insn (incoming_args, - gen_rtx_MEM (Pmode, arguments)); -#ifndef STACK_GROWS_DOWNWARD - incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize, - incoming_args, 0, OPTAB_LIB_WIDEN); -#endif - - /* Perform postincrements before actually calling the function. */ - emit_queue (); - - /* Push a new argument block and copy the arguments. */ - do_pending_stack_adjust (); - - /* Save the stack with nonlocal if available */ -#ifdef HAVE_save_stack_nonlocal - if (HAVE_save_stack_nonlocal) - emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX); - else -#endif - emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); - - /* Push a block of memory onto the stack to store the memory arguments. - Save the address in a register, and copy the memory arguments. ??? I - haven't figured out how the calling convention macros effect this, - but it's likely that the source and/or destination addresses in - the block copy will need updating in machine specific ways. */ - dest = allocate_dynamic_stack_space (argsize, 0, 0); - emit_block_move (gen_rtx_MEM (BLKmode, dest), - gen_rtx_MEM (BLKmode, incoming_args), - argsize, - PARM_BOUNDARY / BITS_PER_UNIT); - - /* Refer to the argument block. */ - apply_args_size (); - arguments = gen_rtx_MEM (BLKmode, arguments); - - /* Walk past the arg-pointer and structure value address. */ - size = GET_MODE_SIZE (Pmode); - if (struct_value_rtx) - size += GET_MODE_SIZE (Pmode); - - /* Restore each of the registers previously saved. Make USE insns - for each of these registers for use in making the call. */ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_args_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - reg = gen_rtx_REG (mode, regno); - emit_move_insn (reg, - change_address (arguments, mode, - plus_constant (XEXP (arguments, 0), - size))); - - use_reg (&call_fusage, reg); - size += GET_MODE_SIZE (mode); - } - - /* Restore the structure value address unless this is passed as an - "invisible" first argument. 
*/ - size = GET_MODE_SIZE (Pmode); - if (struct_value_rtx) - { - rtx value = gen_reg_rtx (Pmode); - emit_move_insn (value, - change_address (arguments, Pmode, - plus_constant (XEXP (arguments, 0), - size))); - emit_move_insn (struct_value_rtx, value); - if (GET_CODE (struct_value_rtx) == REG) - use_reg (&call_fusage, struct_value_rtx); - size += GET_MODE_SIZE (Pmode); - } - - /* All arguments and registers used for the call are set up by now! */ - function = prepare_call_address (function, NULL_TREE, &call_fusage, 0); - - /* Ensure address is valid. SYMBOL_REF is already valid, so no need, - and we don't want to load it into a register as an optimization, - because prepare_call_address already did it if it should be done. */ - if (GET_CODE (function) != SYMBOL_REF) - function = memory_address (FUNCTION_MODE, function); - - /* Generate the actual call instruction and save the return value. */ -#ifdef HAVE_untyped_call - if (HAVE_untyped_call) - emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function), - result, result_vector (1, result))); - else -#endif -#ifdef HAVE_call_value - if (HAVE_call_value) - { - rtx valreg = 0; - - /* Locate the unique return register. It is not possible to - express a call that sets more than one return register using - call_value; use untyped_call for that. In fact, untyped_call - only needs to save the return registers in the given block. */ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_result_mode[regno]) != VOIDmode) - { - if (valreg) - abort (); /* HAVE_untyped_call required. */ - valreg = gen_rtx_REG (mode, regno); - } - - emit_call_insn (gen_call_value (valreg, - gen_rtx_MEM (FUNCTION_MODE, function), - const0_rtx, NULL_RTX, const0_rtx)); - - emit_move_insn (change_address (result, GET_MODE (valreg), - XEXP (result, 0)), - valreg); - } - else -#endif - abort (); - - /* Find the CALL insn we just emitted. */ - for (call_insn = get_last_insn (); - call_insn && GET_CODE (call_insn) != CALL_INSN; - call_insn = PREV_INSN (call_insn)) - ; - - if (! call_insn) - abort (); - - /* Put the register usage information on the CALL. If there is already - some usage information, put ours at the end. */ - if (CALL_INSN_FUNCTION_USAGE (call_insn)) - { - rtx link; - - for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; - link = XEXP (link, 1)) - ; - - XEXP (link, 1) = call_fusage; - } - else - CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; - - /* Restore the stack. */ -#ifdef HAVE_save_stack_nonlocal - if (HAVE_save_stack_nonlocal) - emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX); - else -#endif - emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); - - /* Return the address of the result block. */ - return copy_addr_to_reg (XEXP (result, 0)); -} - -/* Perform an untyped return. */ - -static void -expand_builtin_return (result) - rtx result; -{ - int size, align, regno; - enum machine_mode mode; - rtx reg; - rtx call_fusage = 0; - - apply_result_size (); - result = gen_rtx_MEM (BLKmode, result); - -#ifdef HAVE_untyped_return - if (HAVE_untyped_return) - { - emit_jump_insn (gen_untyped_return (result, result_vector (0, result))); - emit_barrier (); - return; - } -#endif - - /* Restore the return value and note that each value is used. 
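
   Taken together, the three builtins expanded here support untyped
   call forwarding at the source level: __builtin_apply_args captures
   the argument registers and a pointer to the stack arguments,
   __builtin_apply replays them into a call, and __builtin_return
   returns whatever the callee returned.  A minimal sketch (the callee
   and the 64-byte argument size are assumptions for illustration; the
   size must cover the arguments actually pushed):

	void target_function ();

	void
	forward (void)
	{
	  void *args = __builtin_apply_args ();
	  void *result
	    = __builtin_apply ((void (*) ()) target_function, args, 64);
	  __builtin_return (result);
	}

   This is the mechanism behind message forwarding in the GNU
   Objective-C runtime, which is why apply_args_register_offset above
   is exported for use by objc-act.c.
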
*/ - size = 0; - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_result_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - reg = gen_rtx_REG (mode, INCOMING_REGNO (regno)); - emit_move_insn (reg, - change_address (result, mode, - plus_constant (XEXP (result, 0), - size))); - - push_to_sequence (call_fusage); - emit_insn (gen_rtx_USE (VOIDmode, reg)); - call_fusage = get_insns (); - end_sequence (); - size += GET_MODE_SIZE (mode); - } - - /* Put the USE insns before the return. */ - emit_insns (call_fusage); - - /* Return whatever values was restored by jumping directly to the end - of the function. */ - expand_null_return (); -} /* Expand code for a post- or pre- increment or decrement and return the RTX for the result. @@ -10315,12 +8807,12 @@ expand_builtin_return (result) static rtx expand_increment (exp, post, ignore) - register tree exp; + tree exp; int post, ignore; { - register rtx op0, op1; - register rtx temp, value; - register tree incremented = TREE_OPERAND (exp, 0); + rtx op0, op1; + rtx temp, value; + tree incremented = TREE_OPERAND (exp, 0); optab this_optab = add_optab; int icode; enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); @@ -10349,10 +8841,10 @@ expand_increment (exp, post, ignore) /* Compute the operands as RTX. Note whether OP0 is the actual lvalue or a copy of it: I believe it is a copy iff it is a register or subreg - and insns were generated in computing it. */ + and insns were generated in computing it. */ temp = get_last_insn (); - op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW); + op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0); /* If OP0 is a SUBREG made for a promoted variable, we cannot increment in place but instead must do sign- or zero-extension during assignment, @@ -10383,8 +8875,7 @@ expand_increment (exp, post, ignore) op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG) && temp != get_last_insn ()); - op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, - EXPAND_MEMORY_USE_BAD); + op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); /* Decide whether incrementing or decrementing. */ if (TREE_CODE (exp) == POSTDECREMENT_EXPR @@ -10395,10 +8886,13 @@ expand_increment (exp, post, ignore) if (this_optab == sub_optab && GET_CODE (op1) == CONST_INT) { - op1 = GEN_INT (- INTVAL (op1)); + op1 = GEN_INT (-INTVAL (op1)); this_optab = add_optab; } + if (TYPE_TRAP_SIGNED (TREE_TYPE (exp))) + this_optab = this_optab == add_optab ? addv_optab : subv_optab; + /* For a preincrement, see if we can do this with a single instruction. */ if (!post) { @@ -10406,9 +8900,9 @@ expand_increment (exp, post, ignore) if (icode != (int) CODE_FOR_nothing /* Make sure that OP0 is valid for operands 0 and 1 of the insn we want to queue. */ - && (*insn_operand_predicate[icode][0]) (op0, mode) - && (*insn_operand_predicate[icode][1]) (op0, mode) - && (*insn_operand_predicate[icode][2]) (op1, mode)) + && (*insn_data[icode].operand[0].predicate) (op0, mode) + && (*insn_data[icode].operand[1].predicate) (op0, mode) + && (*insn_data[icode].operand[2].predicate) (op1, mode)) single_insn = 1; } @@ -10462,10 +8956,10 @@ expand_increment (exp, post, ignore) if (icode != (int) CODE_FOR_nothing /* Make sure that OP0 is valid for operands 0 and 1 of the insn we want to queue. 
*/ - && (*insn_operand_predicate[icode][0]) (op0, mode) - && (*insn_operand_predicate[icode][1]) (op0, mode)) + && (*insn_data[icode].operand[0].predicate) (op0, mode) + && (*insn_data[icode].operand[1].predicate) (op0, mode)) { - if (! (*insn_operand_predicate[icode][2]) (op1, mode)) + if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) op1 = force_reg (mode, op1); return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1)); @@ -10477,9 +8971,9 @@ expand_increment (exp, post, ignore) : copy_to_reg (XEXP (op0, 0))); rtx temp, result; - op0 = change_address (op0, VOIDmode, addr); + op0 = replace_equiv_address (op0, addr); temp = force_reg (GET_MODE (op0), op0); - if (! (*insn_operand_predicate[icode][2]) (op1, mode)) + if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) op1 = force_reg (mode, op1); /* The increment queue is LIFO, thus we have to `queue' @@ -10502,9 +8996,9 @@ expand_increment (exp, post, ignore) temp = copy_rtx (value = op0); /* Increment however we can. */ - op1 = expand_binop (mode, this_optab, value, op1, - current_function_check_memory_usage ? NULL_RTX : op0, + op1 = expand_binop (mode, this_optab, value, op1, op0, TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN); + /* Make sure the value is stored into OP0. */ if (op1 != op0) emit_move_insn (op0, op1); @@ -10512,81 +9006,6 @@ expand_increment (exp, post, ignore) return temp; } -/* Expand all function calls contained within EXP, innermost ones first. - But don't look within expressions that have sequence points. - For each CALL_EXPR, record the rtx for its value - in the CALL_EXPR_RTL field. */ - -static void -preexpand_calls (exp) - tree exp; -{ - register int nops, i; - int type = TREE_CODE_CLASS (TREE_CODE (exp)); - - if (! do_preexpand_calls) - return; - - /* Only expressions and references can contain calls. */ - - if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r') - return; - - switch (TREE_CODE (exp)) - { - case CALL_EXPR: - /* Do nothing if already expanded. */ - if (CALL_EXPR_RTL (exp) != 0 - /* Do nothing if the call returns a variable-sized object. */ - || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST - /* Do nothing to built-in functions. */ - || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR - && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) - == FUNCTION_DECL) - && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))) - return; - - CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0); - return; - - case COMPOUND_EXPR: - case COND_EXPR: - case TRUTH_ANDIF_EXPR: - case TRUTH_ORIF_EXPR: - /* If we find one of these, then we can be sure - the adjust will be done for it (since it makes jumps). - Do it now, so that if this is inside an argument - of a function, we don't get the stack adjustment - after some other args have already been pushed. */ - do_pending_stack_adjust (); - return; - - case BLOCK: - case RTL_EXPR: - case WITH_CLEANUP_EXPR: - case CLEANUP_POINT_EXPR: - case TRY_CATCH_EXPR: - return; - - case SAVE_EXPR: - if (SAVE_EXPR_RTL (exp) != 0) - return; - - default: - break; - } - - nops = tree_code_length[(int) TREE_CODE (exp)]; - for (i = 0; i < nops; i++) - if (TREE_OPERAND (exp, i) != 0) - { - type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i))); - if (type == 'e' || type == '<' || type == '1' || type == '2' - || type == 'r') - preexpand_calls (TREE_OPERAND (exp, i)); - } -} - /* At the start of a function, record that we have no previously-pushed arguments waiting to be popped. 
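
   (Returning to the increment expansion above: the queued
   copy-then-add arrangement exists because a post-increment must yield
   the old value while still updating the object.  In source terms the
   expansion computes the equivalent of this hypothetical helper:

	int
	post_increment (int *p)
	{
	  int old = *p;
	  *p = old + 1;
	  return old;
	}

   The first statement is the value of the expression; the second is
   the queued side effect, emitted when emit_queue is next called at a
   sequence point, which is the ordering the LIFO queue above has to
   arrange.)
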
*/ @@ -10611,7 +9030,10 @@ clear_pending_stack_adjust () && EXIT_IGNORE_STACK && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline) && ! flag_inline_functions) - pending_stack_adjust = 0; + { + stack_pointer_delta -= pending_stack_adjust, + pending_stack_adjust = 0; + } #endif } @@ -10669,13 +9091,12 @@ do_jump (exp, if_false_label, if_true_label) tree exp; rtx if_false_label, if_true_label; { - register enum tree_code code = TREE_CODE (exp); + enum tree_code code = TREE_CODE (exp); /* Some cases need to create a label to jump to in order to properly fall through. These cases set DROP_THROUGH_LABEL nonzero. */ rtx drop_through_label = 0; rtx temp; - rtx comparison = 0; int i; tree type; enum machine_mode mode; @@ -10709,7 +9130,8 @@ do_jump (exp, if_false_label, if_true_label) case NOP_EXPR: if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF - || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF) + || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF + || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF) goto normal; case CONVERT_EXPR: /* If we are narrowing the operand, we have to do the compare in the @@ -10727,6 +9149,15 @@ do_jump (exp, if_false_label, if_true_label) do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); break; + case WITH_RECORD_EXPR: + /* Put the object on the placeholder list, recurse through our first + operand, and pop the list. */ + placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE, + placeholder_list); + do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); + placeholder_list = TREE_CHAIN (placeholder_list); + break; + #if 0 /* This is never less insns than evaluating the PLUS_EXPR followed by a test and can be longer if the test is eliminated. */ @@ -10741,10 +9172,10 @@ do_jump (exp, if_false_label, if_true_label) case MINUS_EXPR: /* Non-zero iff operands of minus differ. */ - comparison = compare (build (NE_EXPR, TREE_TYPE (exp), - TREE_OPERAND (exp, 0), - TREE_OPERAND (exp, 1)), - NE, NE); + do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp), + TREE_OPERAND (exp, 0), + TREE_OPERAND (exp, 1)), + NE, NE, if_false_label, if_true_label); break; case BIT_AND_EXPR: @@ -10758,7 +9189,7 @@ do_jump (exp, if_false_label, if_true_label) if (! SLOW_BYTE_ACCESS && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT - && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0 + && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode && (type = type_for_mode (mode, 1)) != 0 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) @@ -10806,19 +9237,19 @@ do_jump (exp, if_false_label, if_true_label) case COMPONENT_REF: case BIT_FIELD_REF: case ARRAY_REF: + case ARRAY_RANGE_REF: { - int bitsize, bitpos, unsignedp; + HOST_WIDE_INT bitsize, bitpos; + int unsignedp; enum machine_mode mode; tree type; tree offset; int volatilep = 0; - int alignment; /* Get description of this reference. We don't actually care about the underlying object here. */ - get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep, - &alignment); + get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode, + &unsignedp, &volatilep); type = type_for_size (bitsize, unsignedp); if (! 
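
/* A note on the single-bit test handled above: when the mask in a
   BIT_AND_EXPR is a power of two, the branch depends on a single bit,
   so the comparison can be carried out in any type wide enough to
   contain that bit.  In source terms,

	if (x & 0x40)

   tests the same condition as

	if ((unsigned char) x & 0x40)

   since the cast keeps the low eight bits.  Choosing the narrowest
   such type is what the type_for_size (i + 1, ...) call above does,
   and the bit-field reference case below applies the same idea to
   memory references.  */
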
SLOW_BYTE_ACCESS @@ -10845,7 +9276,7 @@ do_jump (exp, if_false_label, if_true_label) else { - register rtx label1 = gen_label_rtx (); + rtx label1 = gen_label_rtx (); drop_through_label = gen_label_rtx (); do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX); @@ -10900,10 +9331,10 @@ do_jump (exp, if_false_label, if_true_label) do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT - && !can_compare_p (TYPE_MODE (inner_type))) + && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump)) do_jump_by_parts_equality (exp, if_false_label, if_true_label); else - comparison = compare (exp, EQ, EQ); + do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label); break; } @@ -10940,49 +9371,155 @@ do_jump (exp, if_false_label, if_true_label) do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT - && !can_compare_p (TYPE_MODE (inner_type))) + && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump)) do_jump_by_parts_equality (exp, if_true_label, if_false_label); else - comparison = compare (exp, NE, NE); + do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label); break; } case LT_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + if (GET_MODE_CLASS (mode) == MODE_INT + && ! can_compare_p (LT, mode, ccp_jump)) do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label); else - comparison = compare (exp, LT, LTU); + do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label); break; case LE_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + if (GET_MODE_CLASS (mode) == MODE_INT + && ! can_compare_p (LE, mode, ccp_jump)) do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label); else - comparison = compare (exp, LE, LEU); + do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label); break; case GT_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + if (GET_MODE_CLASS (mode) == MODE_INT + && ! can_compare_p (GT, mode, ccp_jump)) do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label); else - comparison = compare (exp, GT, GTU); + do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label); break; case GE_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + if (GET_MODE_CLASS (mode) == MODE_INT + && ! can_compare_p (GE, mode, ccp_jump)) do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label); else - comparison = compare (exp, GE, GEU); + do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label); + break; + + case UNORDERED_EXPR: + case ORDERED_EXPR: + { + enum rtx_code cmp, rcmp; + int do_rev; + + if (code == UNORDERED_EXPR) + cmp = UNORDERED, rcmp = ORDERED; + else + cmp = ORDERED, rcmp = UNORDERED; + mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + + do_rev = 0; + if (! 
can_compare_p (cmp, mode, ccp_jump) + && (can_compare_p (rcmp, mode, ccp_jump) + /* If the target doesn't provide either UNORDERED or ORDERED + comparisons, canonicalize on UNORDERED for the library. */ + || rcmp == UNORDERED)) + do_rev = 1; + + if (! do_rev) + do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label); + else + do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label); + } break; + { + enum rtx_code rcode1; + enum tree_code tcode2; + + case UNLT_EXPR: + rcode1 = UNLT; + tcode2 = LT_EXPR; + goto unordered_bcc; + case UNLE_EXPR: + rcode1 = UNLE; + tcode2 = LE_EXPR; + goto unordered_bcc; + case UNGT_EXPR: + rcode1 = UNGT; + tcode2 = GT_EXPR; + goto unordered_bcc; + case UNGE_EXPR: + rcode1 = UNGE; + tcode2 = GE_EXPR; + goto unordered_bcc; + case UNEQ_EXPR: + rcode1 = UNEQ; + tcode2 = EQ_EXPR; + goto unordered_bcc; + + unordered_bcc: + mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + if (can_compare_p (rcode1, mode, ccp_jump)) + do_compare_and_jump (exp, rcode1, rcode1, if_false_label, + if_true_label); + else + { + tree op0 = save_expr (TREE_OPERAND (exp, 0)); + tree op1 = save_expr (TREE_OPERAND (exp, 1)); + tree cmp0, cmp1; + + /* If the target doesn't support combined unordered + compares, decompose into UNORDERED + comparison. */ + cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1)); + cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1)); + exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1); + do_jump (exp, if_false_label, if_true_label); + } + } + break; + + /* Special case: + __builtin_expect (<test>, 0) and + __builtin_expect (<test>, 1) + + We need to do this here, so that <test> is not converted to a SCC + operation on machines that use condition code registers and COMPARE + like the PowerPC, and then the jump is done based on whether the SCC + operation produced a 1 or 0. */ + case CALL_EXPR: + /* Check for a built-in function. */ + if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) + { + tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); + tree arglist = TREE_OPERAND (exp, 1); + + if (TREE_CODE (fndecl) == FUNCTION_DECL + && DECL_BUILT_IN (fndecl) + && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT + && arglist != NULL_TREE + && TREE_CHAIN (arglist) != NULL_TREE) + { + rtx seq = expand_builtin_expect_jump (exp, if_false_label, + if_true_label); + + if (seq != NULL_RTX) + { + emit_insn (seq); + return; + } + } + } + /* fall through and generate the normal code. */ + default: normal: temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); @@ -10996,42 +9533,30 @@ do_jump (exp, if_false_label, if_true_label) temp = copy_to_reg (temp); #endif do_pending_stack_adjust (); - if (GET_CODE (temp) == CONST_INT) - comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx); - else if (GET_CODE (temp) == LABEL_REF) - comparison = const_true_rtx; + /* Do any postincrements in the expression that was tested. */ + emit_queue (); + + if (GET_CODE (temp) == CONST_INT + || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode) + || GET_CODE (temp) == LABEL_REF) + { + rtx target = temp == const0_rtx ? if_false_label : if_true_label; + if (target) + emit_jump (target); + } else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT - && !can_compare_p (GET_MODE (temp))) + && ! can_compare_p (NE, GET_MODE (temp), ccp_jump)) /* Note swapping the labels gives us not-equal. 
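
   An aside on the __builtin_expect special case above: at the source
   level the builtin annotates a condition with its likely value, as
   in this sketch (the handler name is hypothetical):

	if (__builtin_expect (ptr == 0, 0))
	  handle_rare_error ();

   The comment above explains why this must be intercepted here in
   do_jump rather than expanded as an ordinary call: the hint has to
   reach the conditional jump itself, not a store-condition-code value
   that is tested afterwards.
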
*/ do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label); else if (GET_MODE (temp) != VOIDmode) - comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)), - NE, TREE_UNSIGNED (TREE_TYPE (exp)), - GET_MODE (temp), NULL_RTX, 0); + do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)), + NE, TREE_UNSIGNED (TREE_TYPE (exp)), + GET_MODE (temp), NULL_RTX, + if_false_label, if_true_label); else abort (); } - /* Do any postincrements in the expression that was tested. */ - emit_queue (); - - /* If COMPARISON is nonzero here, it is an rtx that can be substituted - straight into a conditional jump instruction as the jump condition. - Otherwise, all the work has been done already. */ - - if (comparison == const_true_rtx) - { - if (if_true_label) - emit_jump (if_true_label); - } - else if (comparison == const0_rtx) - { - if (if_false_label) - emit_jump (if_false_label); - } - else if (comparison) - do_jump_for_compare (comparison, if_false_label, if_true_label); - if (drop_through_label) { /* If do_jump produces code that might be jumped around, @@ -11056,57 +9581,9 @@ do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label) rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0); rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0); enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); - int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); - rtx drop_through_label = 0; int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); - int i; - - if (! if_true_label || ! if_false_label) - drop_through_label = gen_label_rtx (); - if (! if_true_label) - if_true_label = drop_through_label; - if (! if_false_label) - if_false_label = drop_through_label; - - /* Compare a word at a time, high order first. */ - for (i = 0; i < nwords; i++) - { - rtx comp; - rtx op0_word, op1_word; - - if (WORDS_BIG_ENDIAN) - { - op0_word = operand_subword_force (op0, i, mode); - op1_word = operand_subword_force (op1, i, mode); - } - else - { - op0_word = operand_subword_force (op0, nwords - 1 - i, mode); - op1_word = operand_subword_force (op1, nwords - 1 - i, mode); - } - /* All but high-order word must be compared as unsigned. */ - comp = compare_from_rtx (op0_word, op1_word, - (unsignedp || i > 0) ? GTU : GT, - unsignedp, word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_true_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_true_label); - - /* Consider lower words only if these are equal. */ - comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, - NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_false_label); - } - - if (if_false_label) - emit_jump (if_false_label); - if (drop_through_label) - emit_label (drop_through_label); + do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label); } /* Compare OP0 with OP1, word at a time, in mode MODE. @@ -11134,7 +9611,6 @@ do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true /* Compare a word at a time, high order first. */ for (i = 0; i < nwords; i++) { - rtx comp; rtx op0_word, op1_word; if (WORDS_BIG_ENDIAN) @@ -11149,21 +9625,13 @@ do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true } /* All but high-order word must be compared as unsigned. */ - comp = compare_from_rtx (op0_word, op1_word, - (unsignedp || i > 0) ? 
GTU : GT, - unsignedp, word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_true_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_true_label); + do_compare_rtx_and_jump (op0_word, op1_word, GT, + (unsignedp || i > 0), word_mode, NULL_RTX, + NULL_RTX, if_true_label); /* Consider lower words only if these are equal. */ - comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, - NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_false_label); + do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode, + NULL_RTX, NULL_RTX, if_false_label); } if (if_false_label) @@ -11191,16 +9659,10 @@ do_jump_by_parts_equality (exp, if_false_label, if_true_label) drop_through_label = if_false_label = gen_label_rtx (); for (i = 0; i < nwords; i++) - { - rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode), - operand_subword_force (op1, i, mode), - EQ, TREE_UNSIGNED (TREE_TYPE (exp)), - word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, if_false_label, NULL_RTX); - } + do_compare_rtx_and_jump (operand_subword_force (op0, i, mode), + operand_subword_force (op1, i, mode), + EQ, TREE_UNSIGNED (TREE_TYPE (exp)), + word_mode, NULL_RTX, if_false_label, NULL_RTX); if (if_true_label) emit_jump (if_true_label); @@ -11236,15 +9698,8 @@ do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) if (part != 0) { - rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode, - NULL_RTX, 0); - - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp == const0_rtx) - emit_jump (if_true_label); - else - do_jump_for_compare (comp, if_false_label, if_true_label); + do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode, + NULL_RTX, if_false_label, if_true_label); return; } @@ -11254,15 +9709,9 @@ do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) drop_through_label = if_false_label = gen_label_rtx (); for (i = 0; i < nwords; i++) - { - rtx comp = compare_from_rtx (operand_subword_force (op0, i, - GET_MODE (op0)), - const0_rtx, EQ, 1, word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, if_false_label, NULL_RTX); - } + do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)), + const0_rtx, EQ, 1, word_mode, NULL_RTX, + if_false_label, NULL_RTX); if (if_true_label) emit_jump (if_true_label); @@ -11270,192 +9719,110 @@ do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) if (drop_through_label) emit_label (drop_through_label); } - -/* Given a comparison expression in rtl form, output conditional branches to - IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */ - -static void -do_jump_for_compare (comparison, if_false_label, if_true_label) - rtx comparison, if_false_label, if_true_label; -{ - if (if_true_label) - { - if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0) - emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) - (if_true_label)); - else - abort (); - - if (if_false_label) - emit_jump (if_false_label); - } - else if (if_false_label) - { - rtx first = get_last_insn (), insn, branch; - int br_count; - - /* Output the branch with the opposite condition. Then try to invert - what is generated. 
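
   (For reference, the word-at-a-time scheme of
   do_jump_by_parts_greater_rtx above corresponds to this source-level
   sketch for a two-word unsigned greater-than, high-order word first:

	int
	two_word_gtu (unsigned long hi0, unsigned long lo0,
		      unsigned long hi1, unsigned long lo1)
	{
	  if (hi0 > hi1)
	    return 1;
	  if (hi0 != hi1)
	    return 0;
	  return lo0 > lo1;
	}

   All words below the most significant one are compared unsigned even
   in a signed comparison, and lower words are consulted only when the
   higher ones are equal, exactly as the two do_compare_rtx_and_jump
   calls in the loop arrange.)
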
If more than one insn is a branch, or if the - branch is not the last insn written, abort. If we can't invert - the branch, emit make a true label, redirect this jump to that, - emit a jump to the false label and define the true label. */ - /* ??? Note that we wouldn't have to do any of this nonsense if - we passed both labels into a combined compare-and-branch. - Ah well, jump threading does a good job of repairing the damage. */ - - if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0) - emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) - (if_false_label)); - else - abort (); - - /* Here we get the first insn that was just emitted. It used to be the - case that, on some machines, emitting the branch would discard - the previous compare insn and emit a replacement. This isn't - done anymore, but abort if we see that FIRST is deleted. */ - - if (first == 0) - first = get_insns (); - else if (INSN_DELETED_P (first)) - abort (); - else - first = NEXT_INSN (first); - - /* Look for multiple branches in this sequence, as might be generated - for a multi-word integer comparison. */ - - br_count = 0; - branch = NULL_RTX; - for (insn = first; insn ; insn = NEXT_INSN (insn)) - if (GET_CODE (insn) == JUMP_INSN) - { - branch = insn; - br_count += 1; - } - - /* If we've got one branch at the end of the sequence, - we can try to reverse it. */ - - if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX) - { - rtx insn_label; - insn_label = XEXP (condjump_label (branch), 0); - JUMP_LABEL (branch) = insn_label; - - if (insn_label != if_false_label) - abort (); - - if (invert_jump (branch, if_false_label)) - return; - } - - /* Multiple branches, or reversion failed. Convert to branches - around an unconditional jump. */ - - if_true_label = gen_label_rtx (); - for (insn = first; insn; insn = NEXT_INSN (insn)) - if (GET_CODE (insn) == JUMP_INSN) - { - rtx insn_label; - insn_label = XEXP (condjump_label (insn), 0); - JUMP_LABEL (insn) = insn_label; - - if (insn_label == if_false_label) - redirect_jump (insn, if_true_label); - } - emit_jump (if_false_label); - emit_label (if_true_label); - } -} -/* Generate code for a comparison expression EXP +/* Generate code for a comparison of OP0 and OP1 with rtx code CODE. (including code to compute the values to be compared) and set (CC0) according to the result. - SIGNED_CODE should be the rtx operation for this comparison for - signed data; UNSIGNED_CODE, likewise for use if data is unsigned. + The decision as to signed or unsigned comparison must be made by the caller. We force a stack adjustment unless there are currently - things pushed on the stack that aren't yet used. */ + things pushed on the stack that aren't yet used. -static rtx -compare (exp, signed_code, unsigned_code) - register tree exp; - enum rtx_code signed_code, unsigned_code; + If MODE is BLKmode, SIZE is an RTX giving the size of the objects being + compared. */ + +rtx +compare_from_rtx (op0, op1, code, unsignedp, mode, size) + rtx op0, op1; + enum rtx_code code; + int unsignedp; + enum machine_mode mode; + rtx size; { - register rtx op0, op1; - register tree type; - register enum machine_mode mode; - int unsignedp; - enum rtx_code code; + rtx tem; - /* Don't crash if the comparison was erroneous. 
*/ - op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); - if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) - return op0; - - op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); - type = TREE_TYPE (TREE_OPERAND (exp, 0)); - mode = TYPE_MODE (type); - unsignedp = TREE_UNSIGNED (type); - code = unsignedp ? unsigned_code : signed_code; + /* If one operand is constant, make it the second one. Only do this + if the other operand is not constant as well. */ -#ifdef HAVE_canonicalize_funcptr_for_compare - /* If function pointers need to be "canonicalized" before they can - be reliably compared, then canonicalize them. */ - if (HAVE_canonicalize_funcptr_for_compare - && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE - && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == FUNCTION_TYPE)) + if (swap_commutative_operands_p (op0, op1)) { - rtx new_op0 = gen_reg_rtx (mode); - - emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0)); - op0 = new_op0; + tem = op0; + op0 = op1; + op1 = tem; + code = swap_condition (code); } - if (HAVE_canonicalize_funcptr_for_compare - && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE - && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) - == FUNCTION_TYPE)) + if (flag_force_mem) { - rtx new_op1 = gen_reg_rtx (mode); + op0 = force_not_mem (op0); + op1 = force_not_mem (op1); + } - emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1)); - op1 = new_op1; + do_pending_stack_adjust (); + + if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT + && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) + return tem; + +#if 0 + /* There's no need to do this now that combine.c can eliminate lots of + sign extensions. This can be less efficient in certain cases on other + machines. */ + + /* If this is a signed equality comparison, we can do it as an + unsigned comparison since zero-extension is cheaper than sign + extension and comparisons with zero are done as unsigned. This is + the case even on machines that can do fast sign extension, since + zero-extension is easier to combine with other operations than + sign-extension is. If we are comparing against a constant, we must + convert it to what it would look like unsigned. */ + if ((code == EQ || code == NE) && ! unsignedp + && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) + { + if (GET_CODE (op1) == CONST_INT + && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) + op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); + unsignedp = 1; } #endif - return compare_from_rtx (op0, op1, code, unsignedp, mode, - ((mode == BLKmode) - ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), - TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); + emit_cmp_insn (op0, op1, code, size, mode, unsignedp); + + return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx); } -/* Like compare but expects the values to compare as two rtx's. +/* Like do_compare_and_jump but expects the values to compare as two rtx's. The decision as to signed or unsigned comparison must be made by the caller. If MODE is BLKmode, SIZE is an RTX giving the size of the objects being - compared. + compared. */ - If ALIGN is non-zero, it is the alignment of this type; if zero, the - size of MODE should be used. 
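
   A note on the operand canonicalization above: moving a constant to
   the second position is only sound if the comparison code is swapped
   with swap_condition, which exchanges the roles of the operands
   rather than negating the test.  In source terms,

	if (5 < x)

   becomes

	if (x > 5)

   (LT swapped to GT), not the logically different x >= 5 that
   reverse_condition would produce.
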
*/ - -rtx -compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) - register rtx op0, op1; +void +do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, + if_false_label, if_true_label) + rtx op0, op1; enum rtx_code code; int unsignedp; enum machine_mode mode; rtx size; - int align; + rtx if_false_label, if_true_label; { rtx tem; + int dummy_true_label = 0; + + /* Reverse the comparison if that is safe and we want to jump if it is + false. */ + if (! if_true_label && ! FLOAT_MODE_P (mode)) + { + if_true_label = if_false_label; + if_false_label = 0; + code = reverse_condition (code); + } /* If one operand is constant, make it the second one. Only do this if the other operand is not constant as well. */ - if ((CONSTANT_P (op0) && ! CONSTANT_P (op1)) - || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT)) + if (swap_commutative_operands_p (op0, op1)) { tem = op0; op0 = op1; @@ -11473,7 +9840,19 @@ compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) - return tem; + { + if (tem == const_true_rtx) + { + if (if_true_label) + emit_jump (if_true_label); + } + else + { + if (if_false_label) + emit_jump (if_false_label); + } + return; + } #if 0 /* There's no need to do this now that combine.c can eliminate lots of @@ -11496,10 +9875,103 @@ compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) unsignedp = 1; } #endif - - emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align); - return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx); + if (! if_true_label) + { + dummy_true_label = 1; + if_true_label = gen_label_rtx (); + } + + emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, + if_true_label); + + if (if_false_label) + emit_jump (if_false_label); + if (dummy_true_label) + emit_label (if_true_label); +} + +/* Generate code for a comparison expression EXP (including code to compute + the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or + IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the + generated code will drop through. + SIGNED_CODE should be the rtx operation for this comparison for + signed data; UNSIGNED_CODE, likewise for use if data is unsigned. + + We force a stack adjustment unless there are currently + things pushed on the stack that aren't yet used. */ + +static void +do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label, + if_true_label) + tree exp; + enum rtx_code signed_code, unsigned_code; + rtx if_false_label, if_true_label; +{ + rtx op0, op1; + tree type; + enum machine_mode mode; + int unsignedp; + enum rtx_code code; + + /* Don't crash if the comparison was erroneous. */ + op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); + if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) + return; + + op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); + if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK) + return; + + type = TREE_TYPE (TREE_OPERAND (exp, 0)); + mode = TYPE_MODE (type); + if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST + && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST + || (GET_MODE_BITSIZE (mode) + > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, + 1))))))) + { + /* op0 might have been replaced by promoted constant, in which + case the type of second argument should be used. 
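
   (An aside on the label reversal at the top of
   do_compare_rtx_and_jump: jumping when a comparison is false is
   rewritten as jumping when the reversed comparison is true, so that

	if (a < b) ; else goto miss;

   becomes a single branch taken when a >= b.  The ! FLOAT_MODE_P
   guard is what keeps this safe: for IEEE floating point the rewrite
   is wrong when an operand is a NaN, since a < b and a >= b are then
   both false, so floating-point comparisons keep their original sense
   and rely on the dummy-label fallback further down instead.)
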
*/ + type = TREE_TYPE (TREE_OPERAND (exp, 1)); + mode = TYPE_MODE (type); + } + unsignedp = TREE_UNSIGNED (type); + code = unsignedp ? unsigned_code : signed_code; + +#ifdef HAVE_canonicalize_funcptr_for_compare + /* If function pointers need to be "canonicalized" before they can + be reliably compared, then canonicalize them. */ + if (HAVE_canonicalize_funcptr_for_compare + && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE + && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == FUNCTION_TYPE)) + { + rtx new_op0 = gen_reg_rtx (mode); + + emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0)); + op0 = new_op0; + } + + if (HAVE_canonicalize_funcptr_for_compare + && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE + && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) + == FUNCTION_TYPE)) + { + rtx new_op1 = gen_reg_rtx (mode); + + emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1)); + op1 = new_op1; + } +#endif + + /* Do any postincrements in the expression that was tested. */ + emit_queue (); + + do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, + ((mode == BLKmode) + ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), + if_false_label, if_true_label); } /* Generate code to calculate EXP using a store-flag instruction @@ -11550,6 +10022,11 @@ do_store_flag (exp, target, mode, only_cheap) arg0 = TREE_OPERAND (exp, 0); arg1 = TREE_OPERAND (exp, 1); + + /* Don't crash if the comparison was erroneous. */ + if (arg0 == error_mark_node || arg1 == error_mark_node) + return const0_rtx; + type = TREE_TYPE (arg0); operand_mode = TYPE_MODE (type); unsignedp = TREE_UNSIGNED (type); @@ -11614,6 +10091,29 @@ do_store_flag (exp, target, mode, only_cheap) else code = unsignedp ? GEU : GE; break; + + case UNORDERED_EXPR: + code = UNORDERED; + break; + case ORDERED_EXPR: + code = ORDERED; + break; + case UNLT_EXPR: + code = UNLT; + break; + case UNLE_EXPR: + code = UNLE; + break; + case UNGT_EXPR: + code = UNGT; + break; + case UNGE_EXPR: + code = UNGE; + break; + case UNEQ_EXPR: + code = UNEQ; + break; + default: abort (); } @@ -11645,8 +10145,9 @@ do_store_flag (exp, target, mode, only_cheap) if (TREE_CODE (inner) == RSHIFT_EXPR && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 - && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)) - < TYPE_PRECISION (type))) + && bitnum < TYPE_PRECISION (type) + && 0 > compare_tree_int (TREE_OPERAND (inner, 1), + bitnum - TYPE_PRECISION (type))) { bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); inner = TREE_OPERAND (inner, 0); @@ -11663,7 +10164,7 @@ do_store_flag (exp, target, mode, only_cheap) #endif ); - if (subtarget == 0 || GET_CODE (subtarget) != REG + if (! get_subtarget (subtarget) || GET_MODE (subtarget) != operand_mode || ! safe_from_p (subtarget, inner, 1)) subtarget = 0; @@ -11671,7 +10172,7 @@ do_store_flag (exp, target, mode, only_cheap) op0 = expand_expr (inner, subtarget, VOIDmode, 0); if (bitnum != 0) - op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0, + op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0, size_int (bitnum), subtarget, ops_unsignedp); if (GET_MODE (op0) != mode) @@ -11689,11 +10190,12 @@ do_store_flag (exp, target, mode, only_cheap) } /* Now see if we are likely to be able to do this. Return if not. */ - if (! can_compare_p (operand_mode)) + if (! 
can_compare_p (code, operand_mode, ccp_store_flag)) return 0; + icode = setcc_gen_code[(int) code]; if (icode == CODE_FOR_nothing - || (only_cheap && insn_operand_mode[(int) icode][0] != mode)) + || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) { /* We can only do this if it is one of the special cases that can be handled without an scc insn. */ @@ -11711,9 +10213,8 @@ do_store_flag (exp, target, mode, only_cheap) else return 0; } - - preexpand_calls (exp); - if (subtarget == 0 || GET_CODE (subtarget) != REG + + if (! get_subtarget (target) || GET_MODE (subtarget) != operand_mode || ! safe_from_p (subtarget, arg1, 1)) subtarget = 0; @@ -11748,12 +10249,20 @@ do_store_flag (exp, target, mode, only_cheap) emit_move_insn (target, invert ? const0_rtx : const1_rtx); result = compare_from_rtx (op0, op1, code, unsignedp, - operand_mode, NULL_RTX, 0); + operand_mode, NULL_RTX); if (GET_CODE (result) == CONST_INT) return (((result == const0_rtx && ! invert) || (result != const0_rtx && invert)) ? const0_rtx : const1_rtx); + /* The code of RESULT may not match CODE if compare_from_rtx + decided to swap its operands and reverse the original code. + + We know that compare_from_rtx returns either a CONST_INT or + a new comparison code, so it is safe to just extract the + code from RESULT. */ + code = GET_CODE (result); + label = gen_label_rtx (); if (bcc_gen_fctn[(int) code] == 0) abort (); @@ -11765,11 +10274,112 @@ do_store_flag (exp, target, mode, only_cheap) return target; } -/* Generate a tablejump instruction (used for switch statements). */ -#ifdef HAVE_tablejump +/* Stubs in case we haven't got a casesi insn. */ +#ifndef HAVE_casesi +# define HAVE_casesi 0 +# define gen_casesi(a, b, c, d, e) (0) +# define CODE_FOR_casesi CODE_FOR_nothing +#endif + +/* If the machine does not have a case insn that compares the bounds, + this means extra overhead for dispatch tables, which raises the + threshold for using them. */ +#ifndef CASE_VALUES_THRESHOLD +#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) +#endif /* CASE_VALUES_THRESHOLD */ + +unsigned int +case_values_threshold () +{ + return CASE_VALUES_THRESHOLD; +} + +/* Attempt to generate a casesi instruction. Returns 1 if successful, + 0 otherwise (i.e. if there is no casesi instruction). */ +int +try_casesi (index_type, index_expr, minval, range, + table_label, default_label) + tree index_type, index_expr, minval, range; + rtx table_label ATTRIBUTE_UNUSED; + rtx default_label; +{ + enum machine_mode index_mode = SImode; + int index_bits = GET_MODE_BITSIZE (index_mode); + rtx op1, op2, index; + enum machine_mode op_mode; -/* INDEX is the value being switched on, with the lowest value + if (! HAVE_casesi) + return 0; + + /* Convert the index to SImode. */ + if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode)) + { + enum machine_mode omode = TYPE_MODE (index_type); + rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0); + + /* We must handle the endpoints in the original mode. */ + index_expr = build (MINUS_EXPR, index_type, + index_expr, minval); + minval = integer_zero_node; + index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); + emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX, + omode, 1, default_label); + /* Now we can safely truncate. 
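
   The bounds check emitted just above uses the standard unsigned
   range-check idiom: after subtracting the minimum, a single unsigned
   comparison against the range catches both out-of-range directions,
   because values below the minimum wrap around to very large unsigned
   numbers.  In source terms, for case values LOW through HIGH:

	if ((unsigned) (x - LOW) > (unsigned) (HIGH - LOW))
	  goto default_label;

   which is why the single LTU comparison against the range suffices
   here and no separate test against MINVAL is needed.
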
*/ + index = convert_to_mode (index_mode, index, 0); + } + else + { + if (TYPE_MODE (index_type) != index_mode) + { + index_expr = convert (type_for_size (index_bits, 0), + index_expr); + index_type = TREE_TYPE (index_expr); + } + + index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); + } + emit_queue (); + index = protect_from_queue (index, 0); + do_pending_stack_adjust (); + + op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode; + if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate) + (index, op_mode)) + index = copy_to_mode_reg (op_mode, index); + + op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0); + + op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode; + op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)), + op1, TREE_UNSIGNED (TREE_TYPE (minval))); + if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate) + (op1, op_mode)) + op1 = copy_to_mode_reg (op_mode, op1); + + op2 = expand_expr (range, NULL_RTX, VOIDmode, 0); + + op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; + op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), + op2, TREE_UNSIGNED (TREE_TYPE (range))); + if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) + (op2, op_mode)) + op2 = copy_to_mode_reg (op_mode, op2); + + emit_jump_insn (gen_casesi (index, op1, op2, + table_label, default_label)); + return 1; +} + +/* Attempt to generate a tablejump instruction; same concept. */ +#ifndef HAVE_tablejump +#define HAVE_tablejump 0 +#define gen_tablejump(x, y) (0) +#endif + +/* Subroutine of the next function. + + INDEX is the value being switched on, with the lowest value in the table already subtracted. MODE is its expected mode (needed if INDEX is constant). RANGE is the length of the jump table. @@ -11778,12 +10388,12 @@ do_store_flag (exp, target, mode, only_cheap) DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the index value is out of range. */ -void +static void do_tablejump (index, mode, range, table_label, default_label) rtx index, range, table_label, default_label; enum machine_mode mode; { - register rtx temp, vector; + rtx temp, vector; /* Do an unsigned comparison (in the proper mode) between the index expression and the value which represents the length of the range. @@ -11794,7 +10404,7 @@ do_tablejump (index, mode, range, table_label, default_label) the maximum value of the range. */ emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, - 0, default_label); + default_label); /* If index is in range, it must fit in Pmode. Convert to Pmode so we can index with it. */ @@ -11840,4 +10450,31 @@ do_tablejump (index, mode, range, table_label, default_label) emit_barrier (); } -#endif /* HAVE_tablejump */ +int +try_tablejump (index_type, index_expr, minval, range, + table_label, default_label) + tree index_type, index_expr, minval, range; + rtx table_label, default_label; +{ + rtx index; + + if (! HAVE_tablejump) + return 0; + + index_expr = fold (build (MINUS_EXPR, index_type, + convert (index_type, index_expr), + convert (index_type, minval))); + index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); + emit_queue (); + index = protect_from_queue (index, 0); + do_pending_stack_adjust (); + + do_tablejump (index, TYPE_MODE (index_type), + convert_modes (TYPE_MODE (index_type), + TYPE_MODE (TREE_TYPE (range)), + expand_expr (range, NULL_RTX, + VOIDmode, 0), + TREE_UNSIGNED (TREE_TYPE (range))), + table_label, default_label); + return 1; +} |
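
A closing note on the tablejump expansion above: at the source level
the same dispatch shape can be written with GNU C's labels-as-values
extension, which makes the bounds check plus indexed indirect jump
explicit.  A minimal sketch (the labels and the 0..2 range are
illustrative only):

	int
	dispatch (unsigned int idx)
	{
	  static const void *table[] = { &&case0, &&case1, &&case2 };

	  if (idx > 2)
	    goto deflt;
	  goto *table[idx];

	case0: return 10;
	case1: return 11;
	case2: return 12;
	deflt: return -1;
	}

The generated code works the same way: an unsigned comparison of the
adjusted index against the range (the emit_cmp_and_jump_insns call in
do_tablejump), then an indirect jump through a table of label
addresses, followed by emit_barrier to mark that the jump does not
fall through.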