118334Speter/* Convert tree expression to rtl instructions, for GNU compiler.
272562Sobrien   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3169689Skan   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4169689Skan   Inc.
518334Speter
690075SobrienThis file is part of GCC.
718334Speter
890075SobrienGCC is free software; you can redistribute it and/or modify it under
990075Sobrienthe terms of the GNU General Public License as published by the Free
1090075SobrienSoftware Foundation; either version 2, or (at your option) any later
1190075Sobrienversion.
1218334Speter
1390075SobrienGCC is distributed in the hope that it will be useful, but WITHOUT ANY
1490075SobrienWARRANTY; without even the implied warranty of MERCHANTABILITY or
1590075SobrienFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1690075Sobrienfor more details.
1718334Speter
1818334SpeterYou should have received a copy of the GNU General Public License
1990075Sobrienalong with GCC; see the file COPYING.  If not, write to the Free
20169689SkanSoftware Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21169689Skan02110-1301, USA.  */
2218334Speter
2318334Speter#include "config.h"
2450397Sobrien#include "system.h"
25132718Skan#include "coretypes.h"
26132718Skan#include "tm.h"
2718334Speter#include "machmode.h"
28117395Skan#include "real.h"
2918334Speter#include "rtl.h"
3018334Speter#include "tree.h"
3118334Speter#include "flags.h"
3218334Speter#include "regs.h"
3350397Sobrien#include "hard-reg-set.h"
3450397Sobrien#include "except.h"
3518334Speter#include "function.h"
3650397Sobrien#include "insn-config.h"
3790075Sobrien#include "insn-attr.h"
3890075Sobrien/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
3918334Speter#include "expr.h"
4090075Sobrien#include "optabs.h"
4190075Sobrien#include "libfuncs.h"
4218334Speter#include "recog.h"
4390075Sobrien#include "reload.h"
4418334Speter#include "output.h"
4518334Speter#include "typeclass.h"
4650397Sobrien#include "toplev.h"
4790075Sobrien#include "ggc.h"
4890075Sobrien#include "langhooks.h"
4990075Sobrien#include "intl.h"
5090075Sobrien#include "tm_p.h"
51169689Skan#include "tree-iterator.h"
52169689Skan#include "tree-pass.h"
53169689Skan#include "tree-flow.h"
54132718Skan#include "target.h"
55169689Skan#include "timevar.h"
5618334Speter
5718334Speter/* Decide whether a function's arguments should be processed
5818334Speter   from first to last or from last to first.
5918334Speter
6018334Speter   They should if the stack and args grow in opposite directions, but
6118334Speter   only if we have push insns.  */
6218334Speter
6318334Speter#ifdef PUSH_ROUNDING
6418334Speter
65107590Sobrien#ifndef PUSH_ARGS_REVERSED
6618334Speter#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
6790075Sobrien#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
6818334Speter#endif
69107590Sobrien#endif
7018334Speter
7118334Speter#endif
7218334Speter
7318334Speter#ifndef STACK_PUSH_CODE
7418334Speter#ifdef STACK_GROWS_DOWNWARD
7518334Speter#define STACK_PUSH_CODE PRE_DEC
7618334Speter#else
7718334Speter#define STACK_PUSH_CODE PRE_INC
7818334Speter#endif
7918334Speter#endif
8018334Speter
8150397Sobrien
8218334Speter/* If this is nonzero, we do not bother generating VOLATILE
8318334Speter   around volatile memory references, and we are willing to
8418334Speter   output indirect addresses.  If cse is to follow, we reject
8518334Speter   indirect addresses so a useful potential cse is generated;
8618334Speter   if it is used only once, instruction combination will produce
8718334Speter   the same indirect address eventually.  */
8818334Speterint cse_not_expected;
8918334Speter
/* This structure is used by move_by_pieces to describe the move to
   be performed.  Field notes below summarize how move_by_pieces uses
   each member (the routine is defined later in this file).  */
struct move_by_pieces
{
  rtx to;			/* Destination of the move.  */
  rtx to_addr;			/* Address rtx for the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement
				   address.  */
  int explicit_inc_to;		/* Explicit increment applied to TO_ADDR,
				   if any.  */
  rtx from;			/* Source of the move.  */
  rtx from_addr;		/* Address rtx for the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR is an autoincrement
				   address.  */
  int explicit_inc_from;	/* Explicit increment applied to FROM_ADDR,
				   if any.  */
  unsigned HOST_WIDE_INT len;	/* Length of the block to move, in bytes.  */
  HOST_WIDE_INT offset;		/* Current byte offset within the block.  */
  int reverse;			/* Nonzero to move from high addresses
				   toward low addresses.  */
};
10618334Speter
/* This structure is used by store_by_pieces to describe the store to
   be performed (a "memset"-style fill or a constant-string copy).  */

struct store_by_pieces
{
  rtx to;			/* Destination of the store.  */
  rtx to_addr;			/* Address rtx for the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement
				   address.  */
  int explicit_inc_to;		/* Explicit increment applied to TO_ADDR,
				   if any.  */
  unsigned HOST_WIDE_INT len;	/* Length of the block to store, in bytes.  */
  HOST_WIDE_INT offset;		/* Current byte offset within the block.  */
  /* Callback producing the constant rtx to store for a given offset and
     machine mode; CONSTFUNDATA is passed through to it uninterpreted.  */
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;			/* Nonzero to store from high addresses
				   toward low addresses.  */
};
12250397Sobrien
123132718Skanstatic unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124169689Skan						     unsigned int,
125132718Skan						     unsigned int);
126132718Skanstatic void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127132718Skan			      struct move_by_pieces *);
128132718Skanstatic bool block_move_libcall_safe_for_call_parm (void);
129169689Skanstatic bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130169689Skanstatic rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131132718Skanstatic tree emit_block_move_libcall_fn (int);
132132718Skanstatic void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133132718Skanstatic rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134132718Skanstatic void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135132718Skanstatic void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136132718Skanstatic void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137132718Skan			       struct store_by_pieces *);
138169689Skanstatic rtx clear_storage_via_libcall (rtx, rtx, bool);
139132718Skanstatic tree clear_storage_libcall_fn (int);
140132718Skanstatic rtx compress_float_constant (rtx, rtx);
141132718Skanstatic rtx get_subtarget (rtx);
142132718Skanstatic void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143132718Skan				     HOST_WIDE_INT, enum machine_mode,
144132718Skan				     tree, tree, int, int);
145132718Skanstatic void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146132718Skanstatic rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147169689Skan			tree, tree, int);
148132718Skan
149132718Skanstatic unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150132718Skan
151132718Skanstatic int is_aligning_offset (tree, tree);
152132718Skanstatic void expand_operands (tree, tree, rtx, rtx*, rtx*,
153132718Skan			     enum expand_modifier);
154169689Skanstatic rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155132718Skanstatic rtx do_store_flag (tree, rtx, enum machine_mode, int);
15690075Sobrien#ifdef PUSH_ROUNDING
157132718Skanstatic void emit_single_push_insn (enum machine_mode, rtx, tree);
15890075Sobrien#endif
159132718Skanstatic void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160132718Skanstatic rtx const_vector_from_tree (tree);
161169689Skanstatic void write_complex_part (rtx, rtx, bool);
16218334Speter
16318334Speter/* Record for each mode whether we can move a register directly to or
16418334Speter   from an object of that mode in memory.  If we can't, we won't try
16518334Speter   to use that mode directly when accessing a field of that mode.  */
16618334Speter
16718334Speterstatic char direct_load[NUM_MACHINE_MODES];
16818334Speterstatic char direct_store[NUM_MACHINE_MODES];
16918334Speter
170117395Skan/* Record for each mode whether we can float-extend from memory.  */
171117395Skan
172117395Skanstatic bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173117395Skan
17452284Sobrien/* This macro is used to determine whether move_by_pieces should be called
17590075Sobrien   to perform a structure copy.  */
17652284Sobrien#ifndef MOVE_BY_PIECES_P
17790075Sobrien#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178169689Skan  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179169689Skan   < (unsigned int) MOVE_RATIO)
18052284Sobrien#endif
18152284Sobrien
182117395Skan/* This macro is used to determine whether clear_by_pieces should be
183117395Skan   called to clear storage.  */
184117395Skan#ifndef CLEAR_BY_PIECES_P
185117395Skan#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186169689Skan  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187169689Skan   < (unsigned int) CLEAR_RATIO)
188117395Skan#endif
189117395Skan
190132718Skan/* This macro is used to determine whether store_by_pieces should be
191132718Skan   called to "memset" storage with byte values other than zero, or
192132718Skan   to "memcpy" storage when the source is a constant string.  */
193132718Skan#ifndef STORE_BY_PIECES_P
194169689Skan#define STORE_BY_PIECES_P(SIZE, ALIGN) \
195169689Skan  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196169689Skan   < (unsigned int) MOVE_RATIO)
197132718Skan#endif
198132718Skan
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  Each array is indexed by machine mode
   and holds the insn_code implementing the corresponding atomic
   operation, or CODE_FOR_nothing when the target lacks it.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234132718Skan
235117395Skan/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
23650397Sobrien
23718334Speter#ifndef SLOW_UNALIGNED_ACCESS
23890075Sobrien#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
23918334Speter#endif
24018334Speter
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.

   For every machine mode, probe with recog () whether a hard register
   can be loaded from / stored to memory directly in that mode
   (recording the answers in direct_load[] / direct_store[]), and
   whether a float extension from memory is supported
   (float_extend_from_mem[][]).  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  /* A dummy (set ...) insn whose operands are rewritten in place for
     each recog () probe.  */
  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    /* Probe both address forms (stack-ptr based MEM and
	       frame-ptr based MEM1) in each direction; a single
	       recognized pattern suffices per direction.  */
	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  /* Now record, for each pair of float modes, whether an extending
     load from memory is directly supported by the target.  */
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
33450397Sobrien
33518334Speter/* This is run at the start of compiling a function.  */
33618334Speter
33718334Spetervoid
338132718Skaninit_expr (void)
33918334Speter{
340132718Skan  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
34118334Speter}
34218334Speter
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      /* Vector <-> scalar conversion must be between equal-sized modes;
	 it is a pure bit reinterpretation done via a subreg of the
	 non-vector side.  */
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  /* CONCAT values (e.g. complex) are converted part by part.  */
  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      /* A float conversion must change either the precision or the
	 decimal/binary representation; otherwise the modes would have
	 been handled by the equal-mode move above.  */
      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      /* Widen FROM to the full integer mode first if needed, then use
	 the required PxImode truncation insn.  */
      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      /* Extend the partial-int source into a full integer register,
	 then fall through to the ordinary integer conversions below.  */
      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  /* If the target's slt pattern stores -1 for "true" in
	     word_mode, a single compare+slt materializes the
	     sign-fill word.  */
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      /* Otherwise replicate the sign bit by an arithmetic
		 right shift of the low part.  */
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with	shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
71918334Speter
72018334Speter/* Return an rtx for a value that would result
72118334Speter   from converting X to mode MODE.
72218334Speter   Both X and MODE may be floating, or both integer.
72318334Speter   UNSIGNEDP is nonzero if X is an unsigned value.
72418334Speter   This can be done by referring to a part of X in place
725169689Skan   or by copying to a new temporary with conversion.  */
72618334Speter
72718334Speterrtx
728132718Skanconvert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
72918334Speter{
73018334Speter  return convert_modes (mode, VOIDmode, x, unsignedp);
73118334Speter}
73218334Speter
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  /* X's own mode, when it has one, overrides the caller's OLDMODE.  */
  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      /* High word explicitly zero, per the comment above.  */
      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  /* General case: copy to a fresh pseudo with an explicit conversion.  */
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
83918334Speter
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant (which is built from at most two
   HOST_WIDE_INT words).  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
846117395Skan
847132718Skan/* Determine whether the LEN bytes can be moved by using several move
848132718Skan   instructions.  Return nonzero if a call to move_by_pieces should
849132718Skan   succeed.  */
850132718Skan
851132718Skanint
852132718Skancan_move_by_pieces (unsigned HOST_WIDE_INT len,
853132718Skan		    unsigned int align ATTRIBUTE_UNUSED)
854132718Skan{
855132718Skan  return MOVE_BY_PIECES_P (len, align);
856132718Skan}
857132718Skan
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  /* The effective alignment cannot exceed what the MEMs themselves claim.  */
  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      /* Ordinary block destination: note any auto-inc/dec addressing it
	 already uses, and copy backwards when the address decrements.  */
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      /* No destination: we are pushing FROM onto the stack (see the
	 PUSH_ROUNDING case in move_by_pieces_1).  Direction follows
	 stack growth.  */
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  /* When copying back-to-front, start the offset at the end.  */
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  /* Cap the usable alignment at what the widest move mode requires;
     for an underaligned block, credit instead the alignment of the
     widest mode that is still not slow to access unaligned.  */
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      /* ENDP == 2 (stpcpy style): back up one byte past the
		 last byte written.  */
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
102818334Speter
102918334Speter/* Return number of insns required to move L bytes by pieces.
103090075Sobrien   ALIGN (in bits) is maximum alignment we can assume.  */
103118334Speter
103290075Sobrienstatic unsigned HOST_WIDE_INT
1033169689Skanmove_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1034169689Skan		       unsigned int max_size)
103518334Speter{
103690075Sobrien  unsigned HOST_WIDE_INT n_insns = 0;
1037169689Skan  enum machine_mode tmode;
103818334Speter
1039169689Skan  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1040169689Skan  if (align >= GET_MODE_ALIGNMENT (tmode))
1041169689Skan    align = GET_MODE_ALIGNMENT (tmode);
1042169689Skan  else
1043169689Skan    {
1044169689Skan      enum machine_mode tmode, xmode;
104518334Speter
1046169689Skan      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1047169689Skan	   tmode != VOIDmode;
1048169689Skan	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1049169689Skan	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1050169689Skan	    || SLOW_UNALIGNED_ACCESS (tmode, align))
1051169689Skan	  break;
1052169689Skan
1053169689Skan      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1054169689Skan    }
1055169689Skan
105618334Speter  while (max_size > 1)
105718334Speter    {
1058169689Skan      enum machine_mode mode = VOIDmode;
105918334Speter      enum insn_code icode;
106018334Speter
106118334Speter      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
106218334Speter	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
106318334Speter	if (GET_MODE_SIZE (tmode) < max_size)
106418334Speter	  mode = tmode;
106518334Speter
106618334Speter      if (mode == VOIDmode)
106718334Speter	break;
106818334Speter
106918334Speter      icode = mov_optab->handlers[(int) mode].insn_code;
107090075Sobrien      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
107118334Speter	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
107218334Speter
107318334Speter      max_size = GET_MODE_SIZE (mode);
107418334Speter    }
107518334Speter
1076169689Skan  gcc_assert (!l);
107718334Speter  return n_insns;
107818334Speter}
107918334Speter
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  /* Emit one MODE-sized move per iteration while at least SIZE bytes
     remain; the caller retries with narrower modes for the rest.  */
  while (data->len >= size)
    {
      /* When copying back-to-front, step the offset before the move.  */
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      /* Emit any requested explicit pre-decrement of the address regs.  */
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
	  /* A null destination means push FROM1 onto the stack.  */
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      /* Emit any requested explicit post-increment, mirroring above.  */
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      /* Forward copies step the offset after the move.  */
      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
114018334Speter
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  /* Decide whether a library call is an acceptable fallback for METHOD.  */
  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      /* A zero-length copy is a no-op.  */
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  /* Strategy, in order of preference: piecewise scalar moves, a
     movmem insn, the memcpy libcall (if permitted), and finally an
     explicit copy loop.  */
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  /* Balance the NO_DEFER_POP in the switch above.  */
  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
1222117395Skan
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    /* Walk the memcpy prototype's argument types, simulating the
       target's argument passing; fail if any argument would land,
       even partially, outside a register.  */
    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
1268117395Skan
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  /* Operand 2 is the length; convert it to the mode the
	     pattern wants, forcing it into a register if the
	     pattern's predicate rejects it as-is.  */
	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    /* The expander rejected the operands; discard anything it
	       emitted while trying and fall through to wider modes.  */
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
133790075Sobrien
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  If TAILCALL,
   mark the generated CALL_EXPR as a candidate for tail calling.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  /* Addresses are passed to memcpy in ptr_mode, not Pmode.  */
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  /* Build the argument list back to front: (dst, src, size).  */
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
1390117395Skan
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

/* Cached FUNCTION_DECL for memcpy, built lazily; GTY keeps it alive
   across garbage collections.  */
static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      /* Declare: void *memcpy (void *, const void *, size_t);  */
      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  /* ASMSPEC, if given, overrides the symbol name used for the decl in
     assembler output.  */
  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
1423132718Skan
1424132718Skanstatic tree
1425132718Skanemit_block_move_libcall_fn (int for_call)
1426132718Skan{
1427132718Skan  static bool emitted_extern;
1428132718Skan
1429132718Skan  if (!block_move_fn)
1430132718Skan    init_block_move_fn (NULL);
1431132718Skan
1432117395Skan  if (for_call && !emitted_extern)
1433117395Skan    {
1434117395Skan      emitted_extern = true;
1435169689Skan      make_decl_rtl (block_move_fn);
1436132718Skan      assemble_external (block_move_fn);
1437117395Skan    }
143850397Sobrien
1439132718Skan  return block_move_fn;
1440117395Skan}
144190075Sobrien
1442117395Skan/* A subroutine of emit_block_move.  Copy the data via an explicit
1443117395Skan   loop.  This is used only when libcalls are forbidden.  */
1444117395Skan/* ??? It'd be nice to copy in hunks larger than QImode.  */
144550397Sobrien
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  /* Emit RTL equivalent to:
	iter = 0; goto cmp;
     top:	*(x + iter) = *(y + iter);  (one QImode byte)
	iter = iter + 1;
     cmp:	if (iter < size) goto top;
     X and Y are the destination and source MEMs; SIZE is the byte
     count (CONST_INT or register).  ALIGN is unused since we copy in
     QImode units only.  */
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  /* Run the induction variable in SIZE's mode; a constant SIZE has
     VOIDmode, so fall back to word_mode.  */
  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  /* Legitimize both base addresses once, outside the loop.  */
  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  /* Jump to the test first, so a zero SIZE copies nothing.  */
  emit_jump (cmp_label);
  emit_label (top_label);

  /* Current byte address = base + iter, with iter widened to Pmode.  */
  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  /* iter = iter + 1, incrementing in place when possible.  */
  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  /* Continue while iter < size, compared unsigned.  */
  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
148818334Speter
148918334Speter/* Copy all or part of a value X into registers starting at REGNO.
149018334Speter   The number of registers to be filled is NREGS.  */
149118334Speter
void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  /* A constant the target cannot accept directly is forced into the
     constant pool, so the word-extraction below reads from memory.  */
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	/* The expander declined; discard whatever it emitted and fall
	   through to the word-at-a-time copy.  */
	delete_insns_since (last);
    }
#endif

  /* Fallback: move one word at a time into consecutive registers.  */
  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
152818334Speter
152918334Speter/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1530132718Skan   The number of registers to be filled is NREGS.  */
153118334Speter
void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	/* The expander declined; discard whatever it emitted and fall
	   through to the word-at-a-time copy.  */
	delete_insns_since (last);
    }
#endif

  /* Fallback: store one word at a time from consecutive registers.  */
  for (i = 0; i < nregs; i++)
    {
      /* X is BLKmode, so every requested word must exist; a NULL
	 subword here indicates a caller bug.  */
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
156618334Speter
1567117395Skan/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1568117395Skan   ORIG, where ORIG is a non-consecutive group of registers represented by
1569117395Skan   a PARALLEL.  The clone is identical to the original except in that the
1570117395Skan   original set of registers is replaced by a new set of pseudo registers.
1571117395Skan   The new set has the same modes as the original set.  */
1572117395Skan
1573117395Skanrtx
1574132718Skangen_group_rtx (rtx orig)
1575117395Skan{
1576117395Skan  int i, length;
1577117395Skan  rtx *tmps;
1578117395Skan
1579169689Skan  gcc_assert (GET_CODE (orig) == PARALLEL);
1580117395Skan
1581117395Skan  length = XVECLEN (orig, 0);
1582132718Skan  tmps = alloca (sizeof (rtx) * length);
1583117395Skan
1584117395Skan  /* Skip a NULL entry in first slot.  */
1585117395Skan  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1586117395Skan
1587117395Skan  if (i)
1588117395Skan    tmps[0] = 0;
1589117395Skan
1590117395Skan  for (; i < length; i++)
1591117395Skan    {
1592117395Skan      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1593117395Skan      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1594117395Skan
1595117395Skan      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1596117395Skan    }
1597117395Skan
1598117395Skan  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1599117395Skan}
1600117395Skan
1601169689Skan/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1602169689Skan   except that values are placed in TMPS[i], and must later be moved
1603169689Skan   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
160450397Sobrien
static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  /* A non-scalar-integer source that is neither memory nor a CONCAT
     cannot be picked apart piecemeal; copy it through an equal-sized
     integer register (or a stack slot if no such mode exists) and
     recurse on that copy instead.  */
  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit: a full-sized, sufficiently
	 aligned piece can be loaded with a plain move.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      /* A piece that straddles both halves of the CONCAT is only
		 supported at offset 0; spill the whole CONCAT to memory
		 and extract from there.  */
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      /* A trailing fragment that was loaded to the lsb above is shifted
	 back up to where it belongs in the register.  */
      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
175850397Sobrien
1759169689Skan/* Emit code to move a block SRC of type TYPE to a block DST,
1760169689Skan   where DST is non-consecutive registers represented by a PARALLEL.
1761169689Skan   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1762169689Skan   if not known.  */
176390075Sobrien
1764169689Skanvoid
1765169689Skanemit_group_load (rtx dst, rtx src, tree type, int ssize)
1766169689Skan{
1767169689Skan  rtx *tmps;
1768169689Skan  int i;
1769169689Skan
1770169689Skan  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1771169689Skan  emit_group_load_1 (tmps, dst, src, type, ssize);
1772169689Skan
177350397Sobrien  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1774169689Skan  for (i = 0; i < XVECLEN (dst, 0); i++)
1775169689Skan    {
1776169689Skan      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1777169689Skan      if (d == NULL)
1778169689Skan	continue;
1779169689Skan      emit_move_insn (d, tmps[i]);
1780169689Skan    }
178150397Sobrien}
178250397Sobrien
1783169689Skan/* Similar, but load SRC into new pseudos in a format that looks like
1784169689Skan   PARALLEL.  This can later be fed to emit_group_move to get things
1785169689Skan   in the right place.  */
1786169689Skan
1787169689Skanrtx
1788169689Skanemit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1789169689Skan{
1790169689Skan  rtvec vec;
1791169689Skan  int i;
1792169689Skan
1793169689Skan  vec = rtvec_alloc (XVECLEN (parallel, 0));
1794169689Skan  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1795169689Skan
1796169689Skan  /* Convert the vector to look just like the original PARALLEL, except
1797169689Skan     with the computed values.  */
1798169689Skan  for (i = 0; i < XVECLEN (parallel, 0); i++)
1799169689Skan    {
1800169689Skan      rtx e = XVECEXP (parallel, 0, i);
1801169689Skan      rtx d = XEXP (e, 0);
1802169689Skan
1803169689Skan      if (d)
1804169689Skan	{
1805169689Skan	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1806169689Skan	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1807169689Skan	}
1808169689Skan      RTVEC_ELT (vec, i) = e;
1809169689Skan    }
1810169689Skan
1811169689Skan  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1812169689Skan}
1813169689Skan
1814117395Skan/* Emit code to move a block SRC to block DST, where SRC and DST are
1815117395Skan   non-consecutive groups of registers, each represented by a PARALLEL.  */
1816117395Skan
1817117395Skanvoid
1818132718Skanemit_group_move (rtx dst, rtx src)
1819117395Skan{
1820117395Skan  int i;
1821117395Skan
1822169689Skan  gcc_assert (GET_CODE (src) == PARALLEL
1823169689Skan	      && GET_CODE (dst) == PARALLEL
1824169689Skan	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1825117395Skan
1826117395Skan  /* Skip first entry if NULL.  */
1827117395Skan  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1828117395Skan    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1829117395Skan		    XEXP (XVECEXP (src, 0, i), 0));
1830117395Skan}
1831117395Skan
1832169689Skan/* Move a group of registers represented by a PARALLEL into pseudos.  */
1833169689Skan
1834169689Skanrtx
1835169689Skanemit_group_move_into_temps (rtx src)
1836169689Skan{
1837169689Skan  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1838169689Skan  int i;
1839169689Skan
1840169689Skan  for (i = 0; i < XVECLEN (src, 0); i++)
1841169689Skan    {
1842169689Skan      rtx e = XVECEXP (src, 0, i);
1843169689Skan      rtx d = XEXP (e, 0);
1844169689Skan
1845169689Skan      if (d)
1846169689Skan	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1847169689Skan      RTVEC_ELT (vec, i) = e;
1848169689Skan    }
1849169689Skan
1850169689Skan  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1851169689Skan}
1852169689Skan
1853132718Skan/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1854132718Skan   where SRC is non-consecutive registers represented by a PARALLEL.
1855132718Skan   SSIZE represents the total size of block ORIG_DST, or -1 if not
1856132718Skan   known.  */
185750397Sobrien
void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  /* A non-scalar-integer destination that is neither memory nor a
     CONCAT cannot be filled piecemeal; store into an equal-sized
     integer register (or a stack slot if no such mode exists) first,
     then move the whole thing into ORIG_DST.  */
  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = alloca (sizeof (rtx) * finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      /* Never store pieces directly into a hard register; build the
	 value in a pseudo and copy it over at the end.  */
      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  /* The piece is consumed; skip it in the store loop.  */
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      /* A CONCAT destination: pick whichever half holds the piece; a
	 piece spanning both halves (only supported at offset 0) forces
	 the remainder of the store through a stack temporary.  */
      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
				        GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit: a full-sized, sufficiently
	 aligned piece can be stored with a plain move.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
205050397Sobrien
205152284Sobrien/* Generate code to copy a BLKmode object of TYPE out of a
205252284Sobrien   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
205352284Sobrien   is null, a stack temporary is created.  TGTBLK is returned.
205452284Sobrien
2055132718Skan   The purpose of this routine is to handle functions that return
2056132718Skan   BLKmode structures in registers.  Some machines (the PA for example)
2057132718Skan   want to return all small structures in registers regardless of the
2058132718Skan   structure's alignment.  */
205952284Sobrien
rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  /* Copy in chunks no larger than a word, and no larger than the
     type's alignment allows.  */
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  /* If the caller gave no target block, create a const-qualified
     stack temporary of the right type.  */
  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
213252284Sobrien
213318334Speter/* Add a USE expression for REG to the (possibly empty) list pointed
213418334Speter   to by CALL_FUSAGE.  REG must denote a hard register.  */
213518334Speter
213618334Spetervoid
2137132718Skanuse_reg (rtx *call_fusage, rtx reg)
213818334Speter{
2139169689Skan  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2140259563Spfg
214118334Speter  *call_fusage
214250397Sobrien    = gen_rtx_EXPR_LIST (VOIDmode,
214350397Sobrien			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
214418334Speter}
214518334Speter
214618334Speter/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
214718334Speter   starting at REGNO.  All of these registers must be hard registers.  */
214818334Speter
214918334Spetervoid
2150132718Skanuse_regs (rtx *call_fusage, int regno, int nregs)
215118334Speter{
215218334Speter  int i;
215318334Speter
2154169689Skan  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
215518334Speter
215618334Speter  for (i = 0; i < nregs; i++)
2157117395Skan    use_reg (call_fusage, regno_reg_rtx[regno + i]);
215818334Speter}
215950397Sobrien
216050397Sobrien/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
216150397Sobrien   PARALLEL REGS.  This is for calls that pass values in multiple
216250397Sobrien   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
216350397Sobrien
216450397Sobrienvoid
2165132718Skanuse_group_regs (rtx *call_fusage, rtx regs)
216650397Sobrien{
216750397Sobrien  int i;
216850397Sobrien
216950397Sobrien  for (i = 0; i < XVECLEN (regs, 0); i++)
217050397Sobrien    {
217150397Sobrien      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
217250397Sobrien
217350397Sobrien      /* A NULL entry means the parameter goes both on the stack and in
217450397Sobrien	 registers.  This can also be a MEM for targets that pass values
217550397Sobrien	 partially on the stack and partially in registers.  */
2176169689Skan      if (reg != 0 && REG_P (reg))
217750397Sobrien	use_reg (call_fusage, reg);
217850397Sobrien    }
217950397Sobrien}
218018334Speter
218150397Sobrien
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  /* A zero-length store trivially succeeds.  */
  if (len == 0)
    return 1;

  /* Respect the target's size/alignment cutoff for store-by-pieces.  */
  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  /* If unaligned accesses are fast enough, we may pretend to a larger
     alignment: raise ALIGN to the alignment of the widest integer mode
     (of at most STORE_MAX_PIECES bytes) that is still fast at the
     given alignment.  */
  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      /* XMODE trails TMODE by one step; on loop exit it is the widest
	 mode that is both small enough and fast at this alignment.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  /* Try the forward traversal, and — when the target has decrementing
     addressing — also the reverse one, since store_by_pieces_1 may
     choose either.  */
  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  /* Find the widest integer mode strictly narrower than
	     MAX_SIZE bytes.  */
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  /* Every constant we would emit must be a legitimate
		     move-insn immediate; otherwise store_by_pieces
		     cannot be used.  */
		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
227590075Sobrien
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      /* ENDP == 2 asks for the byte before the end, which does not
	 exist for an empty block.  */
      gcc_assert (endp != 2);
      return to;
    }

  /* Callers are expected to have checked can_store_by_pieces first.  */
  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      /* A reverse traversal would leave the address at the start of
	 the block, not the end.  */
      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      /* Step back one byte: undo the last post-increment if
		 possible, otherwise compute the address explicitly.  */
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
233290075Sobrien
233390075Sobrien/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2334169689Skan   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
233590075Sobrien
233650397Sobrienstatic void
2337132718Skanclear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
233850397Sobrien{
233990075Sobrien  struct store_by_pieces data;
234090075Sobrien
2341119256Skan  if (len == 0)
2342119256Skan    return;
2343119256Skan
234490075Sobrien  data.constfun = clear_by_pieces_1;
234590075Sobrien  data.constfundata = NULL;
234690075Sobrien  data.len = len;
234790075Sobrien  data.to = to;
234890075Sobrien  store_by_pieces_1 (&data, align);
234990075Sobrien}
235090075Sobrien
235190075Sobrien/* Callback routine for clear_by_pieces.
235290075Sobrien   Return const0_rtx unconditionally.  */
235390075Sobrien
235490075Sobrienstatic rtx
2355132718Skanclear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2356132718Skan		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2357132718Skan		   enum machine_mode mode ATTRIBUTE_UNUSED)
235890075Sobrien{
235990075Sobrien  return const0_rtx;
236090075Sobrien}
236190075Sobrien
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  /* Record whether the destination address already auto-modifies.  */
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  /* A decrementing address means the block is filled back-to-front,
     so start the offset at the end.  */
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  /* Start one past the end and pre-decrement before each store.  */
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      /* Even without auto-increment, keep a constant address in a
	 register since several insns will reference it.  */
      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  /* If unaligned stores are fast, we may pretend to a larger
     alignment: raise ALIGN to the alignment of the widest fast mode
     of at most STORE_MAX_PIECES bytes.  */
  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      /* XMODE trails TMODE by one step; on loop exit it is the widest
	 usable mode.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      /* Find the widest integer mode strictly narrower than MAX_SIZE.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
245850397Sobrien
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      /* When filling backwards, step the offset before the store.  */
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      /* Explicitly decrement the address register before the store
	 when the pre-decrement strategy was selected.  */
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      /* Ask the callback for the constant to store at this offset.  */
      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      /* Likewise increment afterwards for the post-increment strategy.  */
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
249750397Sobrien
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  METHOD selects normal vs. tail-call expansion of
   any resulting library call.  Return the libcall's return value rtx, or
   NULL when the clearing was expanded inline.  */

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      /* No whole-mode zero constant; for a complex mode, try zeroing
	 the real and imaginary parts individually.  */
      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  /* Prefer inline piecewise clearing, then a target setmem insn, and
     finally fall back to a library call to memset.  */
  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align))
    ;
  else
    return clear_storage_via_libcall (object, size,
				      method == BLOCK_OP_TAILCALL);

  return NULL;
}
255050397Sobrien
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  TAILCALL marks the
   call as eligible for tail-call optimization.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  /* Build the argument list (OBJECT, 0, SIZE), consing back to front.  */
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
259490075Sobrien
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

/* Build (once) the FUNCTION_DECL for memset used by block clears; if
   ASMSPEC is non-NULL, also attach that user-specified assembler name.  */

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      /* Declare memset as: void *memset (void *, int, size_t).  */
      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}
2627132718Skan
2628132718Skanstatic tree
2629132718Skanclear_storage_libcall_fn (int for_call)
2630132718Skan{
2631132718Skan  static bool emitted_extern;
2632132718Skan
2633132718Skan  if (!block_clear_fn)
2634132718Skan    init_block_clear_fn (NULL);
2635132718Skan
2636117395Skan  if (for_call && !emitted_extern)
2637117395Skan    {
2638117395Skan      emitted_extern = true;
2639169689Skan      make_decl_rtl (block_clear_fn);
2640132718Skan      assemble_external (block_clear_fn);
2641117395Skan    }
2642117395Skan
2643132718Skan  return block_clear_fn;
264418334Speter}
2645117395Skan
/* Expand a setmem pattern; return true if successful.  OBJECT is the
   BLKmode MEM to fill, SIZE the byte count rtx, VAL the fill value,
   and ALIGN the known alignment in bits.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  /* Operand 0 (the destination) and operand 3 (the alignment)
	     must satisfy the pattern's predicates, if any.  */
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx opsize, opchar;
	  enum machine_mode char_mode;
	  rtx last = get_last_insn ();
	  rtx pat;

	  /* Coerce the size operand into a form operand 1 accepts.  */
	  opsize = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (opsize, mode))
	    opsize = copy_to_mode_reg (mode, opsize);

	  /* Likewise coerce the fill value into operand 2's mode.  */
	  opchar = val;
	  char_mode = insn_data[(int) code].operand[2].mode;
	  if (char_mode != VOIDmode)
	    {
	      opchar = convert_to_mode (char_mode, opchar, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (opchar, char_mode))
		opchar = copy_to_mode_reg (char_mode, opchar);
	    }

	  pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    /* The expander declined; discard anything it emitted and
	       try the next wider mode.  */
	    delete_insns_since (last);
	}
    }

  return false;
}
2711169689Skan
2712169689Skan
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if its true.  */

static void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  /* A CONCAT holds the two parts as separate rtxes: XEXP 0 is the
     real part, XEXP 1 the imaginary part.  */
  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  /* Fall back to storing the part as a bit-field.  */
  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
2773169689Skan
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

static rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  /* A CONCAT holds the parts separately: XEXP 0 real, XEXP 1 imaginary.  */
  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  /* Fall back to extracting the part as a bit-field.  */
  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}
2836169689Skan
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
287818334Speter
2879169689Skan/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
2880169689Skan   an integer mode of the same size as MODE.  Returns the instruction
2881169689Skan   emitted, or NULL if such a move could not be generated.  */
288218334Speter
2883169689Skanstatic rtx
2884169689Skanemit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2885169689Skan{
2886169689Skan  enum machine_mode imode;
2887169689Skan  enum insn_code code;
288818334Speter
2889169689Skan  /* There must exist a mode of the exact size we require.  */
2890169689Skan  imode = int_mode_for_mode (mode);
2891169689Skan  if (imode == BLKmode)
2892169689Skan    return NULL_RTX;
289390075Sobrien
2894169689Skan  /* The target must support moves in this mode.  */
2895169689Skan  code = mov_optab->handlers[imode].insn_code;
2896169689Skan  if (code == CODE_FOR_nothing)
2897169689Skan    return NULL_RTX;
289890075Sobrien
2899169689Skan  x = emit_move_change_mode (imode, mode, x, force);
2900169689Skan  if (x == NULL_RTX)
2901169689Skan    return NULL_RTX;
2902169689Skan  y = emit_move_change_mode (imode, mode, y, force);
2903169689Skan  if (y == NULL_RTX)
2904169689Skan    return NULL_RTX;
2905169689Skan  return emit_insn (GEN_FCN (code) (x, y));
290618334Speter}
290718334Speter
2908169689Skan/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
2909169689Skan   Return an equivalent MEM that does not use an auto-increment.  */
291018334Speter
2911169689Skanstatic rtx
2912169689Skanemit_move_resolve_push (enum machine_mode mode, rtx x)
291318334Speter{
2914169689Skan  enum rtx_code code = GET_CODE (XEXP (x, 0));
2915169689Skan  HOST_WIDE_INT adjust;
2916169689Skan  rtx temp;
291718334Speter
2918169689Skan  adjust = GET_MODE_SIZE (mode);
2919169689Skan#ifdef PUSH_ROUNDING
2920169689Skan  adjust = PUSH_ROUNDING (adjust);
2921169689Skan#endif
2922169689Skan  if (code == PRE_DEC || code == POST_DEC)
2923169689Skan    adjust = -adjust;
2924169689Skan  else if (code == PRE_MODIFY || code == POST_MODIFY)
2925169689Skan    {
2926169689Skan      rtx expr = XEXP (XEXP (x, 0), 1);
2927169689Skan      HOST_WIDE_INT val;
292852284Sobrien
2929169689Skan      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2930169689Skan      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2931169689Skan      val = INTVAL (XEXP (expr, 1));
2932169689Skan      if (GET_CODE (expr) == MINUS)
2933169689Skan	val = -val;
2934169689Skan      gcc_assert (adjust == val || adjust == -val);
2935169689Skan      adjust = val;
2936169689Skan    }
293718334Speter
2938169689Skan  /* Do not use anti_adjust_stack, since we don't want to update
2939169689Skan     stack_pointer_delta.  */
2940169689Skan  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2941169689Skan			      GEN_INT (adjust), stack_pointer_rtx,
2942169689Skan			      0, OPTAB_LIB_WIDEN);
2943169689Skan  if (temp != stack_pointer_rtx)
2944169689Skan    emit_move_insn (stack_pointer_rtx, temp);
2945169689Skan
2946169689Skan  switch (code)
294718334Speter    {
2948169689Skan    case PRE_INC:
2949169689Skan    case PRE_DEC:
2950169689Skan    case PRE_MODIFY:
2951169689Skan      temp = stack_pointer_rtx;
2952169689Skan      break;
2953169689Skan    case POST_INC:
2954169689Skan    case POST_DEC:
2955169689Skan    case POST_MODIFY:
2956169689Skan      temp = plus_constant (stack_pointer_rtx, -adjust);
2957169689Skan      break;
2958169689Skan    default:
2959169689Skan      gcc_unreachable ();
2960169689Skan    }
296118334Speter
2962169689Skan  return replace_equiv_address (x, temp);
2963169689Skan}
2964169689Skan
2965169689Skan/* A subroutine of emit_move_complex.  Generate a move from Y into X.
2966169689Skan   X is known to satisfy push_operand, and MODE is known to be complex.
2967169689Skan   Returns the last instruction emitted.  */
2968169689Skan
2969169689Skanstatic rtx
2970169689Skanemit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2971169689Skan{
2972169689Skan  enum machine_mode submode = GET_MODE_INNER (mode);
2973169689Skan  bool imag_first;
2974169689Skan
297590075Sobrien#ifdef PUSH_ROUNDING
2976169689Skan  unsigned int submodesize = GET_MODE_SIZE (submode);
297790075Sobrien
2978169689Skan  /* In case we output to the stack, but the size is smaller than the
2979169689Skan     machine can push exactly, we need to use move instructions.  */
2980169689Skan  if (PUSH_ROUNDING (submodesize) != submodesize)
2981169689Skan    {
2982169689Skan      x = emit_move_resolve_push (mode, x);
2983169689Skan      return emit_move_insn (x, y);
2984169689Skan    }
298590075Sobrien#endif
298690075Sobrien
2987169689Skan  /* Note that the real part always precedes the imag part in memory
2988169689Skan     regardless of machine's endianness.  */
2989169689Skan  switch (GET_CODE (XEXP (x, 0)))
2990169689Skan    {
2991169689Skan    case PRE_DEC:
2992169689Skan    case POST_DEC:
2993169689Skan      imag_first = true;
2994169689Skan      break;
2995169689Skan    case PRE_INC:
2996169689Skan    case POST_INC:
2997169689Skan      imag_first = false;
2998169689Skan      break;
2999169689Skan    default:
3000169689Skan      gcc_unreachable ();
3001169689Skan    }
300290075Sobrien
3003169689Skan  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004169689Skan		  read_complex_part (y, imag_first));
3005169689Skan  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3006169689Skan			 read_complex_part (y, !imag_first));
3007169689Skan}
300890075Sobrien
3009169689Skan/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3010169689Skan   MODE is known to be complex.  Returns the last instruction emitted.  */
3011169689Skan
3012169689Skanstatic rtx
3013169689Skanemit_move_complex (enum machine_mode mode, rtx x, rtx y)
3014169689Skan{
3015169689Skan  bool try_int;
3016169689Skan
3017169689Skan  /* Need to take special care for pushes, to maintain proper ordering
3018169689Skan     of the data, and possibly extra padding.  */
3019169689Skan  if (push_operand (x, mode))
3020169689Skan    return emit_move_complex_push (mode, x, y);
3021169689Skan
3022169689Skan  /* See if we can coerce the target into moving both values at once.  */
3023169689Skan
3024169689Skan  /* Move floating point as parts.  */
3025169689Skan  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3026169689Skan      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3027169689Skan    try_int = false;
3028169689Skan  /* Not possible if the values are inherently not adjacent.  */
3029169689Skan  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3030169689Skan    try_int = false;
3031169689Skan  /* Is possible if both are registers (or subregs of registers).  */
3032169689Skan  else if (register_operand (x, mode) && register_operand (y, mode))
3033169689Skan    try_int = true;
3034169689Skan  /* If one of the operands is a memory, and alignment constraints
3035169689Skan     are friendly enough, we may be able to do combined memory operations.
3036169689Skan     We do not attempt this if Y is a constant because that combination is
3037169689Skan     usually better with the by-parts thing below.  */
3038169689Skan  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3039169689Skan	   && (!STRICT_ALIGNMENT
3040169689Skan	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3041169689Skan    try_int = true;
3042169689Skan  else
3043169689Skan    try_int = false;
3044169689Skan
3045169689Skan  if (try_int)
3046169689Skan    {
3047169689Skan      rtx ret;
3048169689Skan
3049169689Skan      /* For memory to memory moves, optimal behavior can be had with the
3050169689Skan	 existing block move logic.  */
3051169689Skan      if (MEM_P (x) && MEM_P (y))
3052169689Skan	{
3053169689Skan	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3054169689Skan			   BLOCK_OP_NO_LIBCALL);
3055169689Skan	  return get_last_insn ();
305690075Sobrien	}
305718334Speter
3058169689Skan      ret = emit_move_via_integer (mode, x, y, true);
3059169689Skan      if (ret)
3060169689Skan	return ret;
3061169689Skan    }
3062169689Skan
3063169689Skan  /* Show the output dies here.  This is necessary for SUBREGs
3064169689Skan     of pseudos since we cannot track their lifetimes correctly;
3065169689Skan     hard regs shouldn't appear here except as return values.  */
3066169689Skan  if (!reload_completed && !reload_in_progress
3067169689Skan      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3068169689Skan    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3069169689Skan
3070169689Skan  write_complex_part (x, read_complex_part (y, false), false);
3071169689Skan  write_complex_part (x, read_complex_part (y, true), true);
3072169689Skan  return get_last_insn ();
3073169689Skan}
3074169689Skan
3075169689Skan/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3076169689Skan   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3077169689Skan
3078169689Skanstatic rtx
3079169689Skanemit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3080169689Skan{
3081169689Skan  rtx ret;
3082169689Skan
3083169689Skan  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3084169689Skan  if (mode != CCmode)
3085169689Skan    {
3086169689Skan      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3087169689Skan      if (code != CODE_FOR_nothing)
308818334Speter	{
3089169689Skan	  x = emit_move_change_mode (CCmode, mode, x, true);
3090169689Skan	  y = emit_move_change_mode (CCmode, mode, y, true);
3091169689Skan	  return emit_insn (GEN_FCN (code) (x, y));
309218334Speter	}
3093169689Skan    }
309470635Sobrien
3095169689Skan  /* Otherwise, find the MODE_INT mode of the same width.  */
3096169689Skan  ret = emit_move_via_integer (mode, x, y, false);
3097169689Skan  gcc_assert (ret != NULL);
3098169689Skan  return ret;
3099169689Skan}
310070635Sobrien
3101169689Skan/* Return true if word I of OP lies entirely in the
3102169689Skan   undefined bits of a paradoxical subreg.  */
310370635Sobrien
3104169689Skanstatic bool
3105169689Skanundefined_operand_subword_p (rtx op, int i)
3106169689Skan{
3107169689Skan  enum machine_mode innermode, innermostmode;
3108169689Skan  int offset;
3109169689Skan  if (GET_CODE (op) != SUBREG)
3110169689Skan    return false;
3111169689Skan  innermode = GET_MODE (op);
3112169689Skan  innermostmode = GET_MODE (SUBREG_REG (op));
3113169689Skan  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3114169689Skan  /* The SUBREG_BYTE represents offset, as if the value were stored in
3115169689Skan     memory, except for a paradoxical subreg where we define
3116169689Skan     SUBREG_BYTE to be 0; undo this exception as in
3117169689Skan     simplify_subreg.  */
3118169689Skan  if (SUBREG_BYTE (op) == 0
3119169689Skan      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3120169689Skan    {
3121169689Skan      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3122169689Skan      if (WORDS_BIG_ENDIAN)
3123169689Skan	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3124169689Skan      if (BYTES_BIG_ENDIAN)
3125169689Skan	offset += difference % UNITS_PER_WORD;
3126169689Skan    }
3127169689Skan  if (offset >= GET_MODE_SIZE (innermostmode)
3128169689Skan      || offset <= -GET_MODE_SIZE (word_mode))
3129169689Skan    return true;
3130169689Skan  return false;
3131169689Skan}
313270635Sobrien
3133169689Skan/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3134169689Skan   MODE is any multi-word or full-word mode that lacks a move_insn
3135169689Skan   pattern.  Note that you will get better code if you define such
3136169689Skan   patterns, even if they must turn into multiple assembler instructions.  */
313770635Sobrien
3138169689Skanstatic rtx
3139169689Skanemit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3140169689Skan{
3141169689Skan  rtx last_insn = 0;
3142169689Skan  rtx seq, inner;
3143169689Skan  bool need_clobber;
3144169689Skan  int i;
3145259563Spfg
3146169689Skan  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3147259563Spfg
3148169689Skan  /* If X is a push on the stack, do the push now and replace
3149169689Skan     X with a reference to the stack pointer.  */
3150169689Skan  if (push_operand (x, mode))
3151169689Skan    x = emit_move_resolve_push (mode, x);
315270635Sobrien
3153169689Skan  /* If we are in reload, see if either operand is a MEM whose address
3154169689Skan     is scheduled for replacement.  */
3155169689Skan  if (reload_in_progress && MEM_P (x)
3156169689Skan      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3157169689Skan    x = replace_equiv_address_nv (x, inner);
3158169689Skan  if (reload_in_progress && MEM_P (y)
3159169689Skan      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3160169689Skan    y = replace_equiv_address_nv (y, inner);
316190075Sobrien
3162169689Skan  start_sequence ();
316390075Sobrien
3164169689Skan  need_clobber = false;
3165169689Skan  for (i = 0;
3166169689Skan       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3167169689Skan       i++)
3168169689Skan    {
3169169689Skan      rtx xpart = operand_subword (x, i, 1, mode);
3170169689Skan      rtx ypart;
317170635Sobrien
3172169689Skan      /* Do not generate code for a move if it would come entirely
3173169689Skan	 from the undefined bits of a paradoxical subreg.  */
3174169689Skan      if (undefined_operand_subword_p (y, i))
3175169689Skan	continue;
317670635Sobrien
3177169689Skan      ypart = operand_subword (y, i, 1, mode);
317850397Sobrien
3179169689Skan      /* If we can't get a part of Y, put Y into memory if it is a
3180169689Skan	 constant.  Otherwise, force it into a register.  Then we must
3181169689Skan	 be able to get a part of Y.  */
3182169689Skan      if (ypart == 0 && CONSTANT_P (y))
3183169689Skan	{
3184169689Skan	  y = use_anchored_address (force_const_mem (mode, y));
3185169689Skan	  ypart = operand_subword (y, i, 1, mode);
318618334Speter	}
3187169689Skan      else if (ypart == 0)
3188169689Skan	ypart = operand_subword_force (y, i, mode);
318918334Speter
3190169689Skan      gcc_assert (xpart && ypart);
3191169689Skan
3192169689Skan      need_clobber |= (GET_CODE (xpart) == SUBREG);
3193169689Skan
3194169689Skan      last_insn = emit_move_insn (xpart, ypart);
319518334Speter    }
319618334Speter
3197169689Skan  seq = get_insns ();
3198169689Skan  end_sequence ();
3199132718Skan
3200169689Skan  /* Show the output dies here.  This is necessary for SUBREGs
3201169689Skan     of pseudos since we cannot track their lifetimes correctly;
3202169689Skan     hard regs shouldn't appear here except as return values.
3203169689Skan     We never want to emit such a clobber after reload.  */
3204169689Skan  if (x != y
3205169689Skan      && ! (reload_in_progress || reload_completed)
3206169689Skan      && need_clobber != 0)
3207169689Skan    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3208132718Skan
3209169689Skan  emit_insn (seq);
3210132718Skan
3211169689Skan  return last_insn;
3212169689Skan}
3213132718Skan
3214169689Skan/* Low level part of emit_move_insn.
3215169689Skan   Called just like emit_move_insn, but assumes X and Y
3216169689Skan   are basically valid.  */
3217132718Skan
3218169689Skanrtx
3219169689Skanemit_move_insn_1 (rtx x, rtx y)
3220169689Skan{
3221169689Skan  enum machine_mode mode = GET_MODE (x);
3222169689Skan  enum insn_code code;
3223169689Skan
3224169689Skan  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3225169689Skan
3226169689Skan  code = mov_optab->handlers[mode].insn_code;
3227169689Skan  if (code != CODE_FOR_nothing)
3228169689Skan    return emit_insn (GEN_FCN (code) (x, y));
3229169689Skan
3230169689Skan  /* Expand complex moves by moving real part and imag part.  */
3231169689Skan  if (COMPLEX_MODE_P (mode))
3232169689Skan    return emit_move_complex (mode, x, y);
3233169689Skan
3234169689Skan  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3235169689Skan    {
3236169689Skan      rtx result = emit_move_via_integer (mode, x, y, true);
3237169689Skan
3238169689Skan      /* If we can't find an integer mode, use multi words.  */
3239169689Skan      if (result)
3240169689Skan	return result;
3241132718Skan      else
3242169689Skan	return emit_move_multi_word (mode, x, y);
3243132718Skan    }
3244132718Skan
3245169689Skan  if (GET_MODE_CLASS (mode) == MODE_CC)
3246169689Skan    return emit_move_ccmode (mode, x, y);
3247169689Skan
3248132718Skan  /* Try using a move pattern for the corresponding integer mode.  This is
3249132718Skan     only safe when simplify_subreg can convert MODE constants into integer
3250132718Skan     constants.  At present, it can only do this reliably if the value
3251132718Skan     fits within a HOST_WIDE_INT.  */
3252169689Skan  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
325318334Speter    {
3254169689Skan      rtx ret = emit_move_via_integer (mode, x, y, false);
3255169689Skan      if (ret)
3256169689Skan	return ret;
3257169689Skan    }
325890075Sobrien
3259169689Skan  return emit_move_multi_word (mode, x, y);
3260169689Skan}
326118334Speter
3262169689Skan/* Generate code to copy Y into X.
3263169689Skan   Both Y and X must have the same mode, except that
3264169689Skan   Y can be a constant with VOIDmode.
3265169689Skan   This mode cannot be BLKmode; use emit_block_move for that.
3266117395Skan
3267169689Skan   Return the last instruction emitted.  */
326890075Sobrien
3269169689Skanrtx
3270169689Skanemit_move_insn (rtx x, rtx y)
3271169689Skan{
3272169689Skan  enum machine_mode mode = GET_MODE (x);
3273169689Skan  rtx y_cst = NULL_RTX;
3274169689Skan  rtx last_insn, set;
327590075Sobrien
3276169689Skan  gcc_assert (mode != BLKmode
3277169689Skan	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
327890075Sobrien
3279169689Skan  if (CONSTANT_P (y))
3280169689Skan    {
3281169689Skan      if (optimize
3282169689Skan	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3283169689Skan	  && (last_insn = compress_float_constant (x, y)))
3284169689Skan	return last_insn;
328590075Sobrien
3286169689Skan      y_cst = y;
328790075Sobrien
3288169689Skan      if (!LEGITIMATE_CONSTANT_P (y))
328918334Speter	{
3290169689Skan	  y = force_const_mem (mode, y);
329118334Speter
3292169689Skan	  /* If the target's cannot_force_const_mem prevented the spill,
3293169689Skan	     assume that the target's move expanders will also take care
3294169689Skan	     of the non-legitimate constant.  */
3295169689Skan	  if (!y)
3296169689Skan	    y = y_cst;
3297169689Skan	  else
3298169689Skan	    y = use_anchored_address (y);
3299169689Skan	}
3300169689Skan    }
330118334Speter
3302169689Skan  /* If X or Y are memory references, verify that their addresses are valid
3303169689Skan     for the machine.  */
3304169689Skan  if (MEM_P (x)
3305169689Skan      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3306169689Skan	   && ! push_operand (x, GET_MODE (x)))
3307169689Skan	  || (flag_force_addr
3308169689Skan	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3309169689Skan    x = validize_mem (x);
331018334Speter
3311169689Skan  if (MEM_P (y)
3312169689Skan      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3313169689Skan	  || (flag_force_addr
3314169689Skan	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3315169689Skan    y = validize_mem (y);
331670635Sobrien
3317169689Skan  gcc_assert (mode != BLKmode);
331818334Speter
3319169689Skan  last_insn = emit_move_insn_1 (x, y);
332070635Sobrien
3321169689Skan  if (y_cst && REG_P (x)
3322169689Skan      && (set = single_set (last_insn)) != NULL_RTX
3323169689Skan      && SET_DEST (set) == x
3324169689Skan      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3325169689Skan    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
332670635Sobrien
3327169689Skan  return last_insn;
332818334Speter}
3329117395Skan
3330117395Skan/* If Y is representable exactly in a narrower mode, and the target can
3331117395Skan   perform the extension directly from constant or memory, then emit the
3332117395Skan   move as an extension.  */
3333117395Skan
3334117395Skanstatic rtx
3335132718Skancompress_float_constant (rtx x, rtx y)
3336117395Skan{
3337117395Skan  enum machine_mode dstmode = GET_MODE (x);
3338117395Skan  enum machine_mode orig_srcmode = GET_MODE (y);
3339117395Skan  enum machine_mode srcmode;
3340117395Skan  REAL_VALUE_TYPE r;
3341169689Skan  int oldcost, newcost;
3342117395Skan
3343117395Skan  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3344117395Skan
3345169689Skan  if (LEGITIMATE_CONSTANT_P (y))
3346169689Skan    oldcost = rtx_cost (y, SET);
3347169689Skan  else
3348169689Skan    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3349169689Skan
3350117395Skan  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3351117395Skan       srcmode != orig_srcmode;
3352117395Skan       srcmode = GET_MODE_WIDER_MODE (srcmode))
3353117395Skan    {
3354117395Skan      enum insn_code ic;
3355117395Skan      rtx trunc_y, last_insn;
3356117395Skan
3357117395Skan      /* Skip if the target can't extend this way.  */
3358117395Skan      ic = can_extend_p (dstmode, srcmode, 0);
3359117395Skan      if (ic == CODE_FOR_nothing)
3360117395Skan	continue;
3361117395Skan
3362117395Skan      /* Skip if the narrowed value isn't exact.  */
3363117395Skan      if (! exact_real_truncate (srcmode, &r))
3364117395Skan	continue;
3365117395Skan
3366117395Skan      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3367117395Skan
3368117395Skan      if (LEGITIMATE_CONSTANT_P (trunc_y))
3369117395Skan	{
3370117395Skan	  /* Skip if the target needs extra instructions to perform
3371117395Skan	     the extension.  */
3372117395Skan	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3373117395Skan	    continue;
3374169689Skan	  /* This is valid, but may not be cheaper than the original. */
3375169689Skan	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3376169689Skan	  if (oldcost < newcost)
3377169689Skan	    continue;
3378117395Skan	}
3379117395Skan      else if (float_extend_from_mem[dstmode][srcmode])
3380169689Skan	{
3381169689Skan	  trunc_y = force_const_mem (srcmode, trunc_y);
3382169689Skan	  /* This is valid, but may not be cheaper than the original. */
3383169689Skan	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3384169689Skan	  if (oldcost < newcost)
3385169689Skan	    continue;
3386169689Skan	  trunc_y = validize_mem (trunc_y);
3387169689Skan	}
3388117395Skan      else
3389117395Skan	continue;
3390117395Skan
3391169689Skan      /* For CSE's benefit, force the compressed constant pool entry
3392169689Skan	 into a new pseudo.  This constant may be used in different modes,
3393169689Skan	 and if not, combine will put things back together for us.  */
3394169689Skan      trunc_y = force_reg (srcmode, trunc_y);
3395117395Skan      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3396117395Skan      last_insn = get_last_insn ();
3397117395Skan
3398169689Skan      if (REG_P (x))
3399132718Skan	set_unique_reg_note (last_insn, REG_EQUAL, y);
3400117395Skan
3401117395Skan      return last_insn;
3402117395Skan    }
3403117395Skan
3404117395Skan  return NULL_RTX;
3405117395Skan}
340618334Speter
340718334Speter/* Pushing data onto the stack.  */
340818334Speter
340918334Speter/* Push a block of length SIZE (perhaps variable)
341018334Speter   and return an rtx to address the beginning of the block.
341118334Speter   The value may be virtual_outgoing_args_rtx.
341218334Speter
341318334Speter   EXTRA is the number of bytes of padding to push in addition to SIZE.
341418334Speter   BELOW nonzero means this padding comes at low addresses;
341518334Speter   otherwise, the padding comes at high addresses.  */
341618334Speter
341718334Speterrtx
3418132718Skanpush_block (rtx size, int extra, int below)
341918334Speter{
342090075Sobrien  rtx temp;
342118334Speter
342218334Speter  size = convert_modes (Pmode, ptr_mode, size, 1);
342318334Speter  if (CONSTANT_P (size))
342418334Speter    anti_adjust_stack (plus_constant (size, extra));
3425169689Skan  else if (REG_P (size) && extra == 0)
342618334Speter    anti_adjust_stack (size);
342718334Speter  else
342818334Speter    {
342990075Sobrien      temp = copy_to_mode_reg (Pmode, size);
343018334Speter      if (extra != 0)
343118334Speter	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
343218334Speter			     temp, 0, OPTAB_LIB_WIDEN);
343318334Speter      anti_adjust_stack (temp);
343418334Speter    }
343518334Speter
343690075Sobrien#ifndef STACK_GROWS_DOWNWARD
343790075Sobrien  if (0)
343818334Speter#else
343990075Sobrien  if (1)
344090075Sobrien#endif
344190075Sobrien    {
344290075Sobrien      temp = virtual_outgoing_args_rtx;
344390075Sobrien      if (extra != 0 && below)
344490075Sobrien	temp = plus_constant (temp, extra);
344590075Sobrien    }
344618334Speter  else
344790075Sobrien    {
344890075Sobrien      if (GET_CODE (size) == CONST_INT)
344990075Sobrien	temp = plus_constant (virtual_outgoing_args_rtx,
345090075Sobrien			      -INTVAL (size) - (below ? 0 : extra));
345190075Sobrien      else if (extra != 0 && !below)
345290075Sobrien	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
345390075Sobrien			     negate_rtx (Pmode, plus_constant (size, extra)));
345490075Sobrien      else
345590075Sobrien	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
345690075Sobrien			     negate_rtx (Pmode, size));
345790075Sobrien    }
345818334Speter
345918334Speter  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
346018334Speter}
346118334Speter
#ifdef PUSH_ROUNDING

/* Emit a single push insn pushing X of mode MODE, whose C type is TYPE
   (TYPE may be 0 when no type information is available).  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      pred = insn_data[(int) icode].operand[0].predicate;
      if (pred && !((*pred) (x, mode)))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }

  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
#else
      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode, add_optab,
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
#endif

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
355350397Sobrien
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  /* XINNER is the part of X that actually goes on the stack; it is
     adjusted below when a leading PARTIAL chunk goes into registers.  */
  xinner = x;

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      /* OFFSET is the in-word residue of the register part; USED is the
	 number of whole bytes handled by registers.  */
      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      /* BLKmode pushes must supply an explicit byte count.  */
      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  /* A NULL destination tells move_by_pieces to emit push insns.  */
	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  /* Recurse to push each word; the recursive calls never hit the
	     BLKmode or partial-in-registers cases again.  */
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  /* Padding in the direction of stack growth was deferred above; emit
     the single adjustment for it now.  */
  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
385418334Speter
385590075Sobrien/* Return X if X can be used as a subtarget in a sequence of arithmetic
385690075Sobrien   operations.  */
385790075Sobrien
385890075Sobrienstatic rtx
3859132718Skanget_subtarget (rtx x)
386090075Sobrien{
3861169689Skan  return (optimize
3862169689Skan          || x == 0
386390075Sobrien	   /* Only registers can be subtargets.  */
3864169689Skan	   || !REG_P (x)
386590075Sobrien	   /* Don't use hard regs to avoid extending their life.  */
386690075Sobrien	   || REGNO (x) < FIRST_PSEUDO_REGISTER
386790075Sobrien	  ? 0 : x);
386890075Sobrien}
386990075Sobrien
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.

   BITSIZE/BITPOS describe the bitfield within STR_RTX (the RTL for the
   containing object), MODE1 is the access mode found by
   get_inner_reference (VOIDmode for a genuine bitfield), TO is the lhs
   tree and SRC the rhs tree of the assignment.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  /* Only handle genuine bitfields no wider than a word, and refuse
     lhs trees with side effects or volatility.  */
  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  /* The rhs must be a binary expression of integer type.  */
  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  /* Require the "FIELD op= VAL" shape: the first operand of the rhs
     must be the lhs itself.  */
  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      /* Find a suitable access mode for the memory and re-point
	 STR_RTX/BITPOS at the word containing the field.  */
      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  /* A 1-bit add/subtract is an xor of the low bit of VALUE.  */
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
	{
	  /* Mask VALUE down to the field's width before shifting it
	     into place, so bits outside the field are untouched.  */
	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
			      - 1);
	  value = expand_and (GET_MODE (str_rtx), value, mask,
			      NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
4021169689Skan
4022169689Skan
/* Expand an assignment that stores the value of FROM into TO.  */

void
expand_assignment (tree to, tree from)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      /* Decompose the reference into its containing object (TEM), a
	 bit position/size within it, and a possibly variable OFFSET.  */
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
	{
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      /* The constant part of the position is fully aligned for
		 MODE1; fold it into the address now.  */
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
				   				   offset));
	}

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
	{
	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
	    {
	      /* Storing a whole complex value: must be at offset 0.  */
	      gcc_assert (bitpos == 0);
	      result = store_expr (from, to_rtx, false);
	    }
	  else
	    {
	      /* Storing one part: pick the real or imaginary half.  */
	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);

	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

	      /* Deal with volatile and readonly fields.  The former is only
		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	      if (component_uses_parent_alias_set (to))
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
					       to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				  TREE_TYPE (tem), get_alias_set (to));
	}

      if (result)
	preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      /* memmove handles the possible overlap; memcpy would not.  */
      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
425718334Speter
425818334Speter/* Generate code for computing expression EXP,
425918334Speter   and storing the value into TARGET.
426018334Speter
426118334Speter   If the mode is BLKmode then we may return TARGET itself.
426218334Speter   It turns out that in BLKmode it doesn't cause a problem.
426318334Speter   because C has no operators that could combine two different
426418334Speter   assignments into the same BLKmode object with different values
426518334Speter   with no sequence point.  Will other languages need this to
426618334Speter   be more thorough?
426718334Speter
4268169689Skan   If CALL_PARAM_P is nonzero, this is a store into a call param on the
4269117395Skan   stack, and block moves may need to be treated specially.  */
4270117395Skan
427118334Speterrtx
4272169689Skanstore_expr (tree exp, rtx target, int call_param_p)
427318334Speter{
427490075Sobrien  rtx temp;
4275132718Skan  rtx alt_rtl = NULL_RTX;
427618334Speter  int dont_return_target = 0;
427718334Speter
4278117395Skan  if (VOID_TYPE_P (TREE_TYPE (exp)))
4279117395Skan    {
4280117395Skan      /* C++ can generate ?: expressions with a throw expression in one
4281117395Skan	 branch and an rvalue in the other. Here, we resolve attempts to
4282132718Skan	 store the throw expression's nonexistent result.  */
4283169689Skan      gcc_assert (!call_param_p);
4284117395Skan      expand_expr (exp, const0_rtx, VOIDmode, 0);
4285117395Skan      return NULL_RTX;
4286117395Skan    }
428718334Speter  if (TREE_CODE (exp) == COMPOUND_EXPR)
428818334Speter    {
428918334Speter      /* Perform first part of compound expression, then assign from second
429018334Speter	 part.  */
4291117395Skan      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4292169689Skan		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4293169689Skan      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
429418334Speter    }
429518334Speter  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
429618334Speter    {
429718334Speter      /* For conditional expression, get safe form of the target.  Then
429818334Speter	 test the condition, doing the appropriate assignment on either
429918334Speter	 side.  This avoids the creation of unnecessary temporaries.
430018334Speter	 For non-BLKmode, it is more efficient not to do this.  */
430118334Speter
430218334Speter      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
430318334Speter
430418334Speter      do_pending_stack_adjust ();
430518334Speter      NO_DEFER_POP;
430618334Speter      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4307169689Skan      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
430818334Speter      emit_jump_insn (gen_jump (lab2));
430918334Speter      emit_barrier ();
431018334Speter      emit_label (lab1);
4311169689Skan      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
431218334Speter      emit_label (lab2);
431318334Speter      OK_DEFER_POP;
431450397Sobrien
4315169689Skan      return NULL_RTX;
431618334Speter    }
431718334Speter  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4318117395Skan    /* If this is a scalar in a register that is stored in a wider mode
431918334Speter       than the declared mode, compute the result into its declared mode
432018334Speter       and then convert to the wider mode.  Our value is the computed
432118334Speter       expression.  */
432218334Speter    {
432396263Sobrien      rtx inner_target = 0;
432496263Sobrien
4325169689Skan      /* We can do the conversion inside EXP, which will often result
4326169689Skan	 in some optimizations.  Do the conversion in two steps: first
4327169689Skan	 change the signedness, if needed, then the extend.  But don't
4328169689Skan	 do this if the type of EXP is a subtype of something else
4329169689Skan	 since then the conversion might involve more than just
4330169689Skan	 converting modes.  */
4331169689Skan      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4332169689Skan	  && TREE_TYPE (TREE_TYPE (exp)) == 0
4333169689Skan	  && (!lang_hooks.reduce_bit_field_operations
4334169689Skan	      || (GET_MODE_PRECISION (GET_MODE (target))
4335169689Skan		  == TYPE_PRECISION (TREE_TYPE (exp)))))
433618334Speter	{
4337169689Skan	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
433818334Speter	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4339169689Skan	    exp = fold_convert
4340169689Skan	      (lang_hooks.types.signed_or_unsigned_type
4341117395Skan	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
434218334Speter
4343169689Skan	  exp = fold_convert (lang_hooks.types.type_for_mode
4344169689Skan				(GET_MODE (SUBREG_REG (target)),
4345169689Skan				 SUBREG_PROMOTED_UNSIGNED_P (target)),
4346169689Skan			      exp);
434796263Sobrien
434896263Sobrien	  inner_target = SUBREG_REG (target);
434918334Speter	}
435090075Sobrien
4351117395Skan      temp = expand_expr (exp, inner_target, VOIDmode,
4352169689Skan			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
435318334Speter
435418334Speter      /* If TEMP is a VOIDmode constant, use convert_modes to make
435518334Speter	 sure that we properly convert it.  */
435618334Speter      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
435790075Sobrien	{
435890075Sobrien	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
435990075Sobrien				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
436090075Sobrien	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
436190075Sobrien			        GET_MODE (target), temp,
436290075Sobrien			        SUBREG_PROMOTED_UNSIGNED_P (target));
436390075Sobrien	}
436418334Speter
436518334Speter      convert_move (SUBREG_REG (target), temp,
436618334Speter		    SUBREG_PROMOTED_UNSIGNED_P (target));
436790075Sobrien
4368169689Skan      return NULL_RTX;
436918334Speter    }
437018334Speter  else
437118334Speter    {
4372132718Skan      temp = expand_expr_real (exp, target, GET_MODE (target),
4373169689Skan			       (call_param_p
4374132718Skan				? EXPAND_STACK_PARM : EXPAND_NORMAL),
4375132718Skan			       &alt_rtl);
437618334Speter      /* Return TARGET if it's a specified hardware register.
437718334Speter	 If TARGET is a volatile mem ref, either return TARGET
437818334Speter	 or return a reg copied *from* TARGET; ANSI requires this.
437918334Speter
438018334Speter	 Otherwise, if TEMP is not TARGET, return TEMP
438118334Speter	 if it is constant (for efficiency),
438218334Speter	 or if we really want the correct value.  */
4383169689Skan      if (!(target && REG_P (target)
438418334Speter	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4385169689Skan	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
438650397Sobrien	  && ! rtx_equal_p (temp, target)
4387169689Skan	  && CONSTANT_P (temp))
438818334Speter	dont_return_target = 1;
438918334Speter    }
439018334Speter
439118334Speter  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
439218334Speter     the same as that of TARGET, adjust the constant.  This is needed, for
439318334Speter     example, in case it is a CONST_DOUBLE and we want only a word-sized
439418334Speter     value.  */
439518334Speter  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
439618334Speter      && TREE_CODE (exp) != ERROR_MARK
439718334Speter      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
439818334Speter    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4399169689Skan			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
440018334Speter
440118334Speter  /* If value was not generated in the target, store it there.
4402132718Skan     Convert the value to TARGET's type first if necessary and emit the
4403132718Skan     pending incrementations that have been queued when expanding EXP.
4404132718Skan     Note that we cannot emit the whole queue blindly because this will
4405132718Skan     effectively disable the POST_INC optimization later.
4406132718Skan
440790075Sobrien     If TEMP and TARGET compare equal according to rtx_equal_p, but
440850397Sobrien     one or both of them are volatile memory refs, we have to distinguish
440950397Sobrien     two cases:
441050397Sobrien     - expand_expr has used TARGET.  In this case, we must not generate
441150397Sobrien       another copy.  This can be detected by TARGET being equal according
441250397Sobrien       to == .
441350397Sobrien     - expand_expr has not used TARGET - that means that the source just
441450397Sobrien       happens to have the same RTX form.  Since temp will have been created
441550397Sobrien       by expand_expr, it will compare unequal according to == .
441650397Sobrien       We must generate a copy in this case, to reach the correct number
441750397Sobrien       of volatile memory references.  */
441818334Speter
441950397Sobrien  if ((! rtx_equal_p (temp, target)
442050397Sobrien       || (temp != target && (side_effects_p (temp)
442150397Sobrien			      || side_effects_p (target))))
442290075Sobrien      && TREE_CODE (exp) != ERROR_MARK
4423132718Skan      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4424132718Skan	 but TARGET is not valid memory reference, TEMP will differ
4425132718Skan	 from TARGET although it is really the same location.  */
4426132718Skan      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4427169689Skan      /* If there's nothing to copy, don't bother.  Don't call
4428169689Skan	 expr_size unless necessary, because some front-ends (C++)
4429169689Skan	 expr_size-hook must not be given objects that are not
4430169689Skan	 supposed to be bit-copied or bit-initialized.  */
4431117395Skan      && expr_size (exp) != const0_rtx)
443218334Speter    {
443318334Speter      if (GET_MODE (temp) != GET_MODE (target)
443418334Speter	  && GET_MODE (temp) != VOIDmode)
443518334Speter	{
4436169689Skan	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
443718334Speter	  if (dont_return_target)
443818334Speter	    {
443918334Speter	      /* In this case, we will return TEMP,
444018334Speter		 so make sure it has the proper mode.
444118334Speter		 But don't forget to store the value into TARGET.  */
444218334Speter	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
444318334Speter	      emit_move_insn (target, temp);
444418334Speter	    }
4445259563Spfg	  else if (GET_MODE (target) == BLKmode)
4446259563Spfg	    emit_block_move (target, temp, expr_size (exp),
4447259563Spfg			     (call_param_p
4448259563Spfg			      ? BLOCK_OP_CALL_PARM
4449259563Spfg			      : BLOCK_OP_NORMAL));
445018334Speter	  else
445118334Speter	    convert_move (target, temp, unsignedp);
445218334Speter	}
445318334Speter
445418334Speter      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
445518334Speter	{
445690075Sobrien	  /* Handle copying a string constant into an array.  The string
445790075Sobrien	     constant may be shorter than the array.  So copy just the string's
445890075Sobrien	     actual length, and clear the rest.  First get the size of the data
445990075Sobrien	     type of the string, which is actually the size of the target.  */
446090075Sobrien	  rtx size = expr_size (exp);
446118334Speter
446218334Speter	  if (GET_CODE (size) == CONST_INT
446318334Speter	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4464117395Skan	    emit_block_move (target, temp, size,
4465169689Skan			     (call_param_p
4466117395Skan			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
446718334Speter	  else
446818334Speter	    {
446918334Speter	      /* Compute the size of the data to copy from the string.  */
447018334Speter	      tree copy_size
447118334Speter		= size_binop (MIN_EXPR,
447218334Speter			      make_tree (sizetype, size),
447390075Sobrien			      size_int (TREE_STRING_LENGTH (exp)));
4474117395Skan	      rtx copy_size_rtx
4475117395Skan		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4476169689Skan			       (call_param_p
4477117395Skan				? EXPAND_STACK_PARM : EXPAND_NORMAL));
447818334Speter	      rtx label = 0;
447918334Speter
448018334Speter	      /* Copy that much.  */
4481117395Skan	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4482169689Skan					       TYPE_UNSIGNED (sizetype));
4483117395Skan	      emit_block_move (target, temp, copy_size_rtx,
4484169689Skan			       (call_param_p
4485117395Skan				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
448618334Speter
448718334Speter	      /* Figure out how much is left in TARGET that we have to clear.
448818334Speter		 Do all calculations in ptr_mode.  */
448918334Speter	      if (GET_CODE (copy_size_rtx) == CONST_INT)
449018334Speter		{
449190075Sobrien		  size = plus_constant (size, -INTVAL (copy_size_rtx));
449290075Sobrien		  target = adjust_address (target, BLKmode,
449390075Sobrien					   INTVAL (copy_size_rtx));
449418334Speter		}
449518334Speter	      else
449618334Speter		{
449796263Sobrien		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
449818334Speter				       copy_size_rtx, NULL_RTX, 0,
449918334Speter				       OPTAB_LIB_WIDEN);
450018334Speter
450190075Sobrien#ifdef POINTERS_EXTEND_UNSIGNED
450290075Sobrien		  if (GET_MODE (copy_size_rtx) != Pmode)
4503117395Skan		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4504169689Skan						     TYPE_UNSIGNED (sizetype));
450590075Sobrien#endif
450690075Sobrien
450790075Sobrien		  target = offset_address (target, copy_size_rtx,
450890075Sobrien					   highest_pow2_factor (copy_size));
450918334Speter		  label = gen_label_rtx ();
451052284Sobrien		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
451190075Sobrien					   GET_MODE (size), 0, label);
451218334Speter		}
451318334Speter
451418334Speter	      if (size != const0_rtx)
4515169689Skan		clear_storage (target, size, BLOCK_OP_NORMAL);
451618334Speter
451718334Speter	      if (label)
451818334Speter		emit_label (label);
451918334Speter	    }
452018334Speter	}
452150397Sobrien      /* Handle calls that return values in multiple non-contiguous locations.
452250397Sobrien	 The Irix 6 ABI has examples of this.  */
452350397Sobrien      else if (GET_CODE (target) == PARALLEL)
4524132718Skan	emit_group_load (target, temp, TREE_TYPE (exp),
4525132718Skan			 int_size_in_bytes (TREE_TYPE (exp)));
452618334Speter      else if (GET_MODE (temp) == BLKmode)
4527117395Skan	emit_block_move (target, temp, expr_size (exp),
4528169689Skan			 (call_param_p
4529117395Skan			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
453018334Speter      else
4531169689Skan	{
4532169689Skan	  temp = force_operand (temp, target);
4533169689Skan	  if (temp != target)
4534169689Skan	    emit_move_insn (target, temp);
4535169689Skan	}
453618334Speter    }
453718334Speter
4538169689Skan  return NULL_RTX;
4539169689Skan}
4540169689Skan
4541169689Skan/* Helper for categorize_ctor_elements.  Identical interface.  */
454218334Speter
4543169689Skanstatic bool
4544169689Skancategorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4545169689Skan			    HOST_WIDE_INT *p_elt_count,
4546169689Skan			    bool *p_must_clear)
4547169689Skan{
4548169689Skan  unsigned HOST_WIDE_INT idx;
4549169689Skan  HOST_WIDE_INT nz_elts, elt_count;
4550169689Skan  tree value, purpose;
455118334Speter
4552169689Skan  /* Whether CTOR is a valid constant initializer, in accordance with what
4553169689Skan     initializer_constant_valid_p does.  If inferred from the constructor
4554169689Skan     elements, true until proven otherwise.  */
4555169689Skan  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4556169689Skan  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
455790075Sobrien
4558169689Skan  nz_elts = 0;
4559169689Skan  elt_count = 0;
4560169689Skan
4561169689Skan  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4562169689Skan    {
4563169689Skan      HOST_WIDE_INT mult;
4564169689Skan
4565169689Skan      mult = 1;
4566169689Skan      if (TREE_CODE (purpose) == RANGE_EXPR)
4567169689Skan	{
4568169689Skan	  tree lo_index = TREE_OPERAND (purpose, 0);
4569169689Skan	  tree hi_index = TREE_OPERAND (purpose, 1);
4570169689Skan
4571169689Skan	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4572169689Skan	    mult = (tree_low_cst (hi_index, 1)
4573169689Skan		    - tree_low_cst (lo_index, 1) + 1);
4574169689Skan	}
4575169689Skan
4576169689Skan      switch (TREE_CODE (value))
4577169689Skan	{
4578169689Skan	case CONSTRUCTOR:
4579169689Skan	  {
4580169689Skan	    HOST_WIDE_INT nz = 0, ic = 0;
4581259563Spfg
4582169689Skan	    bool const_elt_p
4583169689Skan	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4584169689Skan
4585169689Skan	    nz_elts += mult * nz;
4586169689Skan 	    elt_count += mult * ic;
4587169689Skan
4588169689Skan	    if (const_from_elts_p && const_p)
4589169689Skan	      const_p = const_elt_p;
4590169689Skan	  }
4591169689Skan	  break;
4592169689Skan
4593169689Skan	case INTEGER_CST:
4594169689Skan	case REAL_CST:
4595169689Skan	  if (!initializer_zerop (value))
4596169689Skan	    nz_elts += mult;
4597169689Skan	  elt_count += mult;
4598169689Skan	  break;
4599169689Skan
4600169689Skan	case STRING_CST:
4601169689Skan	  nz_elts += mult * TREE_STRING_LENGTH (value);
4602169689Skan	  elt_count += mult * TREE_STRING_LENGTH (value);
4603169689Skan	  break;
4604169689Skan
4605169689Skan	case COMPLEX_CST:
4606169689Skan	  if (!initializer_zerop (TREE_REALPART (value)))
4607169689Skan	    nz_elts += mult;
4608169689Skan	  if (!initializer_zerop (TREE_IMAGPART (value)))
4609169689Skan	    nz_elts += mult;
4610169689Skan	  elt_count += mult;
4611169689Skan	  break;
4612169689Skan
4613169689Skan	case VECTOR_CST:
4614169689Skan	  {
4615169689Skan	    tree v;
4616169689Skan	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4617169689Skan	      {
4618169689Skan		if (!initializer_zerop (TREE_VALUE (v)))
4619169689Skan		  nz_elts += mult;
4620169689Skan		elt_count += mult;
4621169689Skan	      }
4622169689Skan	  }
4623169689Skan	  break;
4624169689Skan
4625169689Skan	default:
4626169689Skan	  nz_elts += mult;
4627169689Skan	  elt_count += mult;
4628169689Skan
4629169689Skan	  if (const_from_elts_p && const_p)
4630169689Skan	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4631169689Skan		      != NULL_TREE;
4632169689Skan	  break;
4633169689Skan	}
4634169689Skan    }
4635169689Skan
4636169689Skan  if (!*p_must_clear
4637169689Skan      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4638169689Skan	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4639169689Skan    {
4640169689Skan      tree init_sub_type;
4641169689Skan      bool clear_this = true;
4642169689Skan
4643169689Skan      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4644169689Skan	{
4645169689Skan	  /* We don't expect more than one element of the union to be
4646169689Skan	     initialized.  Not sure what we should do otherwise... */
4647169689Skan          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4648169689Skan		      == 1);
4649169689Skan
4650169689Skan          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4651169689Skan						CONSTRUCTOR_ELTS (ctor),
4652169689Skan						0)->value);
4653169689Skan
4654169689Skan	  /* ??? We could look at each element of the union, and find the
4655169689Skan	     largest element.  Which would avoid comparing the size of the
4656169689Skan	     initialized element against any tail padding in the union.
4657169689Skan	     Doesn't seem worth the effort...  */
4658259563Spfg	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4659169689Skan				TYPE_SIZE (init_sub_type)) == 1)
4660169689Skan	    {
4661169689Skan	      /* And now we have to find out if the element itself is fully
4662169689Skan		 constructed.  E.g. for union { struct { int a, b; } s; } u
4663169689Skan		 = { .s = { .a = 1 } }.  */
4664169689Skan	      if (elt_count == count_type_elements (init_sub_type, false))
4665169689Skan		clear_this = false;
4666169689Skan	    }
4667169689Skan	}
4668169689Skan
4669169689Skan      *p_must_clear = clear_this;
4670169689Skan    }
4671169689Skan
4672169689Skan  *p_nz_elts += nz_elts;
4673169689Skan  *p_elt_count += elt_count;
4674169689Skan
4675169689Skan  return const_p;
467618334Speter}
467750397Sobrien
4678169689Skan/* Examine CTOR to discover:
4679169689Skan   * how many scalar fields are set to nonzero values,
4680169689Skan     and place it in *P_NZ_ELTS;
4681169689Skan   * how many scalar fields in total are in CTOR,
4682169689Skan     and place it in *P_ELT_COUNT.
4683169689Skan   * if a type is a union, and the initializer from the constructor
4684169689Skan     is not the largest element in the union, then set *p_must_clear.
4685169689Skan
4686169689Skan   Return whether or not CTOR is a valid static constant initializer, the same
4687169689Skan   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
4688169689Skan
4689169689Skanbool
4690169689Skancategorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4691169689Skan			  HOST_WIDE_INT *p_elt_count,
4692169689Skan			  bool *p_must_clear)
469350397Sobrien{
4694169689Skan  *p_nz_elts = 0;
4695169689Skan  *p_elt_count = 0;
4696169689Skan  *p_must_clear = false;
469750397Sobrien
4698169689Skan  return
4699169689Skan    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4700169689Skan}
4701169689Skan
4702169689Skan/* Count the number of scalars in TYPE.  Return -1 on overflow or
4703169689Skan   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
4704169689Skan   array member at the end of the structure.  */
4705169689Skan
4706169689SkanHOST_WIDE_INT
4707169689Skancount_type_elements (tree type, bool allow_flexarr)
4708169689Skan{
4709169689Skan  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4710169689Skan  switch (TREE_CODE (type))
471150397Sobrien    {
4712169689Skan    case ARRAY_TYPE:
4713169689Skan      {
4714169689Skan	tree telts = array_type_nelts (type);
4715169689Skan	if (telts && host_integerp (telts, 1))
4716169689Skan	  {
4717169689Skan	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4718169689Skan	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4719169689Skan	    if (n == 0)
4720169689Skan	      return 0;
4721169689Skan	    else if (max / n > m)
4722169689Skan	      return n * m;
4723169689Skan	  }
4724169689Skan	return -1;
4725169689Skan      }
472650397Sobrien
4727169689Skan    case RECORD_TYPE:
4728169689Skan      {
4729169689Skan	HOST_WIDE_INT n = 0, t;
4730169689Skan	tree f;
473150397Sobrien
4732169689Skan	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4733169689Skan	  if (TREE_CODE (f) == FIELD_DECL)
4734169689Skan	    {
4735169689Skan	      t = count_type_elements (TREE_TYPE (f), false);
4736169689Skan	      if (t < 0)
4737169689Skan		{
4738169689Skan		  /* Check for structures with flexible array member.  */
4739169689Skan		  tree tf = TREE_TYPE (f);
4740169689Skan		  if (allow_flexarr
4741169689Skan		      && TREE_CHAIN (f) == NULL
4742169689Skan		      && TREE_CODE (tf) == ARRAY_TYPE
4743169689Skan		      && TYPE_DOMAIN (tf)
4744169689Skan		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4745169689Skan		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4746169689Skan		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4747169689Skan		      && int_size_in_bytes (type) >= 0)
4748169689Skan		    break;
474950397Sobrien
4750169689Skan		  return -1;
4751169689Skan		}
4752169689Skan	      n += t;
4753169689Skan	    }
475450397Sobrien
4755169689Skan	return n;
4756169689Skan      }
475796263Sobrien
4758169689Skan    case UNION_TYPE:
4759169689Skan    case QUAL_UNION_TYPE:
4760235623Spfg      return -1;
476196263Sobrien
4762169689Skan    case COMPLEX_TYPE:
4763169689Skan      return 2;
476450397Sobrien
4765169689Skan    case VECTOR_TYPE:
4766169689Skan      return TYPE_VECTOR_SUBPARTS (type);
4767169689Skan
4768169689Skan    case INTEGER_TYPE:
4769169689Skan    case REAL_TYPE:
4770169689Skan    case ENUMERAL_TYPE:
4771169689Skan    case BOOLEAN_TYPE:
4772169689Skan    case POINTER_TYPE:
4773261188Spfg    /* APPLE LOCAL radar 5732232 - blocks */
4774261188Spfg    case BLOCK_POINTER_TYPE:
4775169689Skan    case OFFSET_TYPE:
4776169689Skan    case REFERENCE_TYPE:
477750397Sobrien      return 1;
477890075Sobrien
4779169689Skan    case VOID_TYPE:
4780169689Skan    case METHOD_TYPE:
4781169689Skan    case FUNCTION_TYPE:
4782169689Skan    case LANG_TYPE:
478350397Sobrien    default:
4784169689Skan      gcc_unreachable ();
478550397Sobrien    }
478650397Sobrien}
478750397Sobrien
478850397Sobrien/* Return 1 if EXP contains mostly (3/4)  zeros.  */
478950397Sobrien
4790169689Skanstatic int
4791132718Skanmostly_zeros_p (tree exp)
479250397Sobrien{
479350397Sobrien  if (TREE_CODE (exp) == CONSTRUCTOR)
4794169689Skan
479550397Sobrien    {
4796169689Skan      HOST_WIDE_INT nz_elts, count, elts;
4797169689Skan      bool must_clear;
479850397Sobrien
4799169689Skan      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4800169689Skan      if (must_clear)
4801169689Skan	return 1;
4802169689Skan
4803169689Skan      elts = count_type_elements (TREE_TYPE (exp), false);
4804169689Skan
4805169689Skan      return nz_elts < elts / 4;
480650397Sobrien    }
480750397Sobrien
4808169689Skan  return initializer_zerop (exp);
480950397Sobrien}
4810169689Skan
4811169689Skan/* Return 1 if EXP contains all zeros.  */
4812169689Skan
4813169689Skanstatic int
4814169689Skanall_zeros_p (tree exp)
4815169689Skan{
4816169689Skan  if (TREE_CODE (exp) == CONSTRUCTOR)
4817169689Skan
4818169689Skan    {
4819169689Skan      HOST_WIDE_INT nz_elts, count;
4820169689Skan      bool must_clear;
4821169689Skan
4822169689Skan      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4823169689Skan      return nz_elts == 0;
4824169689Skan    }
4825169689Skan
4826169689Skan  return initializer_zerop (exp);
4827169689Skan}
482850397Sobrien
482950397Sobrien/* Helper function for store_constructor.
483050397Sobrien   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
483150397Sobrien   TYPE is the type of the CONSTRUCTOR, not the element type.
483250397Sobrien   CLEARED is as for store_constructor.
483390075Sobrien   ALIAS_SET is the alias set to use for any stores.
483450397Sobrien
483550397Sobrien   This provides a recursive shortcut back to store_constructor when it isn't
483650397Sobrien   necessary to go through store_field.  This is so that we can pass through
483750397Sobrien   the cleared field to let store_constructor know that we may not have to
483850397Sobrien   clear a substructure if the outer structure has already been cleared.  */
483950397Sobrien
484050397Sobrienstatic void
4841132718Skanstore_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4842132718Skan			 HOST_WIDE_INT bitpos, enum machine_mode mode,
4843132718Skan			 tree exp, tree type, int cleared, int alias_set)
484450397Sobrien{
484550397Sobrien  if (TREE_CODE (exp) == CONSTRUCTOR
4846169689Skan      /* We can only call store_constructor recursively if the size and
4847169689Skan	 bit position are on a byte boundary.  */
484850397Sobrien      && bitpos % BITS_PER_UNIT == 0
4849169689Skan      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4850117395Skan      /* If we have a nonzero bitpos for a register target, then we just
485150397Sobrien	 let store_field do the bitfield handling.  This is unlikely to
485250397Sobrien	 generate unnecessary clear instructions anyways.  */
4853169689Skan      && (bitpos == 0 || MEM_P (target)))
485450397Sobrien    {
4855169689Skan      if (MEM_P (target))
485690075Sobrien	target
485790075Sobrien	  = adjust_address (target,
485890075Sobrien			    GET_MODE (target) == BLKmode
485990075Sobrien			    || 0 != (bitpos
486090075Sobrien				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
486190075Sobrien			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
486290075Sobrien
486390075Sobrien
486490075Sobrien      /* Update the alias set, if required.  */
4865169689Skan      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
486690075Sobrien	  && MEM_ALIAS_SET (target) != 0)
486790075Sobrien	{
486890075Sobrien	  target = copy_rtx (target);
486990075Sobrien	  set_mem_alias_set (target, alias_set);
487090075Sobrien	}
487190075Sobrien
487290075Sobrien      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
487350397Sobrien    }
487450397Sobrien  else
4875169689Skan    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
487650397Sobrien}
487750397Sobrien
487818334Speter/* Store the value of constructor EXP into the rtx TARGET.
487990075Sobrien   TARGET is either a REG or a MEM; we know it cannot conflict, since
488090075Sobrien   safe_from_p has been called.
488190075Sobrien   CLEARED is true if TARGET is known to have been zero'd.
488290075Sobrien   SIZE is the number of bytes of TARGET we are allowed to modify: this
488390075Sobrien   may not be the same as the size of EXP if we are assigning to a field
488490075Sobrien   which has been packed to exclude padding bits.  */
488518334Speter
488618334Speterstatic void
4887132718Skanstore_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
488818334Speter{
488918334Speter  tree type = TREE_TYPE (exp);
489090075Sobrien#ifdef WORD_REGISTER_OPERATIONS
489190075Sobrien  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
489218334Speter#endif
489318334Speter
4894169689Skan  switch (TREE_CODE (type))
489518334Speter    {
4896169689Skan    case RECORD_TYPE:
4897169689Skan    case UNION_TYPE:
4898169689Skan    case QUAL_UNION_TYPE:
4899169689Skan      {
4900169689Skan	unsigned HOST_WIDE_INT idx;
4901169689Skan	tree field, value;
490218334Speter
4903169689Skan	/* If size is zero or the target is already cleared, do nothing.  */
4904169689Skan	if (size == 0 || cleared)
490590075Sobrien	  cleared = 1;
4906169689Skan	/* We either clear the aggregate or indicate the value is dead.  */
4907169689Skan	else if ((TREE_CODE (type) == UNION_TYPE
4908169689Skan		  || TREE_CODE (type) == QUAL_UNION_TYPE)
4909169689Skan		 && ! CONSTRUCTOR_ELTS (exp))
4910169689Skan	  /* If the constructor is empty, clear the union.  */
4911169689Skan	  {
4912169689Skan	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4913169689Skan	    cleared = 1;
4914169689Skan	  }
491518334Speter
4916169689Skan	/* If we are building a static constructor into a register,
4917169689Skan	   set the initial value as zero so we can fold the value into
4918169689Skan	   a constant.  But if more than one register is involved,
4919169689Skan	   this probably loses.  */
4920169689Skan	else if (REG_P (target) && TREE_STATIC (exp)
4921169689Skan		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4922169689Skan	  {
4923169689Skan	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4924169689Skan	    cleared = 1;
4925169689Skan	  }
492650397Sobrien
4927169689Skan        /* If the constructor has fewer fields than the structure or
4928169689Skan	   if we are initializing the structure to mostly zeros, clear
4929169689Skan	   the whole structure first.  Don't do this if TARGET is a
4930169689Skan	   register whose mode size isn't equal to SIZE since
4931169689Skan	   clear_storage can't handle this case.  */
4932169689Skan	else if (size > 0
4933169689Skan		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4934169689Skan		      != fields_length (type))
4935169689Skan		     || mostly_zeros_p (exp))
4936169689Skan		 && (!REG_P (target)
4937169689Skan		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4938169689Skan			 == size)))
4939169689Skan	  {
4940169689Skan	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4941169689Skan	    cleared = 1;
4942169689Skan	  }
4943132718Skan
4944169689Skan	if (! cleared)
4945169689Skan	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4946132718Skan
4947169689Skan	/* Store each element of the constructor into the
4948169689Skan	   corresponding field of TARGET.  */
4949169689Skan	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4950169689Skan	  {
4951169689Skan	    enum machine_mode mode;
4952169689Skan	    HOST_WIDE_INT bitsize;
4953169689Skan	    HOST_WIDE_INT bitpos = 0;
4954169689Skan	    tree offset;
4955169689Skan	    rtx to_rtx = target;
4956259563Spfg
4957169689Skan	    /* Just ignore missing fields.  We cleared the whole
4958169689Skan	       structure, above, if any fields are missing.  */
4959169689Skan	    if (field == 0)
4960169689Skan	      continue;
4961259563Spfg
4962169689Skan	    if (cleared && initializer_zerop (value))
4963169689Skan	      continue;
4964259563Spfg
4965169689Skan	    if (host_integerp (DECL_SIZE (field), 1))
4966169689Skan	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
4967169689Skan	    else
4968169689Skan	      bitsize = -1;
4969259563Spfg
4970169689Skan	    mode = DECL_MODE (field);
4971169689Skan	    if (DECL_BIT_FIELD (field))
4972169689Skan	      mode = VOIDmode;
4973259563Spfg
4974169689Skan	    offset = DECL_FIELD_OFFSET (field);
4975169689Skan	    if (host_integerp (offset, 0)
4976169689Skan		&& host_integerp (bit_position (field), 0))
4977169689Skan	      {
4978169689Skan		bitpos = int_bit_position (field);
4979169689Skan		offset = 0;
4980169689Skan	      }
4981169689Skan	    else
4982169689Skan	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4983259563Spfg
4984169689Skan	    if (offset)
4985169689Skan	      {
4986169689Skan		rtx offset_rtx;
4987259563Spfg
4988169689Skan		offset
4989169689Skan		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4990169689Skan						    make_tree (TREE_TYPE (exp),
4991169689Skan							       target));
499290075Sobrien
4993169689Skan		offset_rtx = expand_normal (offset);
4994169689Skan		gcc_assert (MEM_P (to_rtx));
4995259563Spfg
499650397Sobrien#ifdef POINTERS_EXTEND_UNSIGNED
4997169689Skan		if (GET_MODE (offset_rtx) != Pmode)
4998169689Skan		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
499996263Sobrien#else
5000169689Skan		if (GET_MODE (offset_rtx) != ptr_mode)
5001169689Skan		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
500250397Sobrien#endif
500350397Sobrien
5004169689Skan		to_rtx = offset_address (to_rtx, offset_rtx,
5005169689Skan					 highest_pow2_factor (offset));
5006169689Skan	      }
500790075Sobrien
500852284Sobrien#ifdef WORD_REGISTER_OPERATIONS
5009169689Skan	    /* If this initializes a field that is smaller than a
5010169689Skan	       word, at the start of a word, try to widen it to a full
5011169689Skan	       word.  This special case allows us to output C++ member
5012169689Skan	       function initializations in a form that the optimizers
5013169689Skan	       can understand.  */
5014169689Skan	    if (REG_P (target)
5015169689Skan		&& bitsize < BITS_PER_WORD
5016169689Skan		&& bitpos % BITS_PER_WORD == 0
5017169689Skan		&& GET_MODE_CLASS (mode) == MODE_INT
5018169689Skan		&& TREE_CODE (value) == INTEGER_CST
5019169689Skan		&& exp_size >= 0
5020169689Skan		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5021169689Skan	      {
5022169689Skan		tree type = TREE_TYPE (value);
5023259563Spfg
5024169689Skan		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5025169689Skan		  {
5026169689Skan		    type = lang_hooks.types.type_for_size
5027169689Skan		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5028169689Skan		    value = fold_convert (type, value);
5029169689Skan		  }
5030259563Spfg
5031169689Skan		if (BYTES_BIG_ENDIAN)
5032169689Skan		  value
5033169689Skan		   = fold_build2 (LSHIFT_EXPR, type, value,
5034169689Skan				   build_int_cst (type,
5035169689Skan						  BITS_PER_WORD - bitsize));
5036169689Skan		bitsize = BITS_PER_WORD;
5037169689Skan		mode = word_mode;
5038169689Skan	      }
503952284Sobrien#endif
504090075Sobrien
5041169689Skan	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5042169689Skan		&& DECL_NONADDRESSABLE_P (field))
5043169689Skan	      {
5044169689Skan		to_rtx = copy_rtx (to_rtx);
5045169689Skan		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5046169689Skan	      }
5047259563Spfg
5048169689Skan	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5049169689Skan				     value, type, cleared,
5050169689Skan				     get_alias_set (TREE_TYPE (field)));
5051169689Skan	  }
5052169689Skan	break;
5053169689Skan      }
5054169689Skan    case ARRAY_TYPE:
5055169689Skan      {
5056169689Skan	tree value, index;
5057169689Skan	unsigned HOST_WIDE_INT i;
5058169689Skan	int need_to_clear;
5059169689Skan	tree domain;
5060169689Skan	tree elttype = TREE_TYPE (type);
5061169689Skan	int const_bounds_p;
5062169689Skan	HOST_WIDE_INT minelt = 0;
5063169689Skan	HOST_WIDE_INT maxelt = 0;
506490075Sobrien
5065169689Skan	domain = TYPE_DOMAIN (type);
5066169689Skan	const_bounds_p = (TYPE_MIN_VALUE (domain)
5067169689Skan			  && TYPE_MAX_VALUE (domain)
5068169689Skan			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5069169689Skan			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
507018334Speter
5071169689Skan	/* If we have constant bounds for the range of the type, get them.  */
5072169689Skan	if (const_bounds_p)
5073169689Skan	  {
5074169689Skan	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5075169689Skan	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5076169689Skan	  }
5077132718Skan
5078169689Skan	/* If the constructor has fewer elements than the array, clear
5079169689Skan           the whole array first.  Similarly if this is static
5080169689Skan           constructor of a non-BLKmode object.  */
5081169689Skan	if (cleared)
5082169689Skan	  need_to_clear = 0;
5083169689Skan	else if (REG_P (target) && TREE_STATIC (exp))
5084169689Skan	  need_to_clear = 1;
5085169689Skan	else
5086169689Skan	  {
5087169689Skan	    unsigned HOST_WIDE_INT idx;
5088169689Skan	    tree index, value;
5089169689Skan	    HOST_WIDE_INT count = 0, zero_count = 0;
5090169689Skan	    need_to_clear = ! const_bounds_p;
5091259563Spfg
5092169689Skan	    /* This loop is a more accurate version of the loop in
5093169689Skan	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5094169689Skan	       is also needed to check for missing elements.  */
5095169689Skan	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5096169689Skan	      {
5097169689Skan		HOST_WIDE_INT this_node_count;
5098132718Skan
5099169689Skan		if (need_to_clear)
5100169689Skan		  break;
5101259563Spfg
5102169689Skan		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5103169689Skan		  {
5104169689Skan		    tree lo_index = TREE_OPERAND (index, 0);
5105169689Skan		    tree hi_index = TREE_OPERAND (index, 1);
5106259563Spfg
5107169689Skan		    if (! host_integerp (lo_index, 1)
5108169689Skan			|| ! host_integerp (hi_index, 1))
5109169689Skan		      {
5110169689Skan			need_to_clear = 1;
5111169689Skan			break;
5112169689Skan		      }
5113259563Spfg
5114169689Skan		    this_node_count = (tree_low_cst (hi_index, 1)
5115169689Skan				       - tree_low_cst (lo_index, 1) + 1);
5116169689Skan		  }
5117169689Skan		else
5118169689Skan		  this_node_count = 1;
5119259563Spfg
5120169689Skan		count += this_node_count;
5121169689Skan		if (mostly_zeros_p (value))
5122169689Skan		  zero_count += this_node_count;
5123169689Skan	      }
5124259563Spfg
5125169689Skan	    /* Clear the entire array first if there are any missing
5126169689Skan	       elements, or if the incidence of zero elements is >=
5127169689Skan	       75%.  */
5128169689Skan	    if (! need_to_clear
5129169689Skan		&& (count < maxelt - minelt + 1
5130169689Skan		    || 4 * zero_count >= 3 * count))
5131169689Skan	      need_to_clear = 1;
5132169689Skan	  }
5133259563Spfg
5134169689Skan	if (need_to_clear && size > 0)
5135169689Skan	  {
5136169689Skan	    if (REG_P (target))
5137169689Skan	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5138169689Skan	    else
5139169689Skan	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5140169689Skan	    cleared = 1;
5141169689Skan	  }
514296263Sobrien
5143169689Skan	if (!cleared && REG_P (target))
5144169689Skan	  /* Inform later passes that the old value is dead.  */
5145169689Skan	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
514696263Sobrien
5147169689Skan	/* Store each element of the constructor into the
5148169689Skan	   corresponding element of TARGET, determined by counting the
5149169689Skan	   elements.  */
5150169689Skan	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5151169689Skan	  {
5152169689Skan	    enum machine_mode mode;
5153169689Skan	    HOST_WIDE_INT bitsize;
5154169689Skan	    HOST_WIDE_INT bitpos;
5155169689Skan	    int unsignedp;
5156169689Skan	    rtx xtarget = target;
5157259563Spfg
5158169689Skan	    if (cleared && initializer_zerop (value))
5159169689Skan	      continue;
5160259563Spfg
5161169689Skan	    unsignedp = TYPE_UNSIGNED (elttype);
5162169689Skan	    mode = TYPE_MODE (elttype);
5163169689Skan	    if (mode == BLKmode)
5164169689Skan	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5165169689Skan			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5166169689Skan			 : -1);
5167169689Skan	    else
5168169689Skan	      bitsize = GET_MODE_BITSIZE (mode);
5169259563Spfg
5170169689Skan	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5171169689Skan	      {
5172169689Skan		tree lo_index = TREE_OPERAND (index, 0);
5173169689Skan		tree hi_index = TREE_OPERAND (index, 1);
5174169689Skan		rtx index_r, pos_rtx;
5175169689Skan		HOST_WIDE_INT lo, hi, count;
5176169689Skan		tree position;
5177259563Spfg
5178169689Skan		/* If the range is constant and "small", unroll the loop.  */
5179169689Skan		if (const_bounds_p
5180169689Skan		    && host_integerp (lo_index, 0)
5181169689Skan		    && host_integerp (hi_index, 0)
5182169689Skan		    && (lo = tree_low_cst (lo_index, 0),
5183169689Skan			hi = tree_low_cst (hi_index, 0),
5184169689Skan			count = hi - lo + 1,
5185169689Skan			(!MEM_P (target)
5186169689Skan			 || count <= 2
5187169689Skan			 || (host_integerp (TYPE_SIZE (elttype), 1)
5188169689Skan			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5189169689Skan				 <= 40 * 8)))))
5190169689Skan		  {
5191169689Skan		    lo -= minelt;  hi -= minelt;
5192169689Skan		    for (; lo <= hi; lo++)
5193169689Skan		      {
5194169689Skan			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5195259563Spfg
5196169689Skan			if (MEM_P (target)
5197169689Skan			    && !MEM_KEEP_ALIAS_SET_P (target)
5198169689Skan			    && TREE_CODE (type) == ARRAY_TYPE
5199169689Skan			    && TYPE_NONALIASED_COMPONENT (type))
5200169689Skan			  {
5201169689Skan			    target = copy_rtx (target);
5202169689Skan			    MEM_KEEP_ALIAS_SET_P (target) = 1;
5203169689Skan			  }
5204259563Spfg
5205169689Skan			store_constructor_field
5206169689Skan			  (target, bitsize, bitpos, mode, value, type, cleared,
5207169689Skan			   get_alias_set (elttype));
5208169689Skan		      }
5209169689Skan		  }
5210169689Skan		else
5211169689Skan		  {
5212169689Skan		    rtx loop_start = gen_label_rtx ();
5213169689Skan		    rtx loop_end = gen_label_rtx ();
5214169689Skan		    tree exit_cond;
5215259563Spfg
5216169689Skan		    expand_normal (hi_index);
5217169689Skan		    unsignedp = TYPE_UNSIGNED (domain);
5218259563Spfg
5219169689Skan		    index = build_decl (VAR_DECL, NULL_TREE, domain);
5220259563Spfg
5221169689Skan		    index_r
5222169689Skan		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5223169689Skan						   &unsignedp, 0));
5224169689Skan		    SET_DECL_RTL (index, index_r);
5225169689Skan		    store_expr (lo_index, index_r, 0);
5226259563Spfg
5227169689Skan		    /* Build the head of the loop.  */
5228169689Skan		    do_pending_stack_adjust ();
5229169689Skan		    emit_label (loop_start);
523090075Sobrien
5231169689Skan		    /* Assign value to element index.  */
5232169689Skan		    position =
5233169689Skan		      fold_convert (ssizetype,
5234169689Skan				    fold_build2 (MINUS_EXPR,
5235169689Skan						 TREE_TYPE (index),
5236169689Skan						 index,
5237169689Skan						 TYPE_MIN_VALUE (domain)));
523890075Sobrien
5239169689Skan		    position =
5240169689Skan			size_binop (MULT_EXPR, position,
5241169689Skan				    fold_convert (ssizetype,
5242169689Skan						  TYPE_SIZE_UNIT (elttype)));
5243259563Spfg
5244169689Skan		    pos_rtx = expand_normal (position);
5245169689Skan		    xtarget = offset_address (target, pos_rtx,
5246169689Skan					      highest_pow2_factor (position));
5247169689Skan		    xtarget = adjust_address (xtarget, mode, 0);
5248169689Skan		    if (TREE_CODE (value) == CONSTRUCTOR)
5249169689Skan		      store_constructor (value, xtarget, cleared,
5250169689Skan					 bitsize / BITS_PER_UNIT);
5251169689Skan		    else
5252169689Skan		      store_expr (value, xtarget, 0);
525390075Sobrien
5254169689Skan		    /* Generate a conditional jump to exit the loop.  */
5255169689Skan		    exit_cond = build2 (LT_EXPR, integer_type_node,
5256169689Skan					index, hi_index);
5257169689Skan		    jumpif (exit_cond, loop_end);
5258259563Spfg
5259169689Skan		    /* Update the loop counter, and jump to the head of
5260169689Skan		       the loop.  */
5261169689Skan		    expand_assignment (index,
5262169689Skan				       build2 (PLUS_EXPR, TREE_TYPE (index),
5263169689Skan					       index, integer_one_node));
5264259563Spfg
5265169689Skan		    emit_jump (loop_start);
5266259563Spfg
5267169689Skan		    /* Build the end of the loop.  */
5268169689Skan		    emit_label (loop_end);
5269169689Skan		  }
5270169689Skan	      }
5271169689Skan	    else if ((index != 0 && ! host_integerp (index, 0))
5272169689Skan		     || ! host_integerp (TYPE_SIZE (elttype), 1))
5273169689Skan	      {
5274169689Skan		tree position;
5275259563Spfg
5276169689Skan		if (index == 0)
5277169689Skan		  index = ssize_int (1);
5278259563Spfg
5279169689Skan		if (minelt)
5280169689Skan		  index = fold_convert (ssizetype,
5281169689Skan					fold_build2 (MINUS_EXPR,
5282169689Skan						     TREE_TYPE (index),
5283169689Skan						     index,
5284169689Skan						     TYPE_MIN_VALUE (domain)));
5285259563Spfg
5286169689Skan		position =
5287169689Skan		  size_binop (MULT_EXPR, index,
5288169689Skan			      fold_convert (ssizetype,
5289169689Skan					    TYPE_SIZE_UNIT (elttype)));
5290169689Skan		xtarget = offset_address (target,
5291169689Skan					  expand_normal (position),
5292169689Skan					  highest_pow2_factor (position));
5293169689Skan		xtarget = adjust_address (xtarget, mode, 0);
5294169689Skan		store_expr (value, xtarget, 0);
5295169689Skan	      }
5296169689Skan	    else
5297169689Skan	      {
5298169689Skan		if (index != 0)
5299169689Skan		  bitpos = ((tree_low_cst (index, 0) - minelt)
5300169689Skan			    * tree_low_cst (TYPE_SIZE (elttype), 1));
5301169689Skan		else
5302169689Skan		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5303259563Spfg
5304169689Skan		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5305169689Skan		    && TREE_CODE (type) == ARRAY_TYPE
5306169689Skan		    && TYPE_NONALIASED_COMPONENT (type))
5307169689Skan		  {
5308169689Skan		    target = copy_rtx (target);
5309169689Skan		    MEM_KEEP_ALIAS_SET_P (target) = 1;
5310169689Skan		  }
5311169689Skan		store_constructor_field (target, bitsize, bitpos, mode, value,
5312169689Skan					 type, cleared, get_alias_set (elttype));
5313169689Skan	      }
5314169689Skan	  }
5315169689Skan	break;
5316169689Skan      }
531790075Sobrien
5318169689Skan    case VECTOR_TYPE:
5319169689Skan      {
5320169689Skan	unsigned HOST_WIDE_INT idx;
5321169689Skan	constructor_elt *ce;
5322169689Skan	int i;
5323169689Skan	int need_to_clear;
5324169689Skan	int icode = 0;
5325169689Skan	tree elttype = TREE_TYPE (type);
5326169689Skan	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5327169689Skan	enum machine_mode eltmode = TYPE_MODE (elttype);
5328169689Skan	HOST_WIDE_INT bitsize;
5329169689Skan	HOST_WIDE_INT bitpos;
5330169689Skan	rtvec vector = NULL;
5331169689Skan	unsigned n_elts;
5332259563Spfg
5333169689Skan	gcc_assert (eltmode != BLKmode);
5334259563Spfg
5335169689Skan	n_elts = TYPE_VECTOR_SUBPARTS (type);
5336169689Skan	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5337169689Skan	  {
5338169689Skan	    enum machine_mode mode = GET_MODE (target);
5339259563Spfg
5340169689Skan	    icode = (int) vec_init_optab->handlers[mode].insn_code;
5341169689Skan	    if (icode != CODE_FOR_nothing)
5342169689Skan	      {
5343169689Skan		unsigned int i;
5344259563Spfg
5345169689Skan		vector = rtvec_alloc (n_elts);
5346169689Skan		for (i = 0; i < n_elts; i++)
5347169689Skan		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5348169689Skan	      }
5349169689Skan	  }
5350259563Spfg
5351169689Skan	/* If the constructor has fewer elements than the vector,
5352169689Skan	   clear the whole array first.  Similarly if this is static
5353169689Skan	   constructor of a non-BLKmode object.  */
5354169689Skan	if (cleared)
5355169689Skan	  need_to_clear = 0;
5356169689Skan	else if (REG_P (target) && TREE_STATIC (exp))
5357169689Skan	  need_to_clear = 1;
5358169689Skan	else
5359169689Skan	  {
5360169689Skan	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5361169689Skan	    tree value;
5362259563Spfg
5363169689Skan	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5364169689Skan	      {
5365169689Skan		int n_elts_here = tree_low_cst
5366169689Skan		  (int_const_binop (TRUNC_DIV_EXPR,
5367169689Skan				    TYPE_SIZE (TREE_TYPE (value)),
5368169689Skan				    TYPE_SIZE (elttype), 0), 1);
5369259563Spfg
5370169689Skan		count += n_elts_here;
5371169689Skan		if (mostly_zeros_p (value))
5372169689Skan		  zero_count += n_elts_here;
5373169689Skan	      }
537490075Sobrien
5375169689Skan	    /* Clear the entire vector first if there are any missing elements,
5376169689Skan	       or if the incidence of zero elements is >= 75%.  */
5377169689Skan	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5378169689Skan	  }
5379259563Spfg
5380169689Skan	if (need_to_clear && size > 0 && !vector)
5381169689Skan	  {
5382169689Skan	    if (REG_P (target))
5383169689Skan	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5384169689Skan	    else
5385169689Skan	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5386169689Skan	    cleared = 1;
5387169689Skan	  }
5388259563Spfg
538918334Speter	/* Inform later passes that the old value is dead.  */
5390169689Skan	if (!cleared && !vector && REG_P (target))
5391169689Skan	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
539218334Speter
5393169689Skan        /* Store each element of the constructor into the corresponding
5394169689Skan	   element of TARGET, determined by counting the elements.  */
5395169689Skan	for (idx = 0, i = 0;
5396169689Skan	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5397169689Skan	     idx++, i += bitsize / elt_size)
5398169689Skan	  {
5399169689Skan	    HOST_WIDE_INT eltpos;
5400169689Skan	    tree value = ce->value;
5401259563Spfg
5402169689Skan	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5403169689Skan	    if (cleared && initializer_zerop (value))
5404169689Skan	      continue;
5405259563Spfg
5406169689Skan	    if (ce->index)
5407169689Skan	      eltpos = tree_low_cst (ce->index, 1);
5408169689Skan	    else
5409169689Skan	      eltpos = i;
5410259563Spfg
5411169689Skan	    if (vector)
5412169689Skan	      {
5413169689Skan	        /* Vector CONSTRUCTORs should only be built from smaller
5414169689Skan		   vectors in the case of BLKmode vectors.  */
5415169689Skan		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5416169689Skan		RTVEC_ELT (vector, eltpos)
5417169689Skan		  = expand_normal (value);
5418169689Skan	      }
5419169689Skan	    else
5420169689Skan	      {
5421169689Skan		enum machine_mode value_mode =
5422169689Skan		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5423169689Skan		  ? TYPE_MODE (TREE_TYPE (value))
5424169689Skan		  : eltmode;
5425169689Skan		bitpos = eltpos * elt_size;
5426169689Skan		store_constructor_field (target, bitsize, bitpos,
5427169689Skan					 value_mode, value, type,
5428169689Skan					 cleared, get_alias_set (elttype));
5429169689Skan	      }
5430169689Skan	  }
5431259563Spfg
5432169689Skan	if (vector)
5433169689Skan	  emit_insn (GEN_FCN (icode)
5434169689Skan		     (target,
5435169689Skan		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5436169689Skan	break;
5437169689Skan      }
5438259563Spfg
5439169689Skan    default:
5440169689Skan      gcc_unreachable ();
544118334Speter    }
544218334Speter}
544318334Speter
544418334Speter/* Store the value of EXP (an expression tree)
544518334Speter   into a subfield of TARGET which has mode MODE and occupies
544618334Speter   BITSIZE bits, starting BITPOS bits from the start of TARGET.
544718334Speter   If MODE is VOIDmode, it means that we are storing into a bit-field.
544818334Speter
5449169689Skan   Always return const0_rtx unless we have something particular to
5450169689Skan   return.
545118334Speter
545290075Sobrien   TYPE is the type of the underlying object,
545318334Speter
545452284Sobrien   ALIAS_SET is the alias set for the destination.  This value will
545552284Sobrien   (in general) be different from that for TARGET, since TARGET is a
545652284Sobrien   reference to the containing structure.  */
545752284Sobrien
545818334Speterstatic rtx
5459132718Skanstore_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5460169689Skan	     enum machine_mode mode, tree exp, tree type, int alias_set)
546118334Speter{
546218334Speter  HOST_WIDE_INT width_mask = 0;
546318334Speter
546450397Sobrien  if (TREE_CODE (exp) == ERROR_MARK)
546550397Sobrien    return const0_rtx;
546650397Sobrien
546790075Sobrien  /* If we have nothing to store, do nothing unless the expression has
546890075Sobrien     side-effects.  */
546990075Sobrien  if (bitsize == 0)
547090075Sobrien    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5471132718Skan  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
547218334Speter    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
547318334Speter
547418334Speter  /* If we are storing into an unaligned field of an aligned union that is
547518334Speter     in a register, we may have the mode of TARGET being an integer mode but
547618334Speter     MODE == BLKmode.  In that case, get an aligned object whose size and
547718334Speter     alignment are the same as TARGET and store TARGET into it (we can avoid
547818334Speter     the store if the field being stored is the entire width of TARGET).  Then
547918334Speter     call ourselves recursively to store the field into a BLKmode version of
548018334Speter     that object.  Finally, load from the object into TARGET.  This is not
548118334Speter     very efficient in general, but should only be slightly more expensive
548218334Speter     than the otherwise-required unaligned accesses.  Perhaps this can be
5483132718Skan     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5484132718Skan     twice, once with emit_move_insn and once via store_field.  */
548518334Speter
548618334Speter  if (mode == BLKmode
5487169689Skan      && (REG_P (target) || GET_CODE (target) == SUBREG))
548818334Speter    {
5489132718Skan      rtx object = assign_temp (type, 0, 1, 1);
549090075Sobrien      rtx blk_object = adjust_address (object, BLKmode, 0);
549118334Speter
549290075Sobrien      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
549318334Speter	emit_move_insn (object, target);
549418334Speter
5495169689Skan      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
549618334Speter
549718334Speter      emit_move_insn (target, object);
549818334Speter
549990075Sobrien      /* We want to return the BLKmode version of the data.  */
550018334Speter      return blk_object;
550118334Speter    }
550218334Speter
550390075Sobrien  if (GET_CODE (target) == CONCAT)
550490075Sobrien    {
550590075Sobrien      /* We're storing into a struct containing a single __complex.  */
550690075Sobrien
5507169689Skan      gcc_assert (!bitpos);
550890075Sobrien      return store_expr (exp, target, 0);
550990075Sobrien    }
551090075Sobrien
551118334Speter  /* If the structure is in a register or if the component
551218334Speter     is a bit field, we cannot use addressing to access it.
551318334Speter     Use bit-field techniques or SUBREG to store in it.  */
551418334Speter
551518334Speter  if (mode == VOIDmode
551652284Sobrien      || (mode != BLKmode && ! direct_store[(int) mode]
551752284Sobrien	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
551852284Sobrien	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5519169689Skan      || REG_P (target)
552018334Speter      || GET_CODE (target) == SUBREG
552118334Speter      /* If the field isn't aligned enough to store as an ordinary memref,
552218334Speter	 store it as a bit field.  */
5523132718Skan      || (mode != BLKmode
5524132718Skan	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5525132718Skan		|| bitpos % GET_MODE_ALIGNMENT (mode))
5526132718Skan	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5527132718Skan	      || (bitpos % BITS_PER_UNIT != 0)))
552890075Sobrien      /* If the RHS and field are a constant size and the size of the
552990075Sobrien	 RHS isn't the same size as the bitfield, we must use bitfield
553090075Sobrien	 operations.  */
553190075Sobrien      || (bitsize >= 0
553290075Sobrien	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
553390075Sobrien	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
553418334Speter    {
5535169689Skan      rtx temp;
553618334Speter
5537169689Skan      /* If EXP is a NOP_EXPR of precision less than its mode, then that
5538169689Skan	 implies a mask operation.  If the precision is the same size as
5539169689Skan	 the field we're storing into, that mask is redundant.  This is
5540169689Skan	 particularly common with bit field assignments generated by the
5541169689Skan	 C front end.  */
5542169689Skan      if (TREE_CODE (exp) == NOP_EXPR)
5543169689Skan	{
5544169689Skan	  tree type = TREE_TYPE (exp);
5545169689Skan	  if (INTEGRAL_TYPE_P (type)
5546169689Skan	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5547169689Skan	      && bitsize == TYPE_PRECISION (type))
5548169689Skan	    {
5549169689Skan	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
5550169689Skan	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5551169689Skan		exp = TREE_OPERAND (exp, 0);
5552169689Skan	    }
5553169689Skan	}
5554169689Skan
5555169689Skan      temp = expand_normal (exp);
5556169689Skan
555750397Sobrien      /* If BITSIZE is narrower than the size of the type of EXP
555850397Sobrien	 we will be narrowing TEMP.  Normally, what's wanted are the
555950397Sobrien	 low-order bits.  However, if EXP's type is a record and this is
556050397Sobrien	 big-endian machine, we want the upper BITSIZE bits.  */
556150397Sobrien      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
556290075Sobrien	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
556350397Sobrien	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
556450397Sobrien	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
556550397Sobrien			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
556650397Sobrien				       - bitsize),
5567132718Skan			     NULL_RTX, 1);
556850397Sobrien
556918334Speter      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
557018334Speter	 MODE.  */
557118334Speter      if (mode != VOIDmode && mode != BLKmode
557218334Speter	  && mode != TYPE_MODE (TREE_TYPE (exp)))
557318334Speter	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
557418334Speter
557550397Sobrien      /* If the modes of TARGET and TEMP are both BLKmode, both
557650397Sobrien	 must be in memory and BITPOS must be aligned on a byte
557750397Sobrien	 boundary.  If so, we simply do a block copy.  */
557850397Sobrien      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
557950397Sobrien	{
5580169689Skan	  gcc_assert (MEM_P (target) && MEM_P (temp)
5581169689Skan		      && !(bitpos % BITS_PER_UNIT));
558250397Sobrien
558390075Sobrien	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
558450397Sobrien	  emit_block_move (target, temp,
558550397Sobrien			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5586117395Skan				    / BITS_PER_UNIT),
5587117395Skan			   BLOCK_OP_NORMAL);
558850397Sobrien
5589169689Skan	  return const0_rtx;
559050397Sobrien	}
559150397Sobrien
559218334Speter      /* Store the value in the bitfield.  */
5593169689Skan      store_bit_field (target, bitsize, bitpos, mode, temp);
559490075Sobrien
559518334Speter      return const0_rtx;
559618334Speter    }
559718334Speter  else
559818334Speter    {
559918334Speter      /* Now build a reference to just the desired component.  */
5600169689Skan      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
560118334Speter
560290075Sobrien      if (to_rtx == target)
560390075Sobrien	to_rtx = copy_rtx (to_rtx);
560490075Sobrien
560552284Sobrien      MEM_SET_IN_STRUCT_P (to_rtx, 1);
560690075Sobrien      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
560790075Sobrien	set_mem_alias_set (to_rtx, alias_set);
560818334Speter
5609169689Skan      return store_expr (exp, to_rtx, 0);
561018334Speter    }
561118334Speter}
561218334Speter
561318334Speter/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
561490075Sobrien   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
561590075Sobrien   codes and find the ultimate containing object, which we return.
561618334Speter
561718334Speter   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
561818334Speter   bit position, and *PUNSIGNEDP to the signedness of the field.
561918334Speter   If the position of the field is variable, we store a tree
562018334Speter   giving the variable offset (in units) in *POFFSET.
562118334Speter   This offset is in addition to the bit position.
562218334Speter   If the position is not variable, we store 0 in *POFFSET.
562318334Speter
562418334Speter   If any of the extraction expressions is volatile,
562518334Speter   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
562618334Speter
562718334Speter   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
562818334Speter   is a mode that can be used to access the field.  In that case, *PBITSIZE
562918334Speter   is redundant.
563018334Speter
563118334Speter   If the field describes a variable-sized object, *PMODE is set to
563218334Speter   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5633169689Skan   this case, but the address of the object can be found.
563418334Speter
5635169689Skan   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5636169689Skan   look through nodes that serve as markers of a greater alignment than
5637169689Skan   the one that can be deduced from the expression.  These nodes make it
5638169689Skan   possible for front-ends to prevent temporaries from being created by
5639169689Skan   the middle-end on alignment considerations.  For that purpose, the
5640169689Skan   normal operating mode at high-level is to always pass FALSE so that
5641169689Skan   the ultimate containing object is really returned; moreover, the
5642169689Skan   associated predicate handled_component_p will always return TRUE
5643169689Skan   on these nodes, thus indicating that they are essentially handled
5644169689Skan   by get_inner_reference.  TRUE should only be passed when the caller
5645169689Skan   is scanning the expression in order to build another representation
5646169689Skan   and specifically knows how to handle these nodes; as such, this is
5647169689Skan   the normal operating mode in the RTL expanders.  */
5648169689Skan
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  /* Byte-unit offset accumulated from ARRAY_REFs and COMPONENT_REF
     DECL_FIELD_OFFSETs; may end up variable.  */
  tree offset = size_zero_node;
  /* Bit-unit offset accumulated from bit-field positions and
     IMAGPART_EXPR; always constant.  */
  tree bit_offset = bitsize_zero_node;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      /* A non-constant size means a variable-sized object: report
	 BLKmode and bitsize -1 as documented above.  */
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  *pmode = mode;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 is the bit position within the containing object.  */
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
			         size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  /* Real part is at offset zero of the complex value.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part, which has the same size
	     as the whole reference (*pbitsize).  */
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  /* See the KEEP_ALIGNING discussion in the function comment:
	     stop at a view-convert that acts as an alignment marker.  */
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_mul (tree_to_double_int (offset),
				       uhwi_to_double_int (BITS_PER_UNIT));
      tem = double_int_add (tem, tree_to_double_int (bit_offset));
      if (double_int_fits_in_shwi_p (tem))
	{
	  *pbitpos = double_int_to_shwi (tem);
	  *poffset = NULL_TREE;
	  return exp;
	}
    }

  /* Otherwise, split it up.  */
  *pbitpos = tree_low_cst (bit_offset, 0);
  *poffset = offset;

  return exp;
}
580150397Sobrien
5802220150Smm/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5803220150Smm   look for whether EXP or any nested component-refs within EXP is marked
5804220150Smm   as PACKED.  */
5805220150Smm
5806220150Smmbool
5807220150Smmcontains_packed_reference (tree exp)
5808220150Smm{
5809220150Smm  bool packed_p = false;
5810220150Smm
5811220150Smm  while (1)
5812220150Smm    {
5813220150Smm      switch (TREE_CODE (exp))
5814220150Smm	{
5815220150Smm	case COMPONENT_REF:
5816220150Smm	  {
5817220150Smm	    tree field = TREE_OPERAND (exp, 1);
5818220150Smm	    packed_p = DECL_PACKED (field)
5819220150Smm		       || TYPE_PACKED (TREE_TYPE (field))
5820220150Smm		       || TYPE_PACKED (TREE_TYPE (exp));
5821220150Smm	    if (packed_p)
5822220150Smm	      goto done;
5823220150Smm	  }
5824220150Smm	  break;
5825220150Smm
5826220150Smm	case BIT_FIELD_REF:
5827220150Smm	case ARRAY_REF:
5828220150Smm	case ARRAY_RANGE_REF:
5829220150Smm	case REALPART_EXPR:
5830220150Smm	case IMAGPART_EXPR:
5831220150Smm	case VIEW_CONVERT_EXPR:
5832220150Smm	  break;
5833220150Smm
5834220150Smm	default:
5835220150Smm	  goto done;
5836220150Smm	}
5837220150Smm      exp = TREE_OPERAND (exp, 0);
5838220150Smm    }
5839220150Smm done:
5840220150Smm  return packed_p;
5841220150Smm}
5842220150Smm
5843169689Skan/* Return a tree of sizetype representing the size, in bytes, of the element
5844169689Skan   of EXP, an ARRAY_REF.  */
5845169689Skan
5846169689Skantree
5847169689Skanarray_ref_element_size (tree exp)
5848169689Skan{
5849169689Skan  tree aligned_size = TREE_OPERAND (exp, 3);
5850169689Skan  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5851169689Skan
5852169689Skan  /* If a size was specified in the ARRAY_REF, it's the size measured
5853169689Skan     in alignment units of the element type.  So multiply by that value.  */
5854169689Skan  if (aligned_size)
5855169689Skan    {
5856169689Skan      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5857169689Skan	 sizetype from another type of the same width and signedness.  */
5858169689Skan      if (TREE_TYPE (aligned_size) != sizetype)
5859169689Skan	aligned_size = fold_convert (sizetype, aligned_size);
5860169689Skan      return size_binop (MULT_EXPR, aligned_size,
5861169689Skan		         size_int (TYPE_ALIGN_UNIT (elmt_type)));
5862169689Skan    }
5863169689Skan
5864169689Skan  /* Otherwise, take the size from that of the element type.  Substitute
5865169689Skan     any PLACEHOLDER_EXPR that we have.  */
5866169689Skan  else
5867169689Skan    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5868169689Skan}
5869169689Skan
5870169689Skan/* Return a tree representing the lower bound of the array mentioned in
5871169689Skan   EXP, an ARRAY_REF.  */
5872169689Skan
5873169689Skantree
5874169689Skanarray_ref_low_bound (tree exp)
5875169689Skan{
5876169689Skan  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5877169689Skan
5878169689Skan  /* If a lower bound is specified in EXP, use it.  */
5879169689Skan  if (TREE_OPERAND (exp, 2))
5880169689Skan    return TREE_OPERAND (exp, 2);
5881169689Skan
5882169689Skan  /* Otherwise, if there is a domain type and it has a lower bound, use it,
5883169689Skan     substituting for a PLACEHOLDER_EXPR as needed.  */
5884169689Skan  if (domain_type && TYPE_MIN_VALUE (domain_type))
5885169689Skan    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5886169689Skan
5887169689Skan  /* Otherwise, return a zero of the appropriate type.  */
5888169689Skan  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5889169689Skan}
5890169689Skan
5891169689Skan/* Return a tree representing the upper bound of the array mentioned in
5892169689Skan   EXP, an ARRAY_REF.  */
5893169689Skan
5894169689Skantree
5895169689Skanarray_ref_up_bound (tree exp)
5896169689Skan{
5897169689Skan  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5898169689Skan
5899169689Skan  /* If there is a domain type and it has an upper bound, use it, substituting
5900169689Skan     for a PLACEHOLDER_EXPR as needed.  */
5901169689Skan  if (domain_type && TYPE_MAX_VALUE (domain_type))
5902169689Skan    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5903169689Skan
5904169689Skan  /* Otherwise fail.  */
5905169689Skan  return NULL_TREE;
5906169689Skan}
5907169689Skan
5908169689Skan/* Return a tree representing the offset, in bytes, of the field referenced
5909169689Skan   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
5910169689Skan
5911169689Skantree
5912169689Skancomponent_ref_field_offset (tree exp)
5913169689Skan{
5914169689Skan  tree aligned_offset = TREE_OPERAND (exp, 2);
5915169689Skan  tree field = TREE_OPERAND (exp, 1);
5916169689Skan
5917169689Skan  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5918169689Skan     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
5919169689Skan     value.  */
5920169689Skan  if (aligned_offset)
5921169689Skan    {
5922169689Skan      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5923169689Skan	 sizetype from another type of the same width and signedness.  */
5924169689Skan      if (TREE_TYPE (aligned_offset) != sizetype)
5925169689Skan	aligned_offset = fold_convert (sizetype, aligned_offset);
5926169689Skan      return size_binop (MULT_EXPR, aligned_offset,
5927169689Skan		         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5928169689Skan    }
5929169689Skan
5930169689Skan  /* Otherwise, take the offset from that of the field.  Substitute
5931169689Skan     any PLACEHOLDER_EXPR that we have.  */
5932169689Skan  else
5933169689Skan    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5934169689Skan}
5935169689Skan
593690075Sobrien/* Return 1 if T is an expression that get_inner_reference handles.  */
593790075Sobrien
593890075Sobrienint
5939132718Skanhandled_component_p (tree t)
594050397Sobrien{
594190075Sobrien  switch (TREE_CODE (t))
594250397Sobrien    {
594390075Sobrien    case BIT_FIELD_REF:
594490075Sobrien    case COMPONENT_REF:
594590075Sobrien    case ARRAY_REF:
594690075Sobrien    case ARRAY_RANGE_REF:
594790075Sobrien    case VIEW_CONVERT_EXPR:
5948169689Skan    case REALPART_EXPR:
5949169689Skan    case IMAGPART_EXPR:
595090075Sobrien      return 1;
595190075Sobrien
595250397Sobrien    default:
595390075Sobrien      return 0;
595450397Sobrien    }
595550397Sobrien}
595618334Speter
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      /* Force the inner expression into a register first, then rebuild
	 the subreg around it and reclassify.  */
      value = simplify_gen_subreg (GET_MODE (value),
				   force_reg (GET_MODE (SUBREG_REG (value)),
					      force_operand (SUBREG_REG (value),
							     NULL_RTX)),
				   GET_MODE (SUBREG_REG (value)),
				   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      /* Don't reuse SUBTARGET if operand 1 might need it too.  */
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      /* Canonicalize (minus X C) into (plus X -C).  */
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      /* Recursively force both operands, then expand the operation.  */
      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	  break;
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	  break;
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	  break;
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	  break;
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	  break;
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
611518334Speter
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      /* A decl conflicts only through its rtl, checked below.  */
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  /* Walk the list iteratively; recurse only on values and on a
	     non-list chain tail, to avoid deep recursion on long lists.  */
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  /* Every index and value in the constructor must be safe.  */
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  for (idx = 0;
	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	       idx++)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL a that address if part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* A load through a pointer conflicts with X only if their alias
	     sets can overlap.  */
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
632518334Speter
632690075Sobrien
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  /* Isolate the lowest set bit; 0 means all low bits are zero.  */
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      /* A sum or extremum is a multiple of the smaller of the two
	 operand factors.  */
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      /* Factors of a product multiply.  */
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      /* Division by a constant power of two divides the factor.  */
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      /* These wrappers don't change the value; look through them.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      /* Only the second operand is the value.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      /* Either arm may be the result; take the weaker guarantee.  */
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  /* Nothing is known; 1 divides everything.  */
  return 1;
}
639596263Sobrien
6396132718Skan/* Similar, except that the alignment requirements of TARGET are
6397132718Skan   taken into account.  Assume it is at least as aligned as its
6398132718Skan   type, unless it is a COMPONENT_REF in which case the layout of
6399132718Skan   the structure gives the alignment.  */
640096263Sobrien
6401132718Skanstatic unsigned HOST_WIDE_INT
6402132718Skanhighest_pow2_factor_for_target (tree target, tree exp)
640396263Sobrien{
6404132718Skan  unsigned HOST_WIDE_INT target_align, factor;
640596263Sobrien
640696263Sobrien  factor = highest_pow2_factor (exp);
6407132718Skan  if (TREE_CODE (target) == COMPONENT_REF)
6408169689Skan    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6409132718Skan  else
6410169689Skan    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6411132718Skan  return MAX (factor, target_align);
641296263Sobrien}
641318334Speter
6414169689Skan/* Expands variable VAR.  */
641590075Sobrien
6416169689Skanvoid
6417169689Skanexpand_var (tree var)
641890075Sobrien{
6419169689Skan  if (DECL_EXTERNAL (var))
6420169689Skan    return;
642190075Sobrien
6422169689Skan  if (TREE_STATIC (var))
6423169689Skan    /* If this is an inlined copy of a static local variable,
6424169689Skan       look up the original decl.  */
6425169689Skan    var = DECL_ORIGIN (var);
6426169689Skan
6427169689Skan  if (TREE_STATIC (var)
6428169689Skan      ? !TREE_ASM_WRITTEN (var)
6429169689Skan      : !DECL_RTL_SET_P (var))
643090075Sobrien    {
6431169689Skan      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6432169689Skan	/* Should be ignored.  */;
6433169689Skan      else if (lang_hooks.expand_decl (var))
6434169689Skan	/* OK.  */;
6435169689Skan      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6436169689Skan	expand_decl (var);
6437169689Skan      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6438169689Skan	rest_of_decl_compilation (var, 0, 0);
6439169689Skan      else
6440169689Skan	/* No expansion needed.  */
6441169689Skan	gcc_assert (TREE_CODE (var) == TYPE_DECL
6442169689Skan		    || TREE_CODE (var) == CONST_DECL
6443169689Skan		    || TREE_CODE (var) == FUNCTION_DECL
6444169689Skan		    || TREE_CODE (var) == LABEL_DECL);
644590075Sobrien    }
644690075Sobrien}
6447132718Skan
6448132718Skan/* Subroutine of expand_expr.  Expand the two operands of a binary
6449132718Skan   expression EXP0 and EXP1 placing the results in OP0 and OP1.
6450132718Skan   The value may be stored in TARGET if TARGET is nonzero.  The
6451132718Skan   MODIFIER argument is as documented by expand_expr.  */
6452132718Skan
6453132718Skanstatic void
6454132718Skanexpand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6455132718Skan		 enum expand_modifier modifier)
6456132718Skan{
6457132718Skan  if (! safe_from_p (target, exp1, 1))
6458132718Skan    target = 0;
6459132718Skan  if (operand_equal_p (exp0, exp1, 0))
6460132718Skan    {
6461132718Skan      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6462132718Skan      *op1 = copy_rtx (*op0);
6463132718Skan    }
6464132718Skan  else
6465132718Skan    {
6466132718Skan      /* If we need to preserve evaluation order, copy exp0 into its own
6467132718Skan	 temporary variable so that it can't be clobbered by exp1.  */
6468132718Skan      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6469132718Skan	exp0 = save_expr (exp0);
6470132718Skan      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6471132718Skan      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6472132718Skan    }
6473132718Skan}
6474132718Skan
647590075Sobrien
6476169689Skan/* Return a MEM that contains constant EXP.  DEFER is as for
6477169689Skan   output_constant_def and MODIFIER is as for expand_expr.  */
6478169689Skan
6479169689Skanstatic rtx
6480169689Skanexpand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6481169689Skan{
6482169689Skan  rtx mem;
6483169689Skan
6484169689Skan  mem = output_constant_def (exp, defer);
6485169689Skan  if (modifier != EXPAND_INITIALIZER)
6486169689Skan    mem = use_anchored_address (mem);
6487169689Skan  return mem;
6488169689Skan}
6489169689Skan
6490169689Skan/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6491169689Skan   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6492169689Skan
6493169689Skanstatic rtx
6494169689Skanexpand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6495169689Skan		         enum expand_modifier modifier)
6496169689Skan{
6497169689Skan  rtx result, subtarget;
6498169689Skan  tree inner, offset;
6499169689Skan  HOST_WIDE_INT bitsize, bitpos;
6500169689Skan  int volatilep, unsignedp;
6501169689Skan  enum machine_mode mode1;
6502169689Skan
6503169689Skan  /* If we are taking the address of a constant and are at the top level,
6504169689Skan     we have to use output_constant_def since we can't call force_const_mem
6505169689Skan     at top level.  */
6506169689Skan  /* ??? This should be considered a front-end bug.  We should not be
6507169689Skan     generating ADDR_EXPR of something that isn't an LVALUE.  The only
6508169689Skan     exception here is STRING_CST.  */
6509169689Skan  if (TREE_CODE (exp) == CONSTRUCTOR
6510169689Skan      || CONSTANT_CLASS_P (exp))
6511169689Skan    return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6512169689Skan
6513169689Skan  /* Everything must be something allowed by is_gimple_addressable.  */
6514169689Skan  switch (TREE_CODE (exp))
6515169689Skan    {
6516169689Skan    case INDIRECT_REF:
6517169689Skan      /* This case will happen via recursion for &a->b.  */
6518169689Skan      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6519169689Skan
6520169689Skan    case CONST_DECL:
6521169689Skan      /* Recurse and make the output_constant_def clause above handle this.  */
6522169689Skan      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6523169689Skan				      tmode, modifier);
6524169689Skan
6525169689Skan    case REALPART_EXPR:
6526169689Skan      /* The real part of the complex number is always first, therefore
6527169689Skan	 the address is the same as the address of the parent object.  */
6528169689Skan      offset = 0;
6529169689Skan      bitpos = 0;
6530169689Skan      inner = TREE_OPERAND (exp, 0);
6531169689Skan      break;
6532169689Skan
6533169689Skan    case IMAGPART_EXPR:
6534169689Skan      /* The imaginary part of the complex number is always second.
6535169689Skan	 The expression is therefore always offset by the size of the
6536169689Skan	 scalar type.  */
6537169689Skan      offset = 0;
6538169689Skan      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6539169689Skan      inner = TREE_OPERAND (exp, 0);
6540169689Skan      break;
6541169689Skan
6542169689Skan    default:
6543169689Skan      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6544169689Skan	 expand_expr, as that can have various side effects; LABEL_DECLs for
6545169689Skan	 example, may not have their DECL_RTL set yet.  Assume language
6546169689Skan	 specific tree nodes can be expanded in some interesting way.  */
6547169689Skan      if (DECL_P (exp)
6548169689Skan	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6549169689Skan	{
6550169689Skan	  result = expand_expr (exp, target, tmode,
6551169689Skan				modifier == EXPAND_INITIALIZER
6552169689Skan				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6553169689Skan
6554169689Skan	  /* If the DECL isn't in memory, then the DECL wasn't properly
6555169689Skan	     marked TREE_ADDRESSABLE, which will be either a front-end
6556169689Skan	     or a tree optimizer bug.  */
6557169689Skan	  gcc_assert (MEM_P (result));
6558169689Skan	  result = XEXP (result, 0);
6559169689Skan
6560169689Skan	  /* ??? Is this needed anymore?  */
6561169689Skan	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
6562169689Skan	    {
6563169689Skan	      assemble_external (exp);
6564169689Skan	      TREE_USED (exp) = 1;
6565169689Skan	    }
6566169689Skan
6567169689Skan	  if (modifier != EXPAND_INITIALIZER
6568169689Skan	      && modifier != EXPAND_CONST_ADDRESS)
6569169689Skan	    result = force_operand (result, target);
6570169689Skan	  return result;
6571169689Skan	}
6572169689Skan
6573169689Skan      /* Pass FALSE as the last argument to get_inner_reference although
6574169689Skan	 we are expanding to RTL.  The rationale is that we know how to
6575169689Skan	 handle "aligning nodes" here: we can just bypass them because
6576169689Skan	 they won't change the final object whose address will be returned
6577169689Skan	 (they actually exist only for that purpose).  */
6578169689Skan      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6579169689Skan				   &mode1, &unsignedp, &volatilep, false);
6580169689Skan      break;
6581169689Skan    }
6582169689Skan
6583169689Skan  /* We must have made progress.  */
6584169689Skan  gcc_assert (inner != exp);
6585169689Skan
6586169689Skan  subtarget = offset || bitpos ? NULL_RTX : target;
6587169689Skan  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6588169689Skan
6589169689Skan  if (offset)
6590169689Skan    {
6591169689Skan      rtx tmp;
6592169689Skan
6593169689Skan      if (modifier != EXPAND_NORMAL)
6594169689Skan	result = force_operand (result, NULL);
6595169689Skan      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6596169689Skan
6597169689Skan      result = convert_memory_address (tmode, result);
6598169689Skan      tmp = convert_memory_address (tmode, tmp);
6599169689Skan
6600169689Skan      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6601169689Skan	result = gen_rtx_PLUS (tmode, result, tmp);
6602169689Skan      else
6603169689Skan	{
6604169689Skan	  subtarget = bitpos ? NULL_RTX : target;
6605169689Skan	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6606169689Skan					1, OPTAB_LIB_WIDEN);
6607169689Skan	}
6608169689Skan    }
6609169689Skan
6610169689Skan  if (bitpos)
6611169689Skan    {
6612169689Skan      /* Someone beforehand should have rejected taking the address
6613169689Skan	 of such an object.  */
6614169689Skan      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6615169689Skan
6616169689Skan      result = plus_constant (result, bitpos / BITS_PER_UNIT);
6617169689Skan      if (modifier < EXPAND_SUM)
6618169689Skan	result = force_operand (result, target);
6619169689Skan    }
6620169689Skan
6621169689Skan  return result;
6622169689Skan}
6623169689Skan
6624169689Skan/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
6625169689Skan   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6626169689Skan
6627169689Skanstatic rtx
6628169689Skanexpand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6629169689Skan		       enum expand_modifier modifier)
6630169689Skan{
6631169689Skan  enum machine_mode rmode;
6632169689Skan  rtx result;
6633169689Skan
6634169689Skan  /* Target mode of VOIDmode says "whatever's natural".  */
6635169689Skan  if (tmode == VOIDmode)
6636169689Skan    tmode = TYPE_MODE (TREE_TYPE (exp));
6637169689Skan
6638169689Skan  /* We can get called with some Weird Things if the user does silliness
6639169689Skan     like "(short) &a".  In that case, convert_memory_address won't do
6640169689Skan     the right thing, so ignore the given target mode.  */
6641169689Skan  if (tmode != Pmode && tmode != ptr_mode)
6642169689Skan    tmode = Pmode;
6643169689Skan
6644169689Skan  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6645169689Skan				    tmode, modifier);
6646169689Skan
6647169689Skan  /* Despite expand_expr claims concerning ignoring TMODE when not
6648169689Skan     strictly convenient, stuff breaks if we don't honor it.  Note
6649169689Skan     that combined with the above, we only do this for pointer modes.  */
6650169689Skan  rmode = GET_MODE (result);
6651169689Skan  if (rmode == VOIDmode)
6652169689Skan    rmode = tmode;
6653169689Skan  if (rmode != tmode)
6654169689Skan    result = convert_memory_address (tmode, result);
6655169689Skan
6656169689Skan  return result;
6657169689Skan}
6658169689Skan
6659169689Skan
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

/* Wrapper around expand_expr_real_1 that handles ERROR_MARKs, emits
   line-number notes, and, with -fnon-call-exceptions, tags the insns
   just emitted with their EH region.  */

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;			/* EH region of EXP, or -1 if none.  */
  rtx ret, last = NULL;		/* LAST marks the insn stream before expansion.  */

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      /* CONST0_RTX may be null for some modes; fall back to const0_rtx.  */
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	/* Remember where we start so the tagging loop below can find
	   every insn produced by this expansion.  */
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						  REG_NOTES (insn));
	    }
	}
    }

  return ret;
}
6784169689Skan
6785169689Skanstatic rtx
6786169689Skanexpand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6787169689Skan		    enum expand_modifier modifier, rtx *alt_rtl)
6788169689Skan{
6789169689Skan  rtx op0, op1, temp, decl_rtl;
679018334Speter  tree type = TREE_TYPE (exp);
6791169689Skan  int unsignedp;
679290075Sobrien  enum machine_mode mode;
679390075Sobrien  enum tree_code code = TREE_CODE (exp);
679418334Speter  optab this_optab;
679552284Sobrien  rtx subtarget, original_target;
679652284Sobrien  int ignore;
6797169689Skan  tree context, subexp0, subexp1;
6798169689Skan  bool reduce_bit_field = false;
6799169689Skan#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
6800169689Skan				 ? reduce_to_bit_field_precision ((expr), \
6801169689Skan								  target, \
6802169689Skan								  type)	  \
6803169689Skan				 : (expr))
680418334Speter
6805169689Skan  mode = TYPE_MODE (type);
6806169689Skan  unsignedp = TYPE_UNSIGNED (type);
6807169689Skan  if (lang_hooks.reduce_bit_field_operations
6808169689Skan      && TREE_CODE (type) == INTEGER_TYPE
6809169689Skan      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
681052284Sobrien    {
6811169689Skan      /* An operation in what may be a bit-field type needs the
6812169689Skan	 result to be reduced to the precision of the bit-field type,
6813169689Skan	 which is narrower than that of the type's mode.  */
6814169689Skan      reduce_bit_field = true;
6815169689Skan      if (modifier == EXPAND_STACK_PARM)
6816169689Skan	target = 0;
681752284Sobrien    }
681852284Sobrien
681952284Sobrien  /* Use subtarget as the target for operand 0 of a binary operation.  */
682090075Sobrien  subtarget = get_subtarget (target);
682152284Sobrien  original_target = target;
682252284Sobrien  ignore = (target == const0_rtx
682352284Sobrien	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6824169689Skan		 || code == CONVERT_EXPR || code == COND_EXPR
6825169689Skan		 || code == VIEW_CONVERT_EXPR)
682652284Sobrien		&& TREE_CODE (type) == VOID_TYPE));
682752284Sobrien
682818334Speter  /* If we are going to ignore this result, we need only do something
682918334Speter     if there is a side-effect somewhere in the expression.  If there
683018334Speter     is, short-circuit the most common cases here.  Note that we must
683118334Speter     not call expand_expr with anything but const0_rtx in case this
683218334Speter     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
683318334Speter
683418334Speter  if (ignore)
683518334Speter    {
683618334Speter      if (! TREE_SIDE_EFFECTS (exp))
683718334Speter	return const0_rtx;
683818334Speter
683990075Sobrien      /* Ensure we reference a volatile object even if value is ignored, but
684090075Sobrien	 don't do this if all we are doing is taking its address.  */
684118334Speter      if (TREE_THIS_VOLATILE (exp)
684218334Speter	  && TREE_CODE (exp) != FUNCTION_DECL
684390075Sobrien	  && mode != VOIDmode && mode != BLKmode
684490075Sobrien	  && modifier != EXPAND_CONST_ADDRESS)
684518334Speter	{
684690075Sobrien	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6847169689Skan	  if (MEM_P (temp))
684818334Speter	    temp = copy_to_reg (temp);
684918334Speter	  return const0_rtx;
685018334Speter	}
685118334Speter
6852169689Skan      if (TREE_CODE_CLASS (code) == tcc_unary
6853169689Skan	  || code == COMPONENT_REF || code == INDIRECT_REF)
685490075Sobrien	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
685590075Sobrien			    modifier);
685690075Sobrien
6857169689Skan      else if (TREE_CODE_CLASS (code) == tcc_binary
6858169689Skan	       || TREE_CODE_CLASS (code) == tcc_comparison
685990075Sobrien	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
686018334Speter	{
686190075Sobrien	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
686290075Sobrien	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
686318334Speter	  return const0_rtx;
686418334Speter	}
686590075Sobrien      else if (code == BIT_FIELD_REF)
686690075Sobrien	{
686790075Sobrien	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
686890075Sobrien	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
686990075Sobrien	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
687090075Sobrien	  return const0_rtx;
687190075Sobrien	}
687218334Speter
687318334Speter      target = 0;
687418334Speter    }
687518334Speter
687618334Speter
687718334Speter  switch (code)
687818334Speter    {
687918334Speter    case LABEL_DECL:
688018334Speter      {
688118334Speter	tree function = decl_function_context (exp);
688290075Sobrien
6883169689Skan	temp = label_rtx (exp);
6884169689Skan	temp = gen_rtx_LABEL_REF (Pmode, temp);
6885169689Skan
688650397Sobrien	if (function != current_function_decl
6887169689Skan	    && function != 0)
6888169689Skan	  LABEL_REF_NONLOCAL_P (temp) = 1;
6889169689Skan
6890169689Skan	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
689118334Speter	return temp;
689218334Speter      }
689318334Speter
6894169689Skan    case SSA_NAME:
6895169689Skan      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6896169689Skan				 NULL);
6897169689Skan
689818334Speter    case PARM_DECL:
689918334Speter    case VAR_DECL:
690018334Speter      /* If a static var's type was incomplete when the decl was written,
690118334Speter	 but the type is complete now, lay out the decl now.  */
6902117395Skan      if (DECL_SIZE (exp) == 0
6903117395Skan	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
690418334Speter	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6905117395Skan	layout_decl (exp, 0);
690690075Sobrien
690750397Sobrien      /* ... fall through ...  */
690850397Sobrien
690918334Speter    case FUNCTION_DECL:
691018334Speter    case RESULT_DECL:
6911169689Skan      decl_rtl = DECL_RTL (exp);
6912169689Skan      gcc_assert (decl_rtl);
691318334Speter
691418334Speter      /* Ensure variable marked as used even if it doesn't go through
691518334Speter	 a parser.  If it hasn't be used yet, write out an external
691618334Speter	 definition.  */
691718334Speter      if (! TREE_USED (exp))
691818334Speter	{
691918334Speter	  assemble_external (exp);
692018334Speter	  TREE_USED (exp) = 1;
692118334Speter	}
692218334Speter
692350397Sobrien      /* Show we haven't gotten RTL for this yet.  */
692450397Sobrien      temp = 0;
692550397Sobrien
6926169689Skan      /* Variables inherited from containing functions should have
6927169689Skan	 been lowered by this point.  */
692818334Speter      context = decl_function_context (exp);
6929169689Skan      gcc_assert (!context
6930169689Skan		  || context == current_function_decl
6931169689Skan		  || TREE_STATIC (exp)
6932169689Skan		  /* ??? C++ creates functions that are not TREE_STATIC.  */
6933169689Skan		  || TREE_CODE (exp) == FUNCTION_DECL);
693418334Speter
693518334Speter      /* This is the case of an array whose size is to be determined
693618334Speter	 from its initializer, while the initializer is still being parsed.
693718334Speter	 See expand_decl.  */
693818334Speter
6939169689Skan      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6940169689Skan	temp = validize_mem (decl_rtl);
694118334Speter
694218334Speter      /* If DECL_RTL is memory, we are in the normal case and either
694318334Speter	 the address is not valid or it is not a register and -fforce-addr
694418334Speter	 is specified, get the address into a register.  */
694518334Speter
6946169689Skan      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6947132718Skan	{
6948132718Skan	  if (alt_rtl)
6949169689Skan	    *alt_rtl = decl_rtl;
6950169689Skan	  decl_rtl = use_anchored_address (decl_rtl);
6951169689Skan	  if (modifier != EXPAND_CONST_ADDRESS
6952169689Skan	      && modifier != EXPAND_SUM
6953169689Skan	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6954169689Skan		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6955169689Skan	    temp = replace_equiv_address (decl_rtl,
6956169689Skan					  copy_rtx (XEXP (decl_rtl, 0)));
6957132718Skan	}
695818334Speter
695950397Sobrien      /* If we got something, return it.  But first, set the alignment
696090075Sobrien	 if the address is a register.  */
696150397Sobrien      if (temp != 0)
696250397Sobrien	{
6963169689Skan	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
696490075Sobrien	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
696550397Sobrien
696650397Sobrien	  return temp;
696750397Sobrien	}
696850397Sobrien
696918334Speter      /* If the mode of DECL_RTL does not match that of the decl, it
697018334Speter	 must be a promoted value.  We return a SUBREG of the wanted mode,
697118334Speter	 but mark it so that we know that it was already extended.  */
697218334Speter
6973169689Skan      if (REG_P (decl_rtl)
6974169689Skan	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
697518334Speter	{
6976169689Skan	  enum machine_mode pmode;
6977259563Spfg
697818334Speter	  /* Get the signedness used for this variable.  Ensure we get the
697918334Speter	     same mode we got when the variable was declared.  */
6980169689Skan	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6981169689Skan				(TREE_CODE (exp) == RESULT_DECL
6982169689Skan				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6983169689Skan	  gcc_assert (GET_MODE (decl_rtl) == pmode);
698418334Speter
6985169689Skan	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
698618334Speter	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6987117395Skan	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
698818334Speter	  return temp;
698918334Speter	}
699018334Speter
6991169689Skan      return decl_rtl;
699218334Speter
699318334Speter    case INTEGER_CST:
699496263Sobrien      temp = immed_double_const (TREE_INT_CST_LOW (exp),
699590075Sobrien				 TREE_INT_CST_HIGH (exp), mode);
699618334Speter
699796263Sobrien      /* ??? If overflow is set, fold will have done an incomplete job,
699896263Sobrien	 which can result in (plus xx (const_int 0)), which can get
699996263Sobrien	 simplified by validate_replace_rtx during virtual register
700096263Sobrien	 instantiation, which can result in unrecognizable insns.
700196263Sobrien	 Avoid this by forcing all overflows into registers.  */
700296263Sobrien      if (TREE_CONSTANT_OVERFLOW (exp)
700396263Sobrien	  && modifier != EXPAND_INITIALIZER)
700496263Sobrien	temp = force_reg (mode, temp);
700596263Sobrien
700696263Sobrien      return temp;
700796263Sobrien
7008119256Skan    case VECTOR_CST:
7009169689Skan      {
7010169689Skan	tree tmp = NULL_TREE;
7011169689Skan	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7012169689Skan	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7013169689Skan	  return const_vector_from_tree (exp);
7014169689Skan	if (GET_MODE_CLASS (mode) == MODE_INT)
7015169689Skan	  {
7016169689Skan	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7017169689Skan	    if (type_for_mode)
7018169689Skan	      tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7019169689Skan	  }
7020169689Skan	if (!tmp)
7021169689Skan	  tmp = build_constructor_from_list (type,
7022169689Skan					     TREE_VECTOR_CST_ELTS (exp));
7023169689Skan	return expand_expr (tmp, ignore ? const0_rtx : target,
7024169689Skan			    tmode, modifier);
7025169689Skan      }
7026119256Skan
702718334Speter    case CONST_DECL:
7028117395Skan      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
702918334Speter
703018334Speter    case REAL_CST:
703118334Speter      /* If optimized, generate immediate CONST_DOUBLE
703290075Sobrien	 which will be turned into memory by reload if necessary.
703390075Sobrien
703418334Speter	 We used to force a register so that loop.c could see it.  But
703518334Speter	 this does not allow gen_* patterns to perform optimizations with
703618334Speter	 the constants.  It also produces two insns in cases like "x = 1.0;".
703718334Speter	 On most machines, floating-point constants are not permitted in
703818334Speter	 many insns, so we'd end up copying it to a register in any case.
703918334Speter
704018334Speter	 Now, we do the copying in expand_binop, if appropriate.  */
7041117395Skan      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7042117395Skan					   TYPE_MODE (TREE_TYPE (exp)));
704318334Speter
704418334Speter    case COMPLEX_CST:
7045132718Skan      /* Handle evaluating a complex constant in a CONCAT target.  */
7046132718Skan      if (original_target && GET_CODE (original_target) == CONCAT)
7047132718Skan	{
7048132718Skan	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7049132718Skan	  rtx rtarg, itarg;
7050132718Skan
7051132718Skan	  rtarg = XEXP (original_target, 0);
7052132718Skan	  itarg = XEXP (original_target, 1);
7053132718Skan
7054132718Skan	  /* Move the real and imaginary parts separately.  */
7055132718Skan	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7056132718Skan	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7057132718Skan
7058132718Skan	  if (op0 != rtarg)
7059132718Skan	    emit_move_insn (rtarg, op0);
7060132718Skan	  if (op1 != itarg)
7061132718Skan	    emit_move_insn (itarg, op1);
7062132718Skan
7063132718Skan	  return original_target;
7064132718Skan	}
7065132718Skan
7066132718Skan      /* ... fall through ...  */
7067132718Skan
706818334Speter    case STRING_CST:
7069169689Skan      temp = expand_expr_constant (exp, 1, modifier);
707018334Speter
7071132718Skan      /* temp contains a constant address.
707218334Speter	 On RISC machines where a constant address isn't valid,
707318334Speter	 make some insns to get that address into a register.  */
7074132718Skan      if (modifier != EXPAND_CONST_ADDRESS
707518334Speter	  && modifier != EXPAND_INITIALIZER
707618334Speter	  && modifier != EXPAND_SUM
7077132718Skan	  && (! memory_address_p (mode, XEXP (temp, 0))
7078132718Skan	      || flag_force_addr))
7079132718Skan	return replace_equiv_address (temp,
7080132718Skan				      copy_rtx (XEXP (temp, 0)));
7081132718Skan      return temp;
708218334Speter
7083169689Skan    case SAVE_EXPR:
708450397Sobrien      {
7085169689Skan	tree val = TREE_OPERAND (exp, 0);
7086169689Skan	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7087132718Skan
7088169689Skan	if (!SAVE_EXPR_RESOLVED_P (exp))
7089169689Skan	  {
7090169689Skan	    /* We can indeed still hit this case, typically via builtin
7091169689Skan	       expanders calling save_expr immediately before expanding
7092169689Skan	       something.  Assume this means that we only have to deal
7093169689Skan	       with non-BLKmode values.  */
7094169689Skan	    gcc_assert (GET_MODE (ret) != BLKmode);
709550397Sobrien
7096169689Skan	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7097169689Skan	    DECL_ARTIFICIAL (val) = 1;
7098169689Skan	    DECL_IGNORED_P (val) = 1;
7099169689Skan	    TREE_OPERAND (exp, 0) = val;
7100169689Skan	    SAVE_EXPR_RESOLVED_P (exp) = 1;
710118334Speter
7102169689Skan	    if (!CONSTANT_P (ret))
7103169689Skan	      ret = copy_to_reg (ret);
7104169689Skan	    SET_DECL_RTL (val, ret);
7105169689Skan	  }
710650397Sobrien
7107169689Skan        return ret;
710850397Sobrien      }
710950397Sobrien
711052284Sobrien    case GOTO_EXPR:
711152284Sobrien      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
711252284Sobrien	expand_goto (TREE_OPERAND (exp, 0));
711352284Sobrien      else
711452284Sobrien	expand_computed_goto (TREE_OPERAND (exp, 0));
711552284Sobrien      return const0_rtx;
711652284Sobrien
711718334Speter    case CONSTRUCTOR:
711818334Speter      /* If we don't need the result, just ensure we evaluate any
711918334Speter	 subexpressions.  */
712018334Speter      if (ignore)
712118334Speter	{
7122169689Skan	  unsigned HOST_WIDE_INT idx;
7123169689Skan	  tree value;
712490075Sobrien
7125169689Skan	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7126169689Skan	    expand_expr (value, const0_rtx, VOIDmode, 0);
712790075Sobrien
712818334Speter	  return const0_rtx;
712918334Speter	}
713018334Speter
7131169689Skan      /* Try to avoid creating a temporary at all.  This is possible
7132169689Skan	 if all of the initializer is zero.
7133169689Skan	 FIXME: try to handle all [0..255] initializers we can handle
7134169689Skan	 with memset.  */
7135169689Skan      else if (TREE_STATIC (exp)
7136169689Skan	       && !TREE_ADDRESSABLE (exp)
7137169689Skan	       && target != 0 && mode == BLKmode
7138169689Skan	       && all_zeros_p (exp))
7139169689Skan	{
7140169689Skan	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7141169689Skan	  return target;
7142169689Skan	}
7143169689Skan
714418334Speter      /* All elts simple constants => refer to a constant in memory.  But
714518334Speter	 if this is a non-BLKmode mode, let it store a field at a time
714618334Speter	 since that should make a CONST_INT or CONST_DOUBLE when we
714718334Speter	 fold.  Likewise, if we have a target we can use, it is best to
714818334Speter	 store directly into the target unless the type is large enough
714918334Speter	 that memcpy will be used.  If we are making an initializer and
7150117395Skan	 all operands are constant, put it in memory as well.
7151117395Skan
7152117395Skan	FIXME: Avoid trying to fill vector constructors piece-meal.
7153117395Skan	Output them with output_constant_def below unless we're sure
7154117395Skan	they're zeros.  This should go away when vector initializers
7155117395Skan	are treated like VECTOR_CST instead of arrays.
7156117395Skan      */
715718334Speter      else if ((TREE_STATIC (exp)
715818334Speter		&& ((mode == BLKmode
715950397Sobrien		     && ! (target != 0 && safe_from_p (target, exp, 1)))
716018334Speter		    || TREE_ADDRESSABLE (exp)
716190075Sobrien		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
716290075Sobrien			&& (! MOVE_BY_PIECES_P
716390075Sobrien			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
716490075Sobrien			     TYPE_ALIGN (type)))
7165169689Skan			&& ! mostly_zeros_p (exp))))
7166132718Skan	       || ((modifier == EXPAND_INITIALIZER
7167132718Skan		    || modifier == EXPAND_CONST_ADDRESS)
7168132718Skan		   && TREE_CONSTANT (exp)))
716918334Speter	{
7170169689Skan	  rtx constructor = expand_expr_constant (exp, 1, modifier);
717190075Sobrien
717218334Speter	  if (modifier != EXPAND_CONST_ADDRESS
717318334Speter	      && modifier != EXPAND_INITIALIZER
717490075Sobrien	      && modifier != EXPAND_SUM)
717590075Sobrien	    constructor = validize_mem (constructor);
717690075Sobrien
717718334Speter	  return constructor;
717818334Speter	}
717918334Speter      else
718018334Speter	{
718150397Sobrien	  /* Handle calls that pass values in multiple non-contiguous
718250397Sobrien	     locations.  The Irix 6 ABI has examples of this.  */
718350397Sobrien	  if (target == 0 || ! safe_from_p (target, exp, 1)
7184117395Skan	      || GET_CODE (target) == PARALLEL
7185117395Skan	      || modifier == EXPAND_STACK_PARM)
718690075Sobrien	    target
718790075Sobrien	      = assign_temp (build_qualified_type (type,
718890075Sobrien						   (TYPE_QUALS (type)
718990075Sobrien						    | (TREE_READONLY (exp)
719090075Sobrien						       * TYPE_QUAL_CONST))),
719190075Sobrien			     0, TREE_ADDRESSABLE (exp), 1);
719250397Sobrien
7193102780Skan	  store_constructor (exp, target, 0, int_expr_size (exp));
719418334Speter	  return target;
719518334Speter	}
719618334Speter
7197169689Skan    case MISALIGNED_INDIRECT_REF:
7198169689Skan    case ALIGN_INDIRECT_REF:
719918334Speter    case INDIRECT_REF:
720018334Speter      {
720118334Speter	tree exp1 = TREE_OPERAND (exp, 0);
720290075Sobrien
7203169689Skan	if (modifier != EXPAND_WRITE)
7204169689Skan	  {
7205169689Skan	    tree t;
720618334Speter
7207169689Skan	    t = fold_read_from_constant_string (exp);
7208169689Skan	    if (t)
7209169689Skan	      return expand_expr (t, target, tmode, modifier);
7210169689Skan	  }
7211169689Skan
721250397Sobrien	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
721350397Sobrien	op0 = memory_address (mode, op0);
7214169689Skan
7215169689Skan	if (code == ALIGN_INDIRECT_REF)
7216169689Skan	  {
7217169689Skan	    int align = TYPE_ALIGN_UNIT (type);
7218169689Skan	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7219169689Skan	    op0 = memory_address (mode, op0);
7220169689Skan	  }
7221169689Skan
722250397Sobrien	temp = gen_rtx_MEM (mode, op0);
7223169689Skan
722490075Sobrien	set_mem_attributes (temp, exp, 0);
722570635Sobrien
7226169689Skan	/* Resolve the misalignment now, so that we don't have to remember
7227169689Skan	   to resolve it later.  Of course, this only works for reads.  */
7228169689Skan	/* ??? When we get around to supporting writes, we'll have to handle
7229169689Skan	   this in store_expr directly.  The vectorizer isn't generating
7230169689Skan	   those yet, however.  */
7231169689Skan	if (code == MISALIGNED_INDIRECT_REF)
7232169689Skan	  {
7233169689Skan	    int icode;
7234169689Skan	    rtx reg, insn;
723550397Sobrien
7236169689Skan	    gcc_assert (modifier == EXPAND_NORMAL
7237169689Skan			|| modifier == EXPAND_STACK_PARM);
7238169689Skan
7239169689Skan	    /* The vectorizer should have already checked the mode.  */
7240169689Skan	    icode = movmisalign_optab->handlers[mode].insn_code;
7241169689Skan	    gcc_assert (icode != CODE_FOR_nothing);
7242169689Skan
7243169689Skan	    /* We've already validated the memory, and we're creating a
7244169689Skan	       new pseudo destination.  The predicates really can't fail.  */
7245169689Skan	    reg = gen_reg_rtx (mode);
7246169689Skan
7247169689Skan	    /* Nor can the insn generator.  */
7248169689Skan	    insn = GEN_FCN (icode) (reg, temp);
7249169689Skan	    emit_insn (insn);
7250169689Skan
7251169689Skan	    return reg;
7252169689Skan	  }
7253169689Skan
725418334Speter	return temp;
725518334Speter      }
725618334Speter
7257169689Skan    case TARGET_MEM_REF:
7258169689Skan      {
7259169689Skan	struct mem_address addr;
7260169689Skan
7261169689Skan	get_address_description (exp, &addr);
7262169689Skan	op0 = addr_for_mem_ref (&addr, true);
7263169689Skan	op0 = memory_address (mode, op0);
7264169689Skan	temp = gen_rtx_MEM (mode, op0);
7265169689Skan	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7266169689Skan      }
7267169689Skan      return temp;
7268169689Skan
726918334Speter    case ARRAY_REF:
727018334Speter
727118334Speter      {
727218334Speter	tree array = TREE_OPERAND (exp, 0);
7273169689Skan	tree index = TREE_OPERAND (exp, 1);
727418334Speter
727518334Speter	/* Fold an expression like: "foo"[2].
727618334Speter	   This is not done in fold so it won't happen inside &.
727718334Speter	   Don't fold if this is for wide characters since it's too
727818334Speter	   difficult to do correctly and this is a very rare case.  */
727918334Speter
7280117395Skan	if (modifier != EXPAND_CONST_ADDRESS
7281117395Skan	    && modifier != EXPAND_INITIALIZER
7282169689Skan	    && modifier != EXPAND_MEMORY)
7283169689Skan	  {
7284169689Skan	    tree t = fold_read_from_constant_string (exp);
728518334Speter
7286169689Skan	    if (t)
7287169689Skan	      return expand_expr (t, target, tmode, modifier);
7288169689Skan	  }
7289169689Skan
729018334Speter	/* If this is a constant index into a constant array,
729118334Speter	   just get the value from the array.  Handle both the cases when
729218334Speter	   we have an explicit constructor and when our operand is a variable
729318334Speter	   that was declared const.  */
729418334Speter
7295117395Skan	if (modifier != EXPAND_CONST_ADDRESS
7296117395Skan	    && modifier != EXPAND_INITIALIZER
7297117395Skan	    && modifier != EXPAND_MEMORY
7298117395Skan	    && TREE_CODE (array) == CONSTRUCTOR
7299117395Skan	    && ! TREE_SIDE_EFFECTS (array)
7300169689Skan	    && TREE_CODE (index) == INTEGER_CST)
730118334Speter	  {
7302169689Skan	    unsigned HOST_WIDE_INT ix;
7303169689Skan	    tree field, value;
730418334Speter
7305169689Skan	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7306169689Skan				      field, value)
7307169689Skan	      if (tree_int_cst_equal (field, index))
7308169689Skan		{
7309169689Skan		  if (!TREE_SIDE_EFFECTS (value))
7310169689Skan		    return expand_expr (fold (value), target, tmode, modifier);
7311169689Skan		  break;
7312169689Skan		}
731318334Speter	  }
731490075Sobrien
731518334Speter	else if (optimize >= 1
731690075Sobrien		 && modifier != EXPAND_CONST_ADDRESS
731790075Sobrien		 && modifier != EXPAND_INITIALIZER
7318117395Skan		 && modifier != EXPAND_MEMORY
731918334Speter		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
732018334Speter		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7321132718Skan		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7322132718Skan		 && targetm.binds_local_p (array))
732318334Speter	  {
732450397Sobrien	    if (TREE_CODE (index) == INTEGER_CST)
732518334Speter	      {
732618334Speter		tree init = DECL_INITIAL (array);
732718334Speter
732818334Speter		if (TREE_CODE (init) == CONSTRUCTOR)
732918334Speter		  {
7330169689Skan		    unsigned HOST_WIDE_INT ix;
7331169689Skan		    tree field, value;
733218334Speter
7333169689Skan		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7334169689Skan					      field, value)
7335169689Skan		      if (tree_int_cst_equal (field, index))
7336169689Skan			{
7337169689Skan			  if (!TREE_SIDE_EFFECTS (value))
7338169689Skan			    return expand_expr (fold (value), target, tmode,
7339169689Skan						modifier);
7340169689Skan			  break;
7341169689Skan			}
734218334Speter		  }
7343169689Skan		else if(TREE_CODE (init) == STRING_CST)
734490075Sobrien		  {
7345169689Skan		    tree index1 = index;
7346169689Skan		    tree low_bound = array_ref_low_bound (exp);
7347169689Skan		    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7348259563Spfg
7349169689Skan		    /* Optimize the special-case of a zero lower bound.
7350259563Spfg
7351169689Skan		       We convert the low_bound to sizetype to avoid some problems
7352169689Skan		       with constant folding.  (E.g. suppose the lower bound is 1,
7353169689Skan		       and its mode is QI.  Without the conversion, (ARRAY
7354169689Skan		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7355169689Skan		       +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7356259563Spfg
7357169689Skan		    if (! integer_zerop (low_bound))
7358169689Skan		      index1 = size_diffop (index1, fold_convert (sizetype,
7359169689Skan								  low_bound));
7360259563Spfg
7361169689Skan		    if (0 > compare_tree_int (index1,
7362169689Skan					      TREE_STRING_LENGTH (init)))
7363169689Skan		      {
7364169689Skan			tree type = TREE_TYPE (TREE_TYPE (init));
7365169689Skan			enum machine_mode mode = TYPE_MODE (type);
736690075Sobrien
7367169689Skan			if (GET_MODE_CLASS (mode) == MODE_INT
7368169689Skan			    && GET_MODE_SIZE (mode) == 1)
7369169689Skan			  return gen_int_mode (TREE_STRING_POINTER (init)
7370169689Skan					       [TREE_INT_CST_LOW (index1)],
7371169689Skan					       mode);
7372169689Skan		      }
737390075Sobrien		  }
737418334Speter	      }
737518334Speter	  }
737618334Speter      }
7377132718Skan      goto normal_inner_ref;
737818334Speter
737918334Speter    case COMPONENT_REF:
738018334Speter      /* If the operand is a CONSTRUCTOR, we can just extract the
7381132718Skan	 appropriate field if it is present.  */
7382132718Skan      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
738318334Speter	{
7384169689Skan	  unsigned HOST_WIDE_INT idx;
7385169689Skan	  tree field, value;
738618334Speter
7387169689Skan	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7388169689Skan				    idx, field, value)
7389169689Skan	    if (field == TREE_OPERAND (exp, 1)
739050397Sobrien		/* We can normally use the value of the field in the
739150397Sobrien		   CONSTRUCTOR.  However, if this is a bitfield in
739250397Sobrien		   an integral mode that we can fit in a HOST_WIDE_INT,
739350397Sobrien		   we must mask only the number of bits in the bitfield,
739450397Sobrien		   since this is done implicitly by the constructor.  If
739550397Sobrien		   the bitfield does not meet either of those conditions,
739650397Sobrien		   we can't do this optimization.  */
7397169689Skan		&& (! DECL_BIT_FIELD (field)
7398169689Skan		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7399169689Skan			&& (GET_MODE_BITSIZE (DECL_MODE (field))
740050397Sobrien			    <= HOST_BITS_PER_WIDE_INT))))
740150397Sobrien	      {
7402169689Skan		if (DECL_BIT_FIELD (field)
7403117395Skan		    && modifier == EXPAND_STACK_PARM)
7404117395Skan		  target = 0;
7405169689Skan		op0 = expand_expr (value, target, tmode, modifier);
7406169689Skan		if (DECL_BIT_FIELD (field))
740750397Sobrien		  {
7408169689Skan		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7409169689Skan		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
741050397Sobrien
7411169689Skan		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
741250397Sobrien		      {
741350397Sobrien			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
741496263Sobrien			op0 = expand_and (imode, op0, op1, target);
741550397Sobrien		      }
741650397Sobrien		    else
741750397Sobrien		      {
741850397Sobrien			tree count
7419169689Skan			  = build_int_cst (NULL_TREE,
7420169689Skan					   GET_MODE_BITSIZE (imode) - bitsize);
742150397Sobrien
742250397Sobrien			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
742350397Sobrien					    target, 0);
742450397Sobrien			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
742550397Sobrien					    target, 0);
742650397Sobrien		      }
742750397Sobrien		  }
742850397Sobrien
742950397Sobrien		return op0;
743050397Sobrien	      }
743118334Speter	}
7432132718Skan      goto normal_inner_ref;
743318334Speter
7434132718Skan    case BIT_FIELD_REF:
7435132718Skan    case ARRAY_RANGE_REF:
7436132718Skan    normal_inner_ref:
743718334Speter      {
743818334Speter	enum machine_mode mode1;
743990075Sobrien	HOST_WIDE_INT bitsize, bitpos;
744018334Speter	tree offset;
744118334Speter	int volatilep = 0;
744218334Speter	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7443169689Skan					&mode1, &unsignedp, &volatilep, true);
744490075Sobrien	rtx orig_op0;
744518334Speter
744618334Speter	/* If we got back the original object, something is wrong.  Perhaps
744718334Speter	   we are evaluating an expression too early.  In any event, don't
744818334Speter	   infinitely recurse.  */
7449169689Skan	gcc_assert (tem != exp);
745018334Speter
745150397Sobrien	/* If TEM's type is a union of variable size, pass TARGET to the inner
745218334Speter	   computation, since it will need a temporary and TARGET is known
745318334Speter	   to have to do.  This occurs in unchecked conversion in Ada.  */
745418334Speter
745590075Sobrien	orig_op0 = op0
745690075Sobrien	  = expand_expr (tem,
745790075Sobrien			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
745890075Sobrien			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
745990075Sobrien			      != INTEGER_CST)
7460117395Skan			  && modifier != EXPAND_STACK_PARM
746190075Sobrien			  ? target : NULL_RTX),
746290075Sobrien			 VOIDmode,
746390075Sobrien			 (modifier == EXPAND_INITIALIZER
7464117395Skan			  || modifier == EXPAND_CONST_ADDRESS
7465117395Skan			  || modifier == EXPAND_STACK_PARM)
746690075Sobrien			 ? modifier : EXPAND_NORMAL);
746790075Sobrien
7468169689Skan	/* If this is a constant, put it into a register if it is a legitimate
7469169689Skan	   constant, OFFSET is 0, and we won't try to extract outside the
7470169689Skan	   register (in case we were passed a partially uninitialized object
7471169689Skan	   or a view_conversion to a larger size).  Force the constant to
7472169689Skan	   memory otherwise.  */
747318334Speter	if (CONSTANT_P (op0))
747418334Speter	  {
747518334Speter	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
747690075Sobrien	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7477169689Skan		&& offset == 0
7478169689Skan		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
747918334Speter	      op0 = force_reg (mode, op0);
748018334Speter	    else
748118334Speter	      op0 = validize_mem (force_const_mem (mode, op0));
748218334Speter	  }
748318334Speter
7484169689Skan	/* Otherwise, if this object is not in memory and we either have an
7485169689Skan	   offset, a BLKmode result, or a reference outside the object, put it
7486169689Skan	   there.  Such cases can occur in Ada if we have unchecked conversion
7487169689Skan	   of an expression from a scalar type to an array or record type or
7488169689Skan	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
7489169689Skan	else if (!MEM_P (op0)
7490132718Skan		 && (offset != 0
7491169689Skan		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7492132718Skan		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7493132718Skan	  {
7494169689Skan	    tree nt = build_qualified_type (TREE_TYPE (tem),
7495169689Skan					    (TYPE_QUALS (TREE_TYPE (tem))
7496169689Skan					     | TYPE_QUAL_CONST));
7497169689Skan	    rtx memloc = assign_temp (nt, 1, 1, 1);
7498132718Skan
7499169689Skan	    emit_move_insn (memloc, op0);
7500169689Skan	    op0 = memloc;
7501132718Skan	  }
7502132718Skan
750318334Speter	if (offset != 0)
750418334Speter	  {
7505117395Skan	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7506117395Skan					  EXPAND_SUM);
750718334Speter
7508169689Skan	    gcc_assert (MEM_P (op0));
750950397Sobrien
751050397Sobrien#ifdef POINTERS_EXTEND_UNSIGNED
751190075Sobrien	    if (GET_MODE (offset_rtx) != Pmode)
7512117395Skan	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
751396263Sobrien#else
751496263Sobrien	    if (GET_MODE (offset_rtx) != ptr_mode)
751596263Sobrien	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
751650397Sobrien#endif
751750397Sobrien
7518132718Skan	    if (GET_MODE (op0) == BLKmode
7519132718Skan		/* A constant address in OP0 can have VOIDmode, we must
7520132718Skan		   not try to call force_reg in that case.  */
752152284Sobrien		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
752290075Sobrien		&& bitsize != 0
752390075Sobrien		&& (bitpos % bitsize) == 0
752450397Sobrien		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
752590075Sobrien		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
752650397Sobrien	      {
752796263Sobrien		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
752850397Sobrien		bitpos = 0;
752950397Sobrien	      }
753050397Sobrien
753190075Sobrien	    op0 = offset_address (op0, offset_rtx,
753290075Sobrien				  highest_pow2_factor (offset));
753318334Speter	  }
753418334Speter
753596263Sobrien	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
753696263Sobrien	   record its alignment as BIGGEST_ALIGNMENT.  */
7537169689Skan	if (MEM_P (op0) && bitpos == 0 && offset != 0
753896263Sobrien	    && is_aligning_offset (offset, tem))
753996263Sobrien	  set_mem_align (op0, BIGGEST_ALIGNMENT);
754096263Sobrien
754118334Speter	/* Don't forget about volatility even if this is a bitfield.  */
7542169689Skan	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
754318334Speter	  {
754490075Sobrien	    if (op0 == orig_op0)
754590075Sobrien	      op0 = copy_rtx (op0);
754690075Sobrien
754718334Speter	    MEM_VOLATILE_P (op0) = 1;
754818334Speter	  }
754918334Speter
755096263Sobrien	/* The following code doesn't handle CONCAT.
755196263Sobrien	   Assume only bitpos == 0 can be used for CONCAT, due to
755296263Sobrien	   one element arrays having the same mode as its element.  */
755396263Sobrien	if (GET_CODE (op0) == CONCAT)
755496263Sobrien	  {
7555169689Skan	    gcc_assert (bitpos == 0
7556169689Skan			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
755796263Sobrien	    return op0;
755896263Sobrien	  }
755996263Sobrien
756018334Speter	/* In cases where an aligned union has an unaligned object
756118334Speter	   as a field, we might be extracting a BLKmode value from
756218334Speter	   an integer-mode (e.g., SImode) object.  Handle this case
756318334Speter	   by doing the extract into an object as wide as the field
756418334Speter	   (which we know to be the width of a basic mode), then
756590075Sobrien	   storing into memory, and changing the mode to BLKmode.  */
756618334Speter	if (mode1 == VOIDmode
7567169689Skan	    || REG_P (op0) || GET_CODE (op0) == SUBREG
756890075Sobrien	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
756990075Sobrien		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
757090075Sobrien		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
757190075Sobrien		&& modifier != EXPAND_CONST_ADDRESS
757290075Sobrien		&& modifier != EXPAND_INITIALIZER)
757390075Sobrien	    /* If the field isn't aligned enough to fetch as a memref,
757490075Sobrien	       fetch it as a bit field.  */
757590075Sobrien	    || (mode1 != BLKmode
7576132718Skan		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7577132718Skan		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7578169689Skan		      || (MEM_P (op0)
7579132718Skan			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7580132718Skan			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7581132718Skan		     && ((modifier == EXPAND_CONST_ADDRESS
7582132718Skan			  || modifier == EXPAND_INITIALIZER)
7583132718Skan			 ? STRICT_ALIGNMENT
7584132718Skan			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7585132718Skan		    || (bitpos % BITS_PER_UNIT != 0)))
758690075Sobrien	    /* If the type and the field are a constant size and the
758790075Sobrien	       size of the type isn't the same size as the bitfield,
758890075Sobrien	       we must use bitfield operations.  */
758990075Sobrien	    || (bitsize >= 0
7590169689Skan		&& TYPE_SIZE (TREE_TYPE (exp))
7591169689Skan		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
759290075Sobrien		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
759390075Sobrien					  bitsize)))
759418334Speter	  {
759518334Speter	    enum machine_mode ext_mode = mode;
759618334Speter
759790075Sobrien	    if (ext_mode == BLKmode
7598169689Skan		&& ! (target != 0 && MEM_P (op0)
7599169689Skan		      && MEM_P (target)
760090075Sobrien		      && bitpos % BITS_PER_UNIT == 0))
760118334Speter	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
760218334Speter
760318334Speter	    if (ext_mode == BLKmode)
760450397Sobrien	      {
7605132718Skan		if (target == 0)
7606132718Skan		  target = assign_temp (type, 0, 1, 1);
7607132718Skan
7608132718Skan		if (bitsize == 0)
7609132718Skan		  return target;
7610132718Skan
761150397Sobrien		/* In this case, BITPOS must start at a byte boundary and
761250397Sobrien		   TARGET, if specified, must be a MEM.  */
7613169689Skan		gcc_assert (MEM_P (op0)
7614169689Skan			    && (!target || MEM_P (target))
7615169689Skan			    && !(bitpos % BITS_PER_UNIT));
761618334Speter
7617132718Skan		emit_block_move (target,
7618132718Skan				 adjust_address (op0, VOIDmode,
7619132718Skan						 bitpos / BITS_PER_UNIT),
762050397Sobrien				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7621117395Skan					  / BITS_PER_UNIT),
7622117395Skan				 (modifier == EXPAND_STACK_PARM
7623117395Skan				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
762490075Sobrien
762550397Sobrien		return target;
762650397Sobrien	      }
762750397Sobrien
762850397Sobrien	    op0 = validize_mem (op0);
762950397Sobrien
7630169689Skan	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
763190075Sobrien	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
763250397Sobrien
7633117395Skan	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7634117395Skan				     (modifier == EXPAND_STACK_PARM
7635117395Skan				      ? NULL_RTX : target),
7636169689Skan				     ext_mode, ext_mode);
763750397Sobrien
763850397Sobrien	    /* If the result is a record type and BITSIZE is narrower than
763950397Sobrien	       the mode of OP0, an integral mode, and this is a big endian
764050397Sobrien	       machine, we must put the field into the high-order bits.  */
764150397Sobrien	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
764250397Sobrien		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
764390075Sobrien		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
764450397Sobrien	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
764550397Sobrien				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
764650397Sobrien					    - bitsize),
764750397Sobrien				  op0, 1);
764850397Sobrien
7649169689Skan	    /* If the result type is BLKmode, store the data into a temporary
7650169689Skan	       of the appropriate type, but with the mode corresponding to the
7651169689Skan	       mode for the data we have (op0's mode).  It's tempting to make
7652169689Skan	       this a constant type, since we know it's only being stored once,
7653169689Skan	       but that can cause problems if we are taking the address of this
7654169689Skan	       COMPONENT_REF because the MEM of any reference via that address
7655169689Skan	       will have flags corresponding to the type, which will not
7656169689Skan	       necessarily be constant.  */
765718334Speter	    if (mode == BLKmode)
765818334Speter	      {
7659169689Skan		rtx new
7660169689Skan		  = assign_stack_temp_for_type
7661169689Skan		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
766218334Speter
766318334Speter		emit_move_insn (new, op0);
766418334Speter		op0 = copy_rtx (new);
766518334Speter		PUT_MODE (op0, BLKmode);
766690075Sobrien		set_mem_attributes (op0, exp, 1);
766718334Speter	      }
766818334Speter
766918334Speter	    return op0;
767018334Speter	  }
767118334Speter
767250397Sobrien	/* If the result is BLKmode, use that to access the object
767350397Sobrien	   now as well.  */
767450397Sobrien	if (mode == BLKmode)
767550397Sobrien	  mode1 = BLKmode;
767650397Sobrien
767718334Speter	/* Get a reference to just this component.  */
767818334Speter	if (modifier == EXPAND_CONST_ADDRESS
767918334Speter	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
768090075Sobrien	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
768118334Speter	else
768290075Sobrien	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
768350397Sobrien
768490075Sobrien	if (op0 == orig_op0)
768590075Sobrien	  op0 = copy_rtx (op0);
768650397Sobrien
768790075Sobrien	set_mem_attributes (op0, exp, 0);
7688169689Skan	if (REG_P (XEXP (op0, 0)))
768990075Sobrien	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
769050397Sobrien
769118334Speter	MEM_VOLATILE_P (op0) |= volatilep;
769250397Sobrien	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
769350397Sobrien	    || modifier == EXPAND_CONST_ADDRESS
769450397Sobrien	    || modifier == EXPAND_INITIALIZER)
769518334Speter	  return op0;
769650397Sobrien	else if (target == 0)
769718334Speter	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
769850397Sobrien
769918334Speter	convert_move (target, op0, unsignedp);
770018334Speter	return target;
770118334Speter      }
770218334Speter
7703169689Skan    case OBJ_TYPE_REF:
7704169689Skan      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
770590075Sobrien
770618334Speter    case CALL_EXPR:
770718334Speter      /* Check for a built-in function.  */
770818334Speter      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
770918334Speter	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
771018334Speter	      == FUNCTION_DECL)
771118334Speter	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7712117395Skan	{
771390075Sobrien	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
771490075Sobrien	      == BUILT_IN_FRONTEND)
7715169689Skan	    return lang_hooks.expand_expr (exp, original_target,
7716169689Skan					   tmode, modifier,
7717169689Skan					   alt_rtl);
771890075Sobrien	  else
771990075Sobrien	    return expand_builtin (exp, target, subtarget, tmode, ignore);
772090075Sobrien	}
772118334Speter
772218334Speter      return expand_call (exp, target, ignore);
772318334Speter
772418334Speter    case NON_LVALUE_EXPR:
772518334Speter    case NOP_EXPR:
772618334Speter    case CONVERT_EXPR:
772790075Sobrien      if (TREE_OPERAND (exp, 0) == error_mark_node)
772890075Sobrien	return const0_rtx;
772990075Sobrien
773018334Speter      if (TREE_CODE (type) == UNION_TYPE)
773118334Speter	{
773218334Speter	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
773390075Sobrien
773490075Sobrien	  /* If both input and output are BLKmode, this conversion isn't doing
773590075Sobrien	     anything except possibly changing memory attribute.  */
773690075Sobrien	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
773718334Speter	    {
773890075Sobrien	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
773990075Sobrien					modifier);
774090075Sobrien
774190075Sobrien	      result = copy_rtx (result);
774290075Sobrien	      set_mem_attributes (result, exp, 0);
774390075Sobrien	      return result;
774418334Speter	    }
774518334Speter
774690075Sobrien	  if (target == 0)
7747132718Skan	    {
7748132718Skan	      if (TYPE_MODE (type) != BLKmode)
7749132718Skan		target = gen_reg_rtx (TYPE_MODE (type));
7750132718Skan	      else
7751132718Skan		target = assign_temp (type, 0, 1, 1);
7752132718Skan	    }
775390075Sobrien
7754169689Skan	  if (MEM_P (target))
775518334Speter	    /* Store data into beginning of memory target.  */
775618334Speter	    store_expr (TREE_OPERAND (exp, 0),
7757117395Skan			adjust_address (target, TYPE_MODE (valtype), 0),
7758169689Skan			modifier == EXPAND_STACK_PARM);
775918334Speter
776018334Speter	  else
7761169689Skan	    {
7762169689Skan	      gcc_assert (REG_P (target));
7763259563Spfg
7764169689Skan	      /* Store this field into a union of the proper type.  */
7765169689Skan	      store_field (target,
7766169689Skan			   MIN ((int_size_in_bytes (TREE_TYPE
7767169689Skan						    (TREE_OPERAND (exp, 0)))
7768169689Skan				 * BITS_PER_UNIT),
7769169689Skan				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7770169689Skan			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7771169689Skan			   type, 0);
7772169689Skan	    }
777318334Speter
777418334Speter	  /* Return the entire union.  */
777518334Speter	  return target;
777618334Speter	}
777718334Speter
777818334Speter      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
777918334Speter	{
778018334Speter	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
778190075Sobrien			     modifier);
778218334Speter
778318334Speter	  /* If the signedness of the conversion differs and OP0 is
778418334Speter	     a promoted SUBREG, clear that indication since we now
778518334Speter	     have to do the proper extension.  */
7786169689Skan	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
778718334Speter	      && GET_CODE (op0) == SUBREG)
778818334Speter	    SUBREG_PROMOTED_VAR_P (op0) = 0;
778918334Speter
7790169689Skan	  return REDUCE_BIT_FIELD (op0);
779118334Speter	}
779218334Speter
7793169689Skan      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7794169689Skan			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
779518334Speter      if (GET_MODE (op0) == mode)
7796169689Skan	;
779718334Speter
779818334Speter      /* If OP0 is a constant, just convert it into the proper mode.  */
7799169689Skan      else if (CONSTANT_P (op0))
780096263Sobrien	{
780196263Sobrien	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
780296263Sobrien	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
780318334Speter
7804117395Skan	  if (modifier == EXPAND_INITIALIZER)
7805169689Skan	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
7806169689Skan				       subreg_lowpart_offset (mode,
7807169689Skan							      inner_mode));
780896263Sobrien	  else
7809169689Skan	    op0=  convert_modes (mode, inner_mode, op0,
7810169689Skan				 TYPE_UNSIGNED (inner_type));
781196263Sobrien	}
781296263Sobrien
7813169689Skan      else if (modifier == EXPAND_INITIALIZER)
7814169689Skan	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
781518334Speter
7816169689Skan      else if (target == 0)
7817169689Skan	op0 = convert_to_mode (mode, op0,
7818169689Skan			       TYPE_UNSIGNED (TREE_TYPE
7819169689Skan					      (TREE_OPERAND (exp, 0))));
782018334Speter      else
7821169689Skan	{
7822169689Skan	  convert_move (target, op0,
7823169689Skan			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7824169689Skan	  op0 = target;
7825169689Skan	}
782618334Speter
7827169689Skan      return REDUCE_BIT_FIELD (op0);
7828169689Skan
782990075Sobrien    case VIEW_CONVERT_EXPR:
783090075Sobrien      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
783190075Sobrien
7832169689Skan      /* If the input and output modes are both the same, we are done.  */
783390075Sobrien      if (TYPE_MODE (type) == GET_MODE (op0))
783490075Sobrien	;
7835169689Skan      /* If neither mode is BLKmode, and both modes are the same size
7836169689Skan	 then we can use gen_lowpart.  */
783790075Sobrien      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7838169689Skan	       && GET_MODE_SIZE (TYPE_MODE (type))
7839169689Skan		   == GET_MODE_SIZE (GET_MODE (op0)))
784090075Sobrien	{
7841169689Skan	  if (GET_CODE (op0) == SUBREG)
7842169689Skan	    op0 = force_reg (GET_MODE (op0), op0);
7843169689Skan	  op0 = gen_lowpart (TYPE_MODE (type), op0);
7844169689Skan	}
7845169689Skan      /* If both modes are integral, then we can convert from one to the
7846169689Skan	 other.  */
7847169689Skan      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7848169689Skan	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7849259563Spfg	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7850169689Skan			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7851259563Spfg      /* As a last resort, spill op0 to memory, and reload it in a
7852169689Skan	 different mode.  */
7853169689Skan      else if (!MEM_P (op0))
7854169689Skan	{
785590075Sobrien	  /* If the operand is not a MEM, force it into memory.  Since we
7856169689Skan	     are going to be changing the mode of the MEM, don't call
785790075Sobrien	     force_const_mem for constants because we don't allow pool
785890075Sobrien	     constants to change mode.  */
785990075Sobrien	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
786090075Sobrien
7861169689Skan	  gcc_assert (!TREE_ADDRESSABLE (exp));
786290075Sobrien
786390075Sobrien	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
786490075Sobrien	    target
786590075Sobrien	      = assign_stack_temp_for_type
786690075Sobrien		(TYPE_MODE (inner_type),
786790075Sobrien		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
786890075Sobrien
786990075Sobrien	  emit_move_insn (target, op0);
787090075Sobrien	  op0 = target;
787190075Sobrien	}
787290075Sobrien
787390075Sobrien      /* At this point, OP0 is in the correct mode.  If the output type is such
787490075Sobrien	 that the operand is known to be aligned, indicate that it is.
787590075Sobrien	 Otherwise, we need only be concerned about alignment for non-BLKmode
787690075Sobrien	 results.  */
7877169689Skan      if (MEM_P (op0))
787890075Sobrien	{
787990075Sobrien	  op0 = copy_rtx (op0);
788090075Sobrien
788190075Sobrien	  if (TYPE_ALIGN_OK (type))
788290075Sobrien	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
788390075Sobrien	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
788490075Sobrien		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
788590075Sobrien	    {
788690075Sobrien	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
788790075Sobrien	      HOST_WIDE_INT temp_size
788890075Sobrien		= MAX (int_size_in_bytes (inner_type),
788990075Sobrien		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
789090075Sobrien	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
789190075Sobrien						    temp_size, 0, type);
789290075Sobrien	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
789390075Sobrien
7894169689Skan	      gcc_assert (!TREE_ADDRESSABLE (exp));
789590075Sobrien
789690075Sobrien	      if (GET_MODE (op0) == BLKmode)
789790075Sobrien		emit_block_move (new_with_op0_mode, op0,
7898117395Skan				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7899117395Skan				 (modifier == EXPAND_STACK_PARM
7900117395Skan				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
790190075Sobrien	      else
790290075Sobrien		emit_move_insn (new_with_op0_mode, op0);
790390075Sobrien
790490075Sobrien	      op0 = new;
790590075Sobrien	    }
7906117395Skan
790790075Sobrien	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
790890075Sobrien	}
790990075Sobrien
791090075Sobrien      return op0;
791190075Sobrien
791218334Speter    case PLUS_EXPR:
7913169689Skan      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
791418334Speter	 something else, make sure we add the register to the constant and
791518334Speter	 then to the other thing.  This case can occur during strength
791618334Speter	 reduction and doing it this way will produce better code if the
791718334Speter	 frame pointer or argument pointer is eliminated.
791818334Speter
791918334Speter	 fold-const.c will ensure that the constant is always in the inner
792018334Speter	 PLUS_EXPR, so the only case we need to do anything about is if
792118334Speter	 sp, ap, or fp is our second argument, in which case we must swap
792218334Speter	 the innermost first argument and our second argument.  */
792318334Speter
792418334Speter      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
792518334Speter	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7926169689Skan	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7927169689Skan	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7928169689Skan	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7929169689Skan	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
793018334Speter	{
793118334Speter	  tree t = TREE_OPERAND (exp, 1);
793218334Speter
793318334Speter	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
793418334Speter	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
793518334Speter	}
793618334Speter
793718334Speter      /* If the result is to be ptr_mode and we are adding an integer to
793818334Speter	 something, we might be forming a constant.  So try to use
793918334Speter	 plus_constant.  If it produces a sum and we can't accept it,
794018334Speter	 use force_operand.  This allows P = &ARR[const] to generate
794118334Speter	 efficient code on machines where a SYMBOL_REF is not a valid
794218334Speter	 address.
794318334Speter
794418334Speter	 If this is an EXPAND_SUM call, always return the sum.  */
794518334Speter      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7946117395Skan	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
794718334Speter	{
7948117395Skan	  if (modifier == EXPAND_STACK_PARM)
7949117395Skan	    target = 0;
795018334Speter	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
795118334Speter	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
795218334Speter	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
795318334Speter	    {
795490075Sobrien	      rtx constant_part;
795590075Sobrien
795618334Speter	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
795718334Speter				 EXPAND_SUM);
795890075Sobrien	      /* Use immed_double_const to ensure that the constant is
795990075Sobrien		 truncated according to the mode of OP1, then sign extended
796090075Sobrien		 to a HOST_WIDE_INT.  Using the constant directly can result
796190075Sobrien		 in non-canonical RTL in a 64x32 cross compile.  */
796290075Sobrien	      constant_part
796390075Sobrien		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
796490075Sobrien				      (HOST_WIDE_INT) 0,
796590075Sobrien				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
796690075Sobrien	      op1 = plus_constant (op1, INTVAL (constant_part));
796718334Speter	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
796818334Speter		op1 = force_operand (op1, target);
7969169689Skan	      return REDUCE_BIT_FIELD (op1);
797018334Speter	    }
797118334Speter
797218334Speter	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7973169689Skan		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
797418334Speter		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
797518334Speter	    {
797690075Sobrien	      rtx constant_part;
797790075Sobrien
797818334Speter	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
797996263Sobrien				 (modifier == EXPAND_INITIALIZER
798096263Sobrien				 ? EXPAND_INITIALIZER : EXPAND_SUM));
798118334Speter	      if (! CONSTANT_P (op0))
798218334Speter		{
798318334Speter		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
798418334Speter				     VOIDmode, modifier);
7985132718Skan		  /* Return a PLUS if modifier says it's OK.  */
7986132718Skan		  if (modifier == EXPAND_SUM
7987132718Skan		      || modifier == EXPAND_INITIALIZER)
7988132718Skan		    return simplify_gen_binary (PLUS, mode, op0, op1);
7989132718Skan		  goto binop2;
799018334Speter		}
799190075Sobrien	      /* Use immed_double_const to ensure that the constant is
799290075Sobrien		 truncated according to the mode of OP1, then sign extended
799390075Sobrien		 to a HOST_WIDE_INT.  Using the constant directly can result
799490075Sobrien		 in non-canonical RTL in a 64x32 cross compile.  */
799590075Sobrien	      constant_part
799690075Sobrien		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
799790075Sobrien				      (HOST_WIDE_INT) 0,
799890075Sobrien				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
799990075Sobrien	      op0 = plus_constant (op0, INTVAL (constant_part));
800018334Speter	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
800118334Speter		op0 = force_operand (op0, target);
8002169689Skan	      return REDUCE_BIT_FIELD (op0);
800318334Speter	    }
800418334Speter	}
800518334Speter
800618334Speter      /* No sense saving up arithmetic to be done
800718334Speter	 if it's all in the wrong mode to form part of an address.
800818334Speter	 And force_operand won't know whether to sign-extend or
800918334Speter	 zero-extend.  */
801018334Speter      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
801118334Speter	  || mode != ptr_mode)
8012104752Skan	{
8013132718Skan	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8014132718Skan			   subtarget, &op0, &op1, 0);
8015107590Sobrien	  if (op0 == const0_rtx)
8016107590Sobrien	    return op1;
8017107590Sobrien	  if (op1 == const0_rtx)
8018107590Sobrien	    return op0;
8019104752Skan	  goto binop2;
8020104752Skan	}
802118334Speter
8022132718Skan      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8023132718Skan		       subtarget, &op0, &op1, modifier);
8024169689Skan      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
802518334Speter
802618334Speter    case MINUS_EXPR:
802718334Speter      /* For initializers, we are allowed to return a MINUS of two
802818334Speter	 symbolic constants.  Here we handle all cases when both operands
802918334Speter	 are constant.  */
803018334Speter      /* Handle difference of two symbolic constants,
803118334Speter	 for the sake of an initializer.  */
803218334Speter      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
803318334Speter	  && really_constant_p (TREE_OPERAND (exp, 0))
803418334Speter	  && really_constant_p (TREE_OPERAND (exp, 1)))
803518334Speter	{
8036132718Skan	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8037132718Skan			   NULL_RTX, &op0, &op1, modifier);
803818334Speter
803918334Speter	  /* If the last operand is a CONST_INT, use plus_constant of
804018334Speter	     the negated constant.  Else make the MINUS.  */
804118334Speter	  if (GET_CODE (op1) == CONST_INT)
8042169689Skan	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
804318334Speter	  else
8044169689Skan	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
804518334Speter	}
804618334Speter
8047117395Skan      /* No sense saving up arithmetic to be done
8048117395Skan	 if it's all in the wrong mode to form part of an address.
8049117395Skan	 And force_operand won't know whether to sign-extend or
8050117395Skan	 zero-extend.  */
8051117395Skan      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8052117395Skan	  || mode != ptr_mode)
8053117395Skan	goto binop;
8054117395Skan
8055132718Skan      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8056132718Skan		       subtarget, &op0, &op1, modifier);
8057117395Skan
8058117395Skan      /* Convert A - const to A + (-const).  */
8059117395Skan      if (GET_CODE (op1) == CONST_INT)
8060117395Skan	{
8061117395Skan	  op1 = negate_rtx (mode, op1);
8062169689Skan	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8063117395Skan	}
8064117395Skan
8065117395Skan      goto binop2;
8066117395Skan
806718334Speter    case MULT_EXPR:
806818334Speter      /* If first operand is constant, swap them.
806918334Speter	 Thus the following special case checks need only
807018334Speter	 check the second operand.  */
807118334Speter      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
807218334Speter	{
807390075Sobrien	  tree t1 = TREE_OPERAND (exp, 0);
807418334Speter	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
807518334Speter	  TREE_OPERAND (exp, 1) = t1;
807618334Speter	}
807718334Speter
807818334Speter      /* Attempt to return something suitable for generating an
807918334Speter	 indexed address, for machines that support that.  */
808018334Speter
808118334Speter      if (modifier == EXPAND_SUM && mode == ptr_mode
808296263Sobrien	  && host_integerp (TREE_OPERAND (exp, 1), 0))
808318334Speter	{
8084117395Skan	  tree exp1 = TREE_OPERAND (exp, 1);
8085117395Skan
808650397Sobrien	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
808750397Sobrien			     EXPAND_SUM);
808818334Speter
8089169689Skan	  if (!REG_P (op0))
809018334Speter	    op0 = force_operand (op0, NULL_RTX);
8091169689Skan	  if (!REG_P (op0))
809218334Speter	    op0 = copy_to_mode_reg (mode, op0);
809318334Speter
8094169689Skan	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8095117395Skan			       gen_int_mode (tree_low_cst (exp1, 0),
8096169689Skan					     TYPE_MODE (TREE_TYPE (exp1)))));
809718334Speter	}
809818334Speter
8099117395Skan      if (modifier == EXPAND_STACK_PARM)
8100117395Skan	target = 0;
8101117395Skan
810218334Speter      /* Check for multiplying things that have been extended
810318334Speter	 from a narrower type.  If this machine supports multiplying
810418334Speter	 in that narrower type with a result in the desired type,
810518334Speter	 do it that way, and avoid the explicit type-conversion.  */
8106169689Skan
8107169689Skan      subexp0 = TREE_OPERAND (exp, 0);
8108169689Skan      subexp1 = TREE_OPERAND (exp, 1);
8109169689Skan      /* First, check if we have a multiplication of one signed and one
8110169689Skan	 unsigned operand.  */
8111169689Skan      if (TREE_CODE (subexp0) == NOP_EXPR
8112169689Skan	  && TREE_CODE (subexp1) == NOP_EXPR
811318334Speter	  && TREE_CODE (type) == INTEGER_TYPE
8114169689Skan	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8115169689Skan	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8116169689Skan	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8117169689Skan	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8118169689Skan	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8119169689Skan	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8120169689Skan	{
8121169689Skan	  enum machine_mode innermode
8122169689Skan	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8123169689Skan	  this_optab = usmul_widen_optab;
8124169689Skan	  if (mode == GET_MODE_WIDER_MODE (innermode))
8125169689Skan	    {
8126169689Skan	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8127169689Skan		{
8128169689Skan		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8129169689Skan		    expand_operands (TREE_OPERAND (subexp0, 0),
8130169689Skan				     TREE_OPERAND (subexp1, 0),
8131169689Skan				     NULL_RTX, &op0, &op1, 0);
8132169689Skan		  else
8133169689Skan		    expand_operands (TREE_OPERAND (subexp0, 0),
8134169689Skan				     TREE_OPERAND (subexp1, 0),
8135169689Skan				     NULL_RTX, &op1, &op0, 0);
8136169689Skan
8137169689Skan		  goto binop3;
8138169689Skan		}
8139169689Skan	    }
8140169689Skan	}
8141169689Skan      /* Check for a multiplication with matching signedness.  */
8142169689Skan      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8143169689Skan	  && TREE_CODE (type) == INTEGER_TYPE
814418334Speter	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
814518334Speter	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
814618334Speter	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
814718334Speter	       && int_fits_type_p (TREE_OPERAND (exp, 1),
814818334Speter				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
814918334Speter	       /* Don't use a widening multiply if a shift will do.  */
815018334Speter	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
815118334Speter		    > HOST_BITS_PER_WIDE_INT)
815218334Speter		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
815318334Speter	      ||
815418334Speter	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8155169689Skan	       && (TYPE_PRECISION (TREE_TYPE
8156169689Skan				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8157169689Skan		   == TYPE_PRECISION (TREE_TYPE
8158169689Skan				      (TREE_OPERAND
8159169689Skan				       (TREE_OPERAND (exp, 0), 0))))
816018334Speter	       /* If both operands are extended, they must either both
816118334Speter		  be zero-extended or both be sign-extended.  */
8162169689Skan	       && (TYPE_UNSIGNED (TREE_TYPE
8163169689Skan				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8164169689Skan		   == TYPE_UNSIGNED (TREE_TYPE
8165169689Skan				     (TREE_OPERAND
8166169689Skan				      (TREE_OPERAND (exp, 0), 0)))))))
816718334Speter	{
8168169689Skan	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8169169689Skan	  enum machine_mode innermode = TYPE_MODE (op0type);
8170169689Skan	  bool zextend_p = TYPE_UNSIGNED (op0type);
8171169689Skan	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8172169689Skan	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8173169689Skan
8174169689Skan	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
817518334Speter	    {
817650397Sobrien	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
817750397Sobrien		{
817850397Sobrien		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8179132718Skan		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8180132718Skan				     TREE_OPERAND (exp, 1),
8181169689Skan				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
818250397Sobrien		  else
8183132718Skan		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8184132718Skan				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8185169689Skan				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8186169689Skan		  goto binop3;
818750397Sobrien		}
818850397Sobrien	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
818950397Sobrien		       && innermode == word_mode)
819050397Sobrien		{
8191169689Skan		  rtx htem, hipart;
8192169689Skan		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
819350397Sobrien		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
819490075Sobrien		    op1 = convert_modes (innermode, mode,
8195169689Skan					 expand_normal (TREE_OPERAND (exp, 1)),
819690075Sobrien					 unsignedp);
819750397Sobrien		  else
8198169689Skan		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
819950397Sobrien		  temp = expand_binop (mode, other_optab, op0, op1, target,
820050397Sobrien				       unsignedp, OPTAB_LIB_WIDEN);
8201169689Skan		  hipart = gen_highpart (innermode, temp);
8202169689Skan		  htem = expand_mult_highpart_adjust (innermode, hipart,
8203169689Skan						      op0, op1, hipart,
8204169689Skan						      zextend_p);
8205169689Skan		  if (htem != hipart)
8206169689Skan		    emit_move_insn (hipart, htem);
8207169689Skan		  return REDUCE_BIT_FIELD (temp);
820850397Sobrien		}
820918334Speter	    }
821018334Speter	}
8211132718Skan      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8212132718Skan		       subtarget, &op0, &op1, 0);
8213169689Skan      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
821418334Speter
821518334Speter    case TRUNC_DIV_EXPR:
821618334Speter    case FLOOR_DIV_EXPR:
821718334Speter    case CEIL_DIV_EXPR:
821818334Speter    case ROUND_DIV_EXPR:
821918334Speter    case EXACT_DIV_EXPR:
8220117395Skan      if (modifier == EXPAND_STACK_PARM)
8221117395Skan	target = 0;
822218334Speter      /* Possible optimization: compute the dividend with EXPAND_SUM
822318334Speter	 then if the divisor is constant can optimize the case
822418334Speter	 where some terms of the dividend have coeffs divisible by it.  */
8225132718Skan      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8226132718Skan		       subtarget, &op0, &op1, 0);
822718334Speter      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
822818334Speter
822918334Speter    case RDIV_EXPR:
823018334Speter      goto binop;
823118334Speter
823218334Speter    case TRUNC_MOD_EXPR:
823318334Speter    case FLOOR_MOD_EXPR:
823418334Speter    case CEIL_MOD_EXPR:
823518334Speter    case ROUND_MOD_EXPR:
8236117395Skan      if (modifier == EXPAND_STACK_PARM)
8237117395Skan	target = 0;
8238132718Skan      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8239132718Skan		       subtarget, &op0, &op1, 0);
824018334Speter      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
824118334Speter
824218334Speter    case FIX_ROUND_EXPR:
824318334Speter    case FIX_FLOOR_EXPR:
824418334Speter    case FIX_CEIL_EXPR:
8245169689Skan      gcc_unreachable ();			/* Not used for C.  */
824618334Speter
824718334Speter    case FIX_TRUNC_EXPR:
8248169689Skan      op0 = expand_normal (TREE_OPERAND (exp, 0));
8249117395Skan      if (target == 0 || modifier == EXPAND_STACK_PARM)
825018334Speter	target = gen_reg_rtx (mode);
825118334Speter      expand_fix (target, op0, unsignedp);
825218334Speter      return target;
825318334Speter
825418334Speter    case FLOAT_EXPR:
8255169689Skan      op0 = expand_normal (TREE_OPERAND (exp, 0));
8256117395Skan      if (target == 0 || modifier == EXPAND_STACK_PARM)
825718334Speter	target = gen_reg_rtx (mode);
825818334Speter      /* expand_float can't figure out what to do if FROM has VOIDmode.
825918334Speter	 So give it the correct mode.  With -O, cse will optimize this.  */
826018334Speter      if (GET_MODE (op0) == VOIDmode)
826118334Speter	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
826218334Speter				op0);
826318334Speter      expand_float (target, op0,
8264169689Skan		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
826518334Speter      return target;
826618334Speter
826718334Speter    case NEGATE_EXPR:
826818334Speter      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8269117395Skan      if (modifier == EXPAND_STACK_PARM)
8270117395Skan	target = 0;
827190075Sobrien      temp = expand_unop (mode,
8272169689Skan      			  optab_for_tree_code (NEGATE_EXPR, type),
8273169689Skan			  op0, target, 0);
8274169689Skan      gcc_assert (temp);
8275169689Skan      return REDUCE_BIT_FIELD (temp);
827618334Speter
827718334Speter    case ABS_EXPR:
827818334Speter      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8279117395Skan      if (modifier == EXPAND_STACK_PARM)
8280117395Skan	target = 0;
828118334Speter
8282132718Skan      /* ABS_EXPR is not valid for complex arguments.  */
8283169689Skan      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8284169689Skan		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
828518334Speter
828618334Speter      /* Unsigned abs is simply the operand.  Testing here means we don't
828718334Speter	 risk generating incorrect code below.  */
8288169689Skan      if (TYPE_UNSIGNED (type))
828918334Speter	return op0;
829018334Speter
829190075Sobrien      return expand_abs (mode, op0, target, unsignedp,
829250397Sobrien			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
829318334Speter
829418334Speter    case MAX_EXPR:
829518334Speter    case MIN_EXPR:
829618334Speter      target = original_target;
8297117395Skan      if (target == 0
8298117395Skan	  || modifier == EXPAND_STACK_PARM
8299169689Skan	  || (MEM_P (target) && MEM_VOLATILE_P (target))
830018334Speter	  || GET_MODE (target) != mode
8301169689Skan	  || (REG_P (target)
830218334Speter	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
830318334Speter	target = gen_reg_rtx (mode);
8304132718Skan      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8305132718Skan		       target, &op0, &op1, 0);
830618334Speter
830718334Speter      /* First try to do it with a special MIN or MAX instruction.
830818334Speter	 If that does not win, use a conditional jump to select the proper
830918334Speter	 value.  */
8310169689Skan      this_optab = optab_for_tree_code (code, type);
831118334Speter      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
831218334Speter			   OPTAB_WIDEN);
831318334Speter      if (temp != 0)
831418334Speter	return temp;
831518334Speter
831618334Speter      /* At this point, a MEM target is no longer useful; we will get better
831718334Speter	 code without it.  */
831890075Sobrien
8319146895Skan      if (! REG_P (target))
832018334Speter	target = gen_reg_rtx (mode);
832118334Speter
8322132718Skan      /* If op1 was placed in target, swap op0 and op1.  */
8323132718Skan      if (target != op0 && target == op1)
8324132718Skan	{
8325169689Skan	  temp = op0;
8326132718Skan	  op0 = op1;
8327169689Skan	  op1 = temp;
8328132718Skan	}
8329132718Skan
8330146895Skan      /* We generate better code and avoid problems with op1 mentioning
8331146895Skan	 target by forcing op1 into a pseudo if it isn't a constant.  */
8332146895Skan      if (! CONSTANT_P (op1))
8333146895Skan	op1 = force_reg (mode, op1);
8334146895Skan
8335169689Skan      {
8336169689Skan	enum rtx_code comparison_code;
8337169689Skan	rtx cmpop1 = op1;
833818334Speter
8339169689Skan	if (code == MAX_EXPR)
8340169689Skan	  comparison_code = unsignedp ? GEU : GE;
8341169689Skan	else
8342169689Skan	  comparison_code = unsignedp ? LEU : LE;
834318334Speter
8344169689Skan	/* Canonicalize to comparisons against 0.  */
8345169689Skan	if (op1 == const1_rtx)
8346169689Skan	  {
8347169689Skan	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8348169689Skan	       or (a != 0 ? a : 1) for unsigned.
8349169689Skan	       For MIN we are safe converting (a <= 1 ? a : 1)
8350169689Skan	       into (a <= 0 ? a : 1)  */
8351169689Skan	    cmpop1 = const0_rtx;
8352169689Skan	    if (code == MAX_EXPR)
8353169689Skan	      comparison_code = unsignedp ? NE : GT;
8354169689Skan	  }
8355169689Skan	if (op1 == constm1_rtx && !unsignedp)
8356169689Skan	  {
8357169689Skan	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8358169689Skan	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8359169689Skan	    cmpop1 = const0_rtx;
8360169689Skan	    if (code == MIN_EXPR)
8361169689Skan	      comparison_code = LT;
8362169689Skan	  }
8363169689Skan#ifdef HAVE_conditional_move
8364169689Skan	/* Use a conditional move if possible.  */
8365169689Skan	if (can_conditionally_move_p (mode))
8366169689Skan	  {
8367169689Skan	    rtx insn;
8368169689Skan
8369169689Skan	    /* ??? Same problem as in expmed.c: emit_conditional_move
8370169689Skan	       forces a stack adjustment via compare_from_rtx, and we
8371169689Skan	       lose the stack adjustment if the sequence we are about
8372169689Skan	       to create is discarded.  */
8373169689Skan	    do_pending_stack_adjust ();
8374169689Skan
8375169689Skan	    start_sequence ();
8376169689Skan
8377169689Skan	    /* Try to emit the conditional move.  */
8378169689Skan	    insn = emit_conditional_move (target, comparison_code,
8379169689Skan					  op0, cmpop1, mode,
8380169689Skan					  op0, op1, mode,
8381169689Skan					  unsignedp);
8382169689Skan
8383169689Skan	    /* If we could do the conditional move, emit the sequence,
8384169689Skan	       and return.  */
8385169689Skan	    if (insn)
8386169689Skan	      {
8387169689Skan		rtx seq = get_insns ();
8388169689Skan		end_sequence ();
8389169689Skan		emit_insn (seq);
8390169689Skan		return target;
8391169689Skan	      }
8392169689Skan
8393169689Skan	    /* Otherwise discard the sequence and fall back to code with
8394169689Skan	       branches.  */
8395169689Skan	    end_sequence ();
8396169689Skan	  }
8397169689Skan#endif
8398169689Skan	if (target != op0)
8399169689Skan	  emit_move_insn (target, op0);
8400169689Skan
8401169689Skan	temp = gen_label_rtx ();
8402169689Skan	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8403169689Skan				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8404169689Skan      }
840590075Sobrien      emit_move_insn (target, op1);
8406169689Skan      emit_label (temp);
840718334Speter      return target;
840818334Speter
840918334Speter    case BIT_NOT_EXPR:
841018334Speter      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8411117395Skan      if (modifier == EXPAND_STACK_PARM)
8412117395Skan	target = 0;
841318334Speter      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8414169689Skan      gcc_assert (temp);
841518334Speter      return temp;
841618334Speter
841718334Speter      /* ??? Can optimize bitwise operations with one arg constant.
841818334Speter	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
841918334Speter	 and (a bitwise1 b) bitwise2 b (etc)
842018334Speter	 but that is probably not worth while.  */
842118334Speter
842218334Speter      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
842318334Speter	 boolean values when we want in all cases to compute both of them.  In
842418334Speter	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
842518334Speter	 as actual zero-or-1 values and then bitwise anding.  In cases where
842618334Speter	 there cannot be any side effects, better code would be made by
842718334Speter	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
842818334Speter	 how to recognize those cases.  */
842918334Speter
843018334Speter    case TRUTH_AND_EXPR:
8431169689Skan      code = BIT_AND_EXPR;
843218334Speter    case BIT_AND_EXPR:
843318334Speter      goto binop;
843418334Speter
843518334Speter    case TRUTH_OR_EXPR:
8436169689Skan      code = BIT_IOR_EXPR;
843718334Speter    case BIT_IOR_EXPR:
843818334Speter      goto binop;
843918334Speter
844018334Speter    case TRUTH_XOR_EXPR:
8441169689Skan      code = BIT_XOR_EXPR;
844218334Speter    case BIT_XOR_EXPR:
844318334Speter      goto binop;
844418334Speter
844518334Speter    case LSHIFT_EXPR:
844618334Speter    case RSHIFT_EXPR:
844718334Speter    case LROTATE_EXPR:
844818334Speter    case RROTATE_EXPR:
844950397Sobrien      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
845018334Speter	subtarget = 0;
8451117395Skan      if (modifier == EXPAND_STACK_PARM)
8452117395Skan	target = 0;
845318334Speter      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
845418334Speter      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
845518334Speter			   unsignedp);
845618334Speter
845718334Speter      /* Could determine the answer when only additive constants differ.  Also,
845818334Speter	 the addition of one can be handled by changing the condition.  */
845918334Speter    case LT_EXPR:
846018334Speter    case LE_EXPR:
846118334Speter    case GT_EXPR:
846218334Speter    case GE_EXPR:
846318334Speter    case EQ_EXPR:
846418334Speter    case NE_EXPR:
846590075Sobrien    case UNORDERED_EXPR:
846690075Sobrien    case ORDERED_EXPR:
846790075Sobrien    case UNLT_EXPR:
846890075Sobrien    case UNLE_EXPR:
846990075Sobrien    case UNGT_EXPR:
847090075Sobrien    case UNGE_EXPR:
847190075Sobrien    case UNEQ_EXPR:
8472169689Skan    case LTGT_EXPR:
8473117395Skan      temp = do_store_flag (exp,
8474117395Skan			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8475117395Skan			    tmode != VOIDmode ? tmode : mode, 0);
847618334Speter      if (temp != 0)
847718334Speter	return temp;
847818334Speter
847950397Sobrien      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
848018334Speter      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
848118334Speter	  && original_target
8482169689Skan	  && REG_P (original_target)
848318334Speter	  && (GET_MODE (original_target)
848418334Speter	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
848518334Speter	{
848618334Speter	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
848718334Speter			      VOIDmode, 0);
848818334Speter
848996263Sobrien	  /* If temp is constant, we can just compute the result.  */
849096263Sobrien	  if (GET_CODE (temp) == CONST_INT)
849196263Sobrien	    {
849296263Sobrien	      if (INTVAL (temp) != 0)
849396263Sobrien	        emit_move_insn (target, const1_rtx);
849496263Sobrien	      else
849596263Sobrien	        emit_move_insn (target, const0_rtx);
849696263Sobrien
849796263Sobrien	      return target;
849896263Sobrien	    }
849996263Sobrien
850018334Speter	  if (temp != original_target)
850196263Sobrien	    {
850296263Sobrien	      enum machine_mode mode1 = GET_MODE (temp);
850396263Sobrien	      if (mode1 == VOIDmode)
850496263Sobrien		mode1 = tmode != VOIDmode ? tmode : mode;
8505117395Skan
850696263Sobrien	      temp = copy_to_mode_reg (mode1, temp);
850796263Sobrien	    }
850818334Speter
850918334Speter	  op1 = gen_label_rtx ();
851052284Sobrien	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
851190075Sobrien				   GET_MODE (temp), unsignedp, op1);
851218334Speter	  emit_move_insn (temp, const1_rtx);
851318334Speter	  emit_label (op1);
851418334Speter	  return temp;
851518334Speter	}
851618334Speter
8517169689Skan      /* If no set-flag instruction, must generate a conditional store
8518169689Skan	 into a temporary variable.  Drop through and handle this
8519169689Skan	 like && and ||.  */
852018334Speter
852118334Speter      if (! ignore
8522117395Skan	  && (target == 0
8523117395Skan	      || modifier == EXPAND_STACK_PARM
8524117395Skan	      || ! safe_from_p (target, exp, 1)
852518334Speter	      /* Make sure we don't have a hard reg (such as function's return
852618334Speter		 value) live across basic blocks, if not optimizing.  */
8527169689Skan	      || (!optimize && REG_P (target)
852818334Speter		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
852918334Speter	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
853018334Speter
853118334Speter      if (target)
8532169689Skan	emit_move_insn (target, const0_rtx);
853318334Speter
853418334Speter      op1 = gen_label_rtx ();
853518334Speter      jumpifnot (exp, op1);
853618334Speter
853718334Speter      if (target)
8538169689Skan	emit_move_insn (target, const1_rtx);
853918334Speter
854018334Speter      emit_label (op1);
854118334Speter      return ignore ? const0_rtx : target;
854218334Speter
854318334Speter    case TRUTH_NOT_EXPR:
8544117395Skan      if (modifier == EXPAND_STACK_PARM)
8545117395Skan	target = 0;
854618334Speter      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
854718334Speter      /* The parser is careful to generate TRUTH_NOT_EXPR
854818334Speter	 only with operands that are always zero or one.  */
854918334Speter      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
855018334Speter			   target, 1, OPTAB_LIB_WIDEN);
8551169689Skan      gcc_assert (temp);
855218334Speter      return temp;
855318334Speter
8554169689Skan    case STATEMENT_LIST:
855550397Sobrien      {
8556169689Skan	tree_stmt_iterator iter;
855718334Speter
8558169689Skan	gcc_assert (ignore);
855918334Speter
8560169689Skan	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8561169689Skan	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8562169689Skan      }
8563169689Skan      return const0_rtx;
856418334Speter
8565169689Skan    case COND_EXPR:
8566169689Skan      /* A COND_EXPR with its type being VOID_TYPE represents a
8567169689Skan	 conditional jump and is handled in
8568169689Skan	 expand_gimple_cond_expr.  */
8569169689Skan      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
857018334Speter
8571169689Skan        /* Note that COND_EXPRs whose type is a structure or union
8572169689Skan  	 are required to be constructed to contain assignments of
8573169689Skan  	 a temporary variable, so that we can evaluate them here
8574169689Skan  	 for side effect only.  If type is void, we must do likewise.  */
857518334Speter
8576169689Skan        gcc_assert (!TREE_ADDRESSABLE (type)
8577169689Skan		    && !ignore
8578169689Skan		    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8579169689Skan		    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
858018334Speter
8581169689Skan       /* If we are not to produce a result, we have no target.  Otherwise,
8582169689Skan 	 if a target was specified use it; it will not be used as an
8583169689Skan 	 intermediate target unless it is safe.  If no target, use a
8584169689Skan 	 temporary.  */
858518334Speter
8586169689Skan       if (modifier != EXPAND_STACK_PARM
8587169689Skan 	  && original_target
8588169689Skan 	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8589169689Skan 	  && GET_MODE (original_target) == mode
859050397Sobrien#ifdef HAVE_conditional_move
8591169689Skan 	  && (! can_conditionally_move_p (mode)
8592169689Skan 	      || REG_P (original_target))
859350397Sobrien#endif
8594169689Skan 	  && !MEM_P (original_target))
8595169689Skan 	temp = original_target;
8596169689Skan       else
8597169689Skan 	temp = assign_temp (type, 0, 0, 1);
859850397Sobrien
8599169689Skan       do_pending_stack_adjust ();
8600169689Skan       NO_DEFER_POP;
8601169689Skan       op0 = gen_label_rtx ();
8602169689Skan       op1 = gen_label_rtx ();
8603169689Skan       jumpifnot (TREE_OPERAND (exp, 0), op0);
8604169689Skan       store_expr (TREE_OPERAND (exp, 1), temp,
8605169689Skan 		  modifier == EXPAND_STACK_PARM);
860618334Speter
8607169689Skan       emit_jump_insn (gen_jump (op1));
8608169689Skan       emit_barrier ();
8609169689Skan       emit_label (op0);
8610169689Skan       store_expr (TREE_OPERAND (exp, 2), temp,
8611169689Skan 		  modifier == EXPAND_STACK_PARM);
861218334Speter
8613169689Skan       emit_label (op1);
8614169689Skan       OK_DEFER_POP;
8615169689Skan       return temp;
861618334Speter
8617169689Skan    case VEC_COND_EXPR:
8618169689Skan	target = expand_vec_cond_expr (exp, target);
861950397Sobrien	return target;
862018334Speter
862118334Speter    case MODIFY_EXPR:
862218334Speter      {
862318334Speter	tree lhs = TREE_OPERAND (exp, 0);
862418334Speter	tree rhs = TREE_OPERAND (exp, 1);
862518334Speter
8626169689Skan	gcc_assert (ignore);
862718334Speter
862818334Speter	/* Check for |= or &= of a bitfield of size one into another bitfield
862918334Speter	   of size 1.  In this case, (unless we need the result of the
863018334Speter	   assignment) we can do this more efficiently with a
863118334Speter	   test followed by an assignment, if necessary.
863218334Speter
863318334Speter	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
863418334Speter	   things change so we do, this code should be enhanced to
863518334Speter	   support it.  */
8636169689Skan	if (TREE_CODE (lhs) == COMPONENT_REF
863718334Speter	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
863818334Speter		|| TREE_CODE (rhs) == BIT_AND_EXPR)
863918334Speter	    && TREE_OPERAND (rhs, 0) == lhs
864018334Speter	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
864190075Sobrien	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
864290075Sobrien	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
864318334Speter	  {
864418334Speter	    rtx label = gen_label_rtx ();
8645169689Skan	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
864618334Speter	    do_jump (TREE_OPERAND (rhs, 1),
8647169689Skan		     value ? label : 0,
8648169689Skan		     value ? 0 : label);
8649169689Skan	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
865018334Speter	    do_pending_stack_adjust ();
865118334Speter	    emit_label (label);
865218334Speter	    return const0_rtx;
865318334Speter	  }
865418334Speter
8655169689Skan	expand_assignment (lhs, rhs);
8656117395Skan
8657169689Skan	return const0_rtx;
865818334Speter      }
865918334Speter
866052284Sobrien    case RETURN_EXPR:
866152284Sobrien      if (!TREE_OPERAND (exp, 0))
866252284Sobrien	expand_null_return ();
866352284Sobrien      else
866452284Sobrien	expand_return (TREE_OPERAND (exp, 0));
866552284Sobrien      return const0_rtx;
866652284Sobrien
866718334Speter    case ADDR_EXPR:
8668169689Skan      return expand_expr_addr_expr (exp, target, tmode, modifier);
866918334Speter
8670169689Skan    case COMPLEX_EXPR:
8671169689Skan      /* Get the rtx code of the operands.  */
8672169689Skan      op0 = expand_normal (TREE_OPERAND (exp, 0));
8673169689Skan      op1 = expand_normal (TREE_OPERAND (exp, 1));
867418334Speter
8675169689Skan      if (!target)
8676169689Skan	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
867750397Sobrien
8678169689Skan      /* Move the real (op0) and imaginary (op1) parts to their location.  */
8679169689Skan      write_complex_part (target, op0, false);
8680169689Skan      write_complex_part (target, op1, true);
868118334Speter
8682169689Skan      return target;
868318334Speter
8684169689Skan    case REALPART_EXPR:
8685169689Skan      op0 = expand_normal (TREE_OPERAND (exp, 0));
8686169689Skan      return read_complex_part (op0, false);
8687117395Skan
8688169689Skan    case IMAGPART_EXPR:
8689169689Skan      op0 = expand_normal (TREE_OPERAND (exp, 0));
8690169689Skan      return read_complex_part (op0, true);
869118334Speter
8692169689Skan    case RESX_EXPR:
8693169689Skan      expand_resx_expr (exp);
8694169689Skan      return const0_rtx;
869590075Sobrien
8696169689Skan    case TRY_CATCH_EXPR:
8697169689Skan    case CATCH_EXPR:
8698169689Skan    case EH_FILTER_EXPR:
8699169689Skan    case TRY_FINALLY_EXPR:
8700169689Skan      /* Lowered by tree-eh.c.  */
8701169689Skan      gcc_unreachable ();
870218334Speter
8703169689Skan    case WITH_CLEANUP_EXPR:
8704169689Skan    case CLEANUP_POINT_EXPR:
8705169689Skan    case TARGET_EXPR:
8706169689Skan    case CASE_LABEL_EXPR:
8707169689Skan    case VA_ARG_EXPR:
8708169689Skan    case BIND_EXPR:
8709169689Skan    case INIT_EXPR:
8710169689Skan    case CONJ_EXPR:
8711169689Skan    case COMPOUND_EXPR:
8712169689Skan    case PREINCREMENT_EXPR:
8713169689Skan    case PREDECREMENT_EXPR:
8714169689Skan    case POSTINCREMENT_EXPR:
8715169689Skan    case POSTDECREMENT_EXPR:
8716169689Skan    case LOOP_EXPR:
8717169689Skan    case EXIT_EXPR:
8718169689Skan    case TRUTH_ANDIF_EXPR:
8719169689Skan    case TRUTH_ORIF_EXPR:
8720169689Skan      /* Lowered by gimplify.c.  */
8721169689Skan      gcc_unreachable ();
872290075Sobrien
8723169689Skan    case EXC_PTR_EXPR:
8724169689Skan      return get_exception_pointer (cfun);
872590075Sobrien
8726169689Skan    case FILTER_EXPR:
8727169689Skan      return get_exception_filter (cfun);
872890075Sobrien
8729169689Skan    case FDESC_EXPR:
8730169689Skan      /* Function descriptors are not valid except for as
8731169689Skan	 initialization constants, and should not be expanded.  */
8732169689Skan      gcc_unreachable ();
8733117395Skan
8734169689Skan    case SWITCH_EXPR:
8735169689Skan      expand_case (exp);
8736169689Skan      return const0_rtx;
8737117395Skan
8738169689Skan    case LABEL_EXPR:
8739169689Skan      expand_label (TREE_OPERAND (exp, 0));
8740169689Skan      return const0_rtx;
8741117395Skan
8742169689Skan    case ASM_EXPR:
8743169689Skan      expand_asm_expr (exp);
8744169689Skan      return const0_rtx;
874590075Sobrien
8746169689Skan    case WITH_SIZE_EXPR:
8747169689Skan      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
8748169689Skan	 have pulled out the size to use in whatever context it needed.  */
8749169689Skan      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8750169689Skan			       modifier, alt_rtl);
875118334Speter
8752169689Skan    case REALIGN_LOAD_EXPR:
875318334Speter      {
8754259563Spfg        tree oprnd0 = TREE_OPERAND (exp, 0);
8755169689Skan        tree oprnd1 = TREE_OPERAND (exp, 1);
8756169689Skan        tree oprnd2 = TREE_OPERAND (exp, 2);
8757169689Skan        rtx op2;
875818334Speter
8759169689Skan        this_optab = optab_for_tree_code (code, type);
8760169689Skan        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8761169689Skan        op2 = expand_normal (oprnd2);
8762259563Spfg        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8763169689Skan				  target, unsignedp);
8764169689Skan        gcc_assert (temp);
8765169689Skan        return temp;
876618334Speter      }
876718334Speter
8768169689Skan    case DOT_PROD_EXPR:
876918334Speter      {
8770169689Skan	tree oprnd0 = TREE_OPERAND (exp, 0);
8771169689Skan	tree oprnd1 = TREE_OPERAND (exp, 1);
8772169689Skan	tree oprnd2 = TREE_OPERAND (exp, 2);
8773169689Skan	rtx op2;
877418334Speter
8775169689Skan	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8776169689Skan	op2 = expand_normal (oprnd2);
8777259563Spfg	target = expand_widen_pattern_expr (exp, op0, op1, op2,
8778169689Skan					    target, unsignedp);
877918334Speter	return target;
878018334Speter      }
878118334Speter
8782169689Skan    case WIDEN_SUM_EXPR:
878350397Sobrien      {
8784169689Skan        tree oprnd0 = TREE_OPERAND (exp, 0);
8785169689Skan        tree oprnd1 = TREE_OPERAND (exp, 1);
8786259563Spfg
8787169689Skan        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8788169689Skan        target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8789169689Skan                                            target, unsignedp);
8790169689Skan        return target;
879150397Sobrien      }
879250397Sobrien
8793169689Skan    case REDUC_MAX_EXPR:
8794169689Skan    case REDUC_MIN_EXPR:
8795169689Skan    case REDUC_PLUS_EXPR:
879652284Sobrien      {
8797169689Skan        op0 = expand_normal (TREE_OPERAND (exp, 0));
8798169689Skan        this_optab = optab_for_tree_code (code, type);
8799169689Skan        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8800169689Skan        gcc_assert (temp);
8801169689Skan        return temp;
880252284Sobrien      }
880352284Sobrien
8804169689Skan    case VEC_LSHIFT_EXPR:
8805169689Skan    case VEC_RSHIFT_EXPR:
880652284Sobrien      {
8807169689Skan	target = expand_vec_shift_expr (exp, target);
8808169689Skan	return target;
880952284Sobrien      }
881052284Sobrien
881118334Speter    default:
8812169689Skan      return lang_hooks.expand_expr (exp, original_target, tmode,
8813169689Skan				     modifier, alt_rtl);
881418334Speter    }
881518334Speter
8816169689Skan  /* Here to do an ordinary binary operator.  */
881718334Speter binop:
8818132718Skan  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8819132718Skan		   subtarget, &op0, &op1, 0);
882018334Speter binop2:
8821169689Skan  this_optab = optab_for_tree_code (code, type);
8822169689Skan binop3:
8823117395Skan  if (modifier == EXPAND_STACK_PARM)
8824117395Skan    target = 0;
882518334Speter  temp = expand_binop (mode, this_optab, op0, op1, target,
882618334Speter		       unsignedp, OPTAB_LIB_WIDEN);
8827169689Skan  gcc_assert (temp);
8828169689Skan  return REDUCE_BIT_FIELD (temp);
882918334Speter}
8830169689Skan#undef REDUCE_BIT_FIELD
883118334Speter
8832169689Skan/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8833169689Skan   signedness of TYPE), possibly returning the result in TARGET.  */
8834169689Skanstatic rtx
8835169689Skanreduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8836169689Skan{
8837169689Skan  HOST_WIDE_INT prec = TYPE_PRECISION (type);
8838169689Skan  if (target && GET_MODE (target) != GET_MODE (exp))
8839169689Skan    target = 0;
8840169689Skan  /* For constant values, reduce using build_int_cst_type. */
8841169689Skan  if (GET_CODE (exp) == CONST_INT)
8842169689Skan    {
8843169689Skan      HOST_WIDE_INT value = INTVAL (exp);
8844169689Skan      tree t = build_int_cst_type (type, value);
8845169689Skan      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
8846169689Skan    }
8847169689Skan  else if (TYPE_UNSIGNED (type))
8848169689Skan    {
8849169689Skan      rtx mask;
8850169689Skan      if (prec < HOST_BITS_PER_WIDE_INT)
8851169689Skan	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8852169689Skan				   GET_MODE (exp));
8853169689Skan      else
8854169689Skan	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8855169689Skan				   ((unsigned HOST_WIDE_INT) 1
8856169689Skan				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8857169689Skan				   GET_MODE (exp));
8858169689Skan      return expand_and (GET_MODE (exp), exp, mask, target);
8859169689Skan    }
8860169689Skan  else
8861169689Skan    {
8862169689Skan      tree count = build_int_cst (NULL_TREE,
8863169689Skan				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8864169689Skan      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8865169689Skan      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8866169689Skan    }
8867169689Skan}
8868169689Skan
886996263Sobrien/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
887096263Sobrien   when applied to the address of EXP produces an address known to be
887196263Sobrien   aligned more than BIGGEST_ALIGNMENT.  */
887296263Sobrien
887396263Sobrienstatic int
8874132718Skanis_aligning_offset (tree offset, tree exp)
887596263Sobrien{
8876169689Skan  /* Strip off any conversions.  */
887796263Sobrien  while (TREE_CODE (offset) == NON_LVALUE_EXPR
887896263Sobrien	 || TREE_CODE (offset) == NOP_EXPR
8879169689Skan	 || TREE_CODE (offset) == CONVERT_EXPR)
888096263Sobrien    offset = TREE_OPERAND (offset, 0);
888196263Sobrien
888296263Sobrien  /* We must now have a BIT_AND_EXPR with a constant that is one less than
888396263Sobrien     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
888496263Sobrien  if (TREE_CODE (offset) != BIT_AND_EXPR
888596263Sobrien      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8886169689Skan      || compare_tree_int (TREE_OPERAND (offset, 1),
8887132718Skan			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
888896263Sobrien      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
888996263Sobrien    return 0;
889096263Sobrien
889196263Sobrien  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
889296263Sobrien     It must be NEGATE_EXPR.  Then strip any more conversions.  */
889396263Sobrien  offset = TREE_OPERAND (offset, 0);
889496263Sobrien  while (TREE_CODE (offset) == NON_LVALUE_EXPR
889596263Sobrien	 || TREE_CODE (offset) == NOP_EXPR
889696263Sobrien	 || TREE_CODE (offset) == CONVERT_EXPR)
889796263Sobrien    offset = TREE_OPERAND (offset, 0);
889896263Sobrien
889996263Sobrien  if (TREE_CODE (offset) != NEGATE_EXPR)
890096263Sobrien    return 0;
890196263Sobrien
890296263Sobrien  offset = TREE_OPERAND (offset, 0);
890396263Sobrien  while (TREE_CODE (offset) == NON_LVALUE_EXPR
890496263Sobrien	 || TREE_CODE (offset) == NOP_EXPR
890596263Sobrien	 || TREE_CODE (offset) == CONVERT_EXPR)
890696263Sobrien    offset = TREE_OPERAND (offset, 0);
890796263Sobrien
8908169689Skan  /* This must now be the address of EXP.  */
8909169689Skan  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
891096263Sobrien}
891196263Sobrien
8912117395Skan/* Return the tree node if an ARG corresponds to a string constant or zero
8913117395Skan   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
891490075Sobrien   in bytes within the string that ARG is accessing.  The type of the
891590075Sobrien   offset will be `sizetype'.  */
891618334Speter
891790075Sobrientree
8918132718Skanstring_constant (tree arg, tree *ptr_offset)
891918334Speter{
8920169689Skan  tree array, offset;
892118334Speter  STRIP_NOPS (arg);
892218334Speter
8923169689Skan  if (TREE_CODE (arg) == ADDR_EXPR)
892418334Speter    {
8925169689Skan      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8926169689Skan	{
8927169689Skan	  *ptr_offset = size_zero_node;
8928169689Skan	  return TREE_OPERAND (arg, 0);
8929169689Skan	}
8930169689Skan      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8931169689Skan	{
8932169689Skan	  array = TREE_OPERAND (arg, 0);
8933169689Skan	  offset = size_zero_node;
8934169689Skan	}
8935169689Skan      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8936169689Skan	{
8937169689Skan	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8938169689Skan	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8939169689Skan	  if (TREE_CODE (array) != STRING_CST
8940169689Skan	      && TREE_CODE (array) != VAR_DECL)
8941169689Skan	    return 0;
8942169689Skan	}
8943169689Skan      else
8944169689Skan	return 0;
894518334Speter    }
894618334Speter  else if (TREE_CODE (arg) == PLUS_EXPR)
894718334Speter    {
894818334Speter      tree arg0 = TREE_OPERAND (arg, 0);
894918334Speter      tree arg1 = TREE_OPERAND (arg, 1);
895018334Speter
895118334Speter      STRIP_NOPS (arg0);
895218334Speter      STRIP_NOPS (arg1);
895318334Speter
895418334Speter      if (TREE_CODE (arg0) == ADDR_EXPR
8955169689Skan	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8956169689Skan	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
895718334Speter	{
8958169689Skan	  array = TREE_OPERAND (arg0, 0);
8959169689Skan	  offset = arg1;
896018334Speter	}
896118334Speter      else if (TREE_CODE (arg1) == ADDR_EXPR
8962169689Skan	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8963169689Skan		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
896418334Speter	{
8965169689Skan	  array = TREE_OPERAND (arg1, 0);
8966169689Skan	  offset = arg0;
896718334Speter	}
896818334Speter      else
8969169689Skan	return 0;
897018334Speter    }
8971169689Skan  else
8972169689Skan    return 0;
897318334Speter
8974169689Skan  if (TREE_CODE (array) == STRING_CST)
897518334Speter    {
8976169689Skan      *ptr_offset = fold_convert (sizetype, offset);
8977169689Skan      return array;
897818334Speter    }
8979169689Skan  else if (TREE_CODE (array) == VAR_DECL)
898018334Speter    {
8981169689Skan      int length;
898218334Speter
8983169689Skan      /* Variables initialized to string literals can be handled too.  */
8984169689Skan      if (DECL_INITIAL (array) == NULL_TREE
8985169689Skan	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8986169689Skan	return 0;
898718334Speter
8988169689Skan      /* If they are read-only, non-volatile and bind locally.  */
8989169689Skan      if (! TREE_READONLY (array)
8990169689Skan	  || TREE_SIDE_EFFECTS (array)
8991169689Skan	  || ! targetm.binds_local_p (array))
8992169689Skan	return 0;
899318334Speter
8994169689Skan      /* Avoid const char foo[4] = "abcde";  */
8995169689Skan      if (DECL_SIZE_UNIT (array) == NULL_TREE
8996169689Skan	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8997169689Skan	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8998169689Skan	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8999169689Skan	return 0;
900018334Speter
9001169689Skan      /* If variable is bigger than the string literal, OFFSET must be constant
9002169689Skan	 and inside of the bounds of the string literal.  */
9003169689Skan      offset = fold_convert (sizetype, offset);
9004169689Skan      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9005169689Skan	  && (! host_integerp (offset, 1)
9006169689Skan	      || compare_tree_int (offset, length) >= 0))
9007169689Skan	return 0;
900818334Speter
9009169689Skan      *ptr_offset = offset;
9010169689Skan      return DECL_INITIAL (array);
901118334Speter    }
901218334Speter
9013169689Skan  return 0;
901418334Speter}
901518334Speter
901618334Speter/* Generate code to calculate EXP using a store-flag instruction
901718334Speter   and return an rtx for the result.  EXP is either a comparison
901818334Speter   or a TRUTH_NOT_EXPR whose operand is a comparison.
901918334Speter
902018334Speter   If TARGET is nonzero, store the result there if convenient.
902118334Speter
9022117395Skan   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
902318334Speter   cheap.
902418334Speter
902518334Speter   Return zero if there is no suitable set-flag instruction
902618334Speter   available on this machine.
902718334Speter
902818334Speter   Once expand_expr has been called on the arguments of the comparison,
902918334Speter   we are committed to doing the store flag, since it is not safe to
903018334Speter   re-evaluate the expression.  We emit the store-flag insn by calling
903118334Speter   emit_store_flag, but only expand the arguments if we have a reason
903218334Speter   to believe that emit_store_flag will be successful.  If we think that
903318334Speter   it will, but it isn't, we have to simulate the store-flag with a
903418334Speter   set/jump/set sequence.  */
903518334Speter
903618334Speterstatic rtx
9037132718Skando_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
903818334Speter{
903918334Speter  enum rtx_code code;
904018334Speter  tree arg0, arg1, type;
904118334Speter  tree tem;
904218334Speter  enum machine_mode operand_mode;
904318334Speter  int invert = 0;
904418334Speter  int unsignedp;
904518334Speter  rtx op0, op1;
904618334Speter  enum insn_code icode;
904718334Speter  rtx subtarget = target;
904850397Sobrien  rtx result, label;
904918334Speter
905018334Speter  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
905118334Speter     result at the end.  We can't simply invert the test since it would
905218334Speter     have already been inverted if it were valid.  This case occurs for
905318334Speter     some floating-point comparisons.  */
905418334Speter
905518334Speter  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
905618334Speter    invert = 1, exp = TREE_OPERAND (exp, 0);
905718334Speter
905818334Speter  arg0 = TREE_OPERAND (exp, 0);
905918334Speter  arg1 = TREE_OPERAND (exp, 1);
906090075Sobrien
906190075Sobrien  /* Don't crash if the comparison was erroneous.  */
906290075Sobrien  if (arg0 == error_mark_node || arg1 == error_mark_node)
906390075Sobrien    return const0_rtx;
906490075Sobrien
906518334Speter  type = TREE_TYPE (arg0);
906618334Speter  operand_mode = TYPE_MODE (type);
9067169689Skan  unsignedp = TYPE_UNSIGNED (type);
906818334Speter
906918334Speter  /* We won't bother with BLKmode store-flag operations because it would mean
907018334Speter     passing a lot of information to emit_store_flag.  */
907118334Speter  if (operand_mode == BLKmode)
907218334Speter    return 0;
907318334Speter
907450397Sobrien  /* We won't bother with store-flag operations involving function pointers
907550397Sobrien     when function pointers must be canonicalized before comparisons.  */
907650397Sobrien#ifdef HAVE_canonicalize_funcptr_for_compare
907750397Sobrien  if (HAVE_canonicalize_funcptr_for_compare
907850397Sobrien      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
907950397Sobrien	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
908050397Sobrien	       == FUNCTION_TYPE))
908150397Sobrien	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
908250397Sobrien	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
908350397Sobrien		  == FUNCTION_TYPE))))
908450397Sobrien    return 0;
908550397Sobrien#endif
908650397Sobrien
908718334Speter  STRIP_NOPS (arg0);
908818334Speter  STRIP_NOPS (arg1);
908918334Speter
909018334Speter  /* Get the rtx comparison code to use.  We know that EXP is a comparison
909118334Speter     operation of some type.  Some comparisons against 1 and -1 can be
909218334Speter     converted to comparisons with zero.  Do so here so that the tests
909318334Speter     below will be aware that we have a comparison with zero.   These
909418334Speter     tests will not catch constants in the first operand, but constants
909518334Speter     are rarely passed as the first operand.  */
909618334Speter
909718334Speter  switch (TREE_CODE (exp))
909818334Speter    {
909918334Speter    case EQ_EXPR:
910018334Speter      code = EQ;
910118334Speter      break;
910218334Speter    case NE_EXPR:
910318334Speter      code = NE;
910418334Speter      break;
910518334Speter    case LT_EXPR:
910618334Speter      if (integer_onep (arg1))
910718334Speter	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
910818334Speter      else
910918334Speter	code = unsignedp ? LTU : LT;
911018334Speter      break;
911118334Speter    case LE_EXPR:
911218334Speter      if (! unsignedp && integer_all_onesp (arg1))
911318334Speter	arg1 = integer_zero_node, code = LT;
911418334Speter      else
911518334Speter	code = unsignedp ? LEU : LE;
911618334Speter      break;
911718334Speter    case GT_EXPR:
911818334Speter      if (! unsignedp && integer_all_onesp (arg1))
911918334Speter	arg1 = integer_zero_node, code = GE;
912018334Speter      else
912118334Speter	code = unsignedp ? GTU : GT;
912218334Speter      break;
912318334Speter    case GE_EXPR:
912418334Speter      if (integer_onep (arg1))
912518334Speter	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
912618334Speter      else
912718334Speter	code = unsignedp ? GEU : GE;
912818334Speter      break;
912990075Sobrien
913090075Sobrien    case UNORDERED_EXPR:
913190075Sobrien      code = UNORDERED;
913290075Sobrien      break;
913390075Sobrien    case ORDERED_EXPR:
913490075Sobrien      code = ORDERED;
913590075Sobrien      break;
913690075Sobrien    case UNLT_EXPR:
913790075Sobrien      code = UNLT;
913890075Sobrien      break;
913990075Sobrien    case UNLE_EXPR:
914090075Sobrien      code = UNLE;
914190075Sobrien      break;
914290075Sobrien    case UNGT_EXPR:
914390075Sobrien      code = UNGT;
914490075Sobrien      break;
914590075Sobrien    case UNGE_EXPR:
914690075Sobrien      code = UNGE;
914790075Sobrien      break;
914890075Sobrien    case UNEQ_EXPR:
914990075Sobrien      code = UNEQ;
915090075Sobrien      break;
9151169689Skan    case LTGT_EXPR:
9152169689Skan      code = LTGT;
9153169689Skan      break;
915490075Sobrien
915518334Speter    default:
9156169689Skan      gcc_unreachable ();
915718334Speter    }
915818334Speter
915918334Speter  /* Put a constant second.  */
916018334Speter  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
916118334Speter    {
916218334Speter      tem = arg0; arg0 = arg1; arg1 = tem;
916318334Speter      code = swap_condition (code);
916418334Speter    }
916518334Speter
916618334Speter  /* If this is an equality or inequality test of a single bit, we can
916718334Speter     do this by shifting the bit being tested to the low-order bit and
916818334Speter     masking the result with the constant 1.  If the condition was EQ,
916918334Speter     we xor it with 1.  This does not require an scc insn and is faster
9170132718Skan     than an scc insn even if we have it.
917118334Speter
9172132718Skan     The code to make this transformation was moved into fold_single_bit_test,
9173132718Skan     so we just call into the folder and expand its result.  */
9174132718Skan
917518334Speter  if ((code == NE || code == EQ)
917618334Speter      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
917750397Sobrien      && integer_pow2p (TREE_OPERAND (arg0, 1)))
917818334Speter    {
9179169689Skan      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9180132718Skan      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9181132718Skan						arg0, arg1, type),
9182132718Skan			  target, VOIDmode, EXPAND_NORMAL);
918318334Speter    }
918418334Speter
918518334Speter  /* Now see if we are likely to be able to do this.  Return if not.  */
918690075Sobrien  if (! can_compare_p (code, operand_mode, ccp_store_flag))
918718334Speter    return 0;
918890075Sobrien
918918334Speter  icode = setcc_gen_code[(int) code];
919018334Speter  if (icode == CODE_FOR_nothing
919190075Sobrien      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
919218334Speter    {
919318334Speter      /* We can only do this if it is one of the special cases that
919418334Speter	 can be handled without an scc insn.  */
919518334Speter      if ((code == LT && integer_zerop (arg1))
919618334Speter	  || (! only_cheap && code == GE && integer_zerop (arg1)))
919718334Speter	;
9198169689Skan      else if (! only_cheap && (code == NE || code == EQ)
919918334Speter	       && TREE_CODE (type) != REAL_TYPE
920018334Speter	       && ((abs_optab->handlers[(int) operand_mode].insn_code
920118334Speter		    != CODE_FOR_nothing)
920218334Speter		   || (ffs_optab->handlers[(int) operand_mode].insn_code
920318334Speter		       != CODE_FOR_nothing)))
920418334Speter	;
920518334Speter      else
920618334Speter	return 0;
920718334Speter    }
920890075Sobrien
920990075Sobrien  if (! get_subtarget (target)
9210132718Skan      || GET_MODE (subtarget) != operand_mode)
921118334Speter    subtarget = 0;
921218334Speter
9213132718Skan  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
921418334Speter
921518334Speter  if (target == 0)
921618334Speter    target = gen_reg_rtx (mode);
921718334Speter
9218169689Skan  result = emit_store_flag (target, code, op0, op1,
921918334Speter			    operand_mode, unsignedp, 1);
922018334Speter
922118334Speter  if (result)
922218334Speter    {
922318334Speter      if (invert)
922418334Speter	result = expand_binop (mode, xor_optab, result, const1_rtx,
922518334Speter			       result, 0, OPTAB_LIB_WIDEN);
922618334Speter      return result;
922718334Speter    }
922818334Speter
922918334Speter  /* If this failed, we have to do this with set/compare/jump/set code.  */
9230169689Skan  if (!REG_P (target)
923118334Speter      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
923218334Speter    target = gen_reg_rtx (GET_MODE (target));
923318334Speter
923418334Speter  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
923518334Speter  result = compare_from_rtx (op0, op1, code, unsignedp,
923690075Sobrien			     operand_mode, NULL_RTX);
923718334Speter  if (GET_CODE (result) == CONST_INT)
923818334Speter    return (((result == const0_rtx && ! invert)
923918334Speter	     || (result != const0_rtx && invert))
924018334Speter	    ? const0_rtx : const1_rtx);
924118334Speter
924290075Sobrien  /* The code of RESULT may not match CODE if compare_from_rtx
924390075Sobrien     decided to swap its operands and reverse the original code.
924490075Sobrien
924590075Sobrien     We know that compare_from_rtx returns either a CONST_INT or
924690075Sobrien     a new comparison code, so it is safe to just extract the
924790075Sobrien     code from RESULT.  */
924890075Sobrien  code = GET_CODE (result);
924990075Sobrien
925018334Speter  label = gen_label_rtx ();
9251169689Skan  gcc_assert (bcc_gen_fctn[(int) code]);
925218334Speter
925318334Speter  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
925418334Speter  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
925518334Speter  emit_label (label);
925618334Speter
925718334Speter  return target;
925818334Speter}
925918334Speter
926018334Speter
926190075Sobrien/* Stubs in case we haven't got a casesi insn.  */
926290075Sobrien#ifndef HAVE_casesi
926390075Sobrien# define HAVE_casesi 0
926490075Sobrien# define gen_casesi(a, b, c, d, e) (0)
926590075Sobrien# define CODE_FOR_casesi CODE_FOR_nothing
926690075Sobrien#endif
926718334Speter
926890075Sobrien/* If the machine does not have a case insn that compares the bounds,
926990075Sobrien   this means extra overhead for dispatch tables, which raises the
927090075Sobrien   threshold for using them.  */
927190075Sobrien#ifndef CASE_VALUES_THRESHOLD
927290075Sobrien#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
927390075Sobrien#endif /* CASE_VALUES_THRESHOLD */
927490075Sobrien
927590075Sobrienunsigned int
9276132718Skancase_values_threshold (void)
927790075Sobrien{
927890075Sobrien  return CASE_VALUES_THRESHOLD;
927990075Sobrien}
928090075Sobrien
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   INDEX_TYPE is the type of the switch index expression INDEX_EXPR.
   MINVAL is the lowest case value and RANGE the span of case values
   (both trees).  TABLE_LABEL labels the dispatch table; control
   transfers to DEFAULT_LABEL when the index is out of range.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  /* Nothing to do on targets without a casesi pattern (HAVE_casesi
     is stubbed to 0 above in that case).  */
  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      /* The index is wider than SImode: range-check it in its own
	 mode first, then truncate.  */
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      /* Index is no wider than SImode: widen (if needed) by folding
	 a conversion at the tree level before expansion.  */
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  /* Each operand must satisfy the casesi pattern's predicate in its
     expected mode; copy to a fresh register when it does not.  */
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  /* The casesi insn itself does the bounds check and the dispatch.  */
  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
935190075Sobrien
935290075Sobrien/* Attempt to generate a tablejump instruction; same concept.  */
935390075Sobrien#ifndef HAVE_tablejump
935490075Sobrien#define HAVE_tablejump 0
935590075Sobrien#define gen_tablejump(x, y) (0)
935690075Sobrien#endif
935790075Sobrien
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  /* Track the largest jump table emitted in this function (used for
     statistics / heuristics elsewhere via cfun).  */
  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  /* Form the table entry address: table_label + index * entry_size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  /* The table is constant, so use a read-only MEM for the load.  */
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
943118334Speter
/* Attempt to expand a switch via an indexed jump table.  Returns 1 on
   success, 0 when the target has no tablejump pattern.

   INDEX_TYPE is the type of the switch index expression INDEX_EXPR;
   MINVAL and RANGE are the lowest case value and the span of case
   values.  TABLE_LABEL labels the table; DEFAULT_LABEL receives
   out-of-range indices.  */
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  /* Bias the index so the lowest case value maps to table entry 0;
     fold at the tree level so constants simplify before expansion.  */
  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  /* RANGE is expanded in its own type's mode and converted to the
     index's mode so do_tablejump can compare the two directly.  */
  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
9455117395Skan
9456117395Skan/* Nonzero if the mode is a valid vector mode for this architecture.
9457117395Skan   This returns nonzero even if there is no hardware support for the
9458117395Skan   vector mode, but we can emulate with narrower modes.  */
9459117395Skan
9460117395Skanint
9461132718Skanvector_mode_valid_p (enum machine_mode mode)
9462117395Skan{
9463117395Skan  enum mode_class class = GET_MODE_CLASS (mode);
9464117395Skan  enum machine_mode innermode;
9465117395Skan
9466117395Skan  /* Doh!  What's going on?  */
9467117395Skan  if (class != MODE_VECTOR_INT
9468117395Skan      && class != MODE_VECTOR_FLOAT)
9469117395Skan    return 0;
9470117395Skan
9471117395Skan  /* Hardware support.  Woo hoo!  */
9472169689Skan  if (targetm.vector_mode_supported_p (mode))
9473117395Skan    return 1;
9474117395Skan
9475117395Skan  innermode = GET_MODE_INNER (mode);
9476117395Skan
9477117395Skan  /* We should probably return 1 if requesting V4DI and we have no DI,
9478117395Skan     but we have V2DI, but this is probably very unlikely.  */
9479117395Skan
9480117395Skan  /* If we have support for the inner mode, we can safely emulate it.
9481117395Skan     We may not have V2DI, but me can emulate with a pair of DIs.  */
9482169689Skan  return targetm.scalar_mode_supported_p (innermode);
9483117395Skan}
9484117395Skan
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  /* An all-zero constant gets the shared, pre-built zero vector.  */
  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  /* Walk the TREE_LIST of element constants, converting each to an
     rtx constant of the element mode.  */
  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	/* Integer constant: build from the two-HOST_WIDE_INT value.  */
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
9524117395Skan#include "gt-expr.h"
9525