/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
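
/* For example, on a 32-bit target with word-aligned operands, a
   16-byte copy decomposes into four SImode moves, so MOVE_BY_PIECES_P
   holds whenever MOVE_RATIO exceeds 4.  (Illustrative figures only;
   the piece sizes and ratios are target-defined.)  */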

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
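
/* For instance, a target that can load SImode cheaply from any byte
   address would define SLOW_UNALIGNED_ACCESS (SImode, align) to 0,
   while a strict-alignment target keeps the default above, which
   simply follows STRICT_ALIGNMENT.  */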

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
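
/* A typical use, widening a known-SImode value SRC to DImode with
   zero-extension (an illustrative sketch, not original code):

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   Passing UNSIGNEDP == 1 selects ZERO_EXTEND as the equivalent
   rtx code below.  */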

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
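
/* For example, narrowing a known-SImode X to QImode as an unsigned
   value (an illustrative sketch, not original code):

       rtx q = convert_modes (QImode, SImode, x, 1);

   The result may be X itself, a lowpart of X, or a fresh pseudo, so
   callers must use the returned rtx rather than assume an in-place
   conversion.  */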

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting a constant integer into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
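
/* For example, with a 64-bit HOST_WIDE_INT the immediate limit above
   is 16 bytes, so on most targets STORE_MAX_PIECES reduces to
   MOVE_MAX_PIECES.  (Illustrative; both quantities are host- and
   target-dependent.)  */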

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO; if ENDP is 1 return the memory at the end,
   a la mempcpy; and if ENDP is 2 return the memory at the end minus
   one byte, a la stpcpy.  */
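
/* For instance, to obtain a mempcpy-style end pointer from a
   by-pieces copy (an illustrative sketch):

       rtx end = move_by_pieces (to, from, len, align, 1);

   ENDP == 1 makes the returned MEM address one past the last byte
   written.  */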

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
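
/* For example, with word-aligned operands on a target whose widest
   cheap move is 4 bytes, L = 10 decomposes into two SImode moves and
   one HImode move, i.e. 3 insns.  (Illustrative figures only.)  */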

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
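
/* A typical call, copying NBYTES bytes between two BLKmode MEMs
   (an illustrative sketch; NBYTES is a hypothetical constant):

       rtx ret = emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   With BLOCK_OP_NORMAL the copy may be expanded inline or fall back
   to a memcpy libcall, whose return value, if any, comes back in RET.  */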

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
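
/* The emitted loop is equivalent to this C skeleton (a sketch of the
   generated RTL, not literal source):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];    // one QImode move per byte
       iter += 1;
     cmp:
       if (iter < size) goto top;
 */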

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
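
/* For example, an ORIG describing a value split across two hard
   registers might look like (illustrative shape only):

       (parallel [(expr_list (reg:SI 3) (const_int 0))
                  (expr_list (reg:SI 4) (const_int 4))])

   The clone keeps the modes and byte offsets but replaces regs 3
   and 4 with fresh pseudo registers.  */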
1570
1571rtx
1572gen_group_rtx (rtx orig)
1573{
1574  int i, length;
1575  rtx *tmps;
1576
1577  gcc_assert (GET_CODE (orig) == PARALLEL);
1578
1579  length = XVECLEN (orig, 0);
1580  tmps = alloca (sizeof (rtx) * length);
1581
1582  /* Skip a NULL entry in first slot.  */
1583  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584
1585  if (i)
1586    tmps[0] = 0;
1587
1588  for (; i < length; i++)
1589    {
1590      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1591      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592
1593      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1594    }
1595
1596  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1597}
1598
1599/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1600   except that values are placed in TMPS[i], and must later be moved
1601   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1602
1603static void
1604emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1605{
1606  rtx src;
1607  int start, i;
1608  enum machine_mode m = GET_MODE (orig_src);
1609
1610  gcc_assert (GET_CODE (dst) == PARALLEL);
1611
1612  if (m != VOIDmode
1613      && !SCALAR_INT_MODE_P (m)
1614      && !MEM_P (orig_src)
1615      && GET_CODE (orig_src) != CONCAT)
1616    {
1617      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1618      if (imode == BLKmode)
1619	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1620      else
1621	src = gen_reg_rtx (imode);
1622      if (imode != BLKmode)
1623	src = gen_lowpart (GET_MODE (orig_src), src);
1624      emit_move_insn (src, orig_src);
1625      /* ...and back again.  */
1626      if (imode != BLKmode)
1627	src = gen_lowpart (imode, src);
1628      emit_group_load_1 (tmps, dst, src, type, ssize);
1629      return;
1630    }
1631
1632  /* Check for a NULL entry, used to indicate that the parameter goes
1633     both on the stack and in registers.  */
1634  if (XEXP (XVECEXP (dst, 0, 0), 0))
1635    start = 0;
1636  else
1637    start = 1;
1638
1639  /* Process the pieces.  */
1640  for (i = start; i < XVECLEN (dst, 0); i++)
1641    {
1642      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1643      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1644      unsigned int bytelen = GET_MODE_SIZE (mode);
1645      int shift = 0;
1646
1647      /* Handle trailing fragments that run over the size of the struct.  */
1648      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649	{
1650	  /* Arrange to shift the fragment to where it belongs.
1651	     extract_bit_field loads to the lsb of the reg.  */
1652	  if (
1653#ifdef BLOCK_REG_PADDING
1654	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1655	      == (BYTES_BIG_ENDIAN ? upward : downward)
1656#else
1657	      BYTES_BIG_ENDIAN
1658#endif
1659	      )
1660	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1661	  bytelen = ssize - bytepos;
1662	  gcc_assert (bytelen > 0);
1663	}
1664
1665      /* If we won't be loading directly from memory, protect the real source
1666	 from strange tricks we might play; but make sure that the source can
1667	 be loaded directly into the destination.  */
1668      src = orig_src;
1669      if (!MEM_P (orig_src)
1670	  && (!CONSTANT_P (orig_src)
1671	      || (GET_MODE (orig_src) != mode
1672		  && GET_MODE (orig_src) != VOIDmode)))
1673	{
1674	  if (GET_MODE (orig_src) == VOIDmode)
1675	    src = gen_reg_rtx (mode);
1676	  else
1677	    src = gen_reg_rtx (GET_MODE (orig_src));
1678
1679	  emit_move_insn (src, orig_src);
1680	}
1681
1682      /* Optimize the access just a bit.  */
1683      if (MEM_P (src)
1684	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1685	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1686	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1687	  && bytelen == GET_MODE_SIZE (mode))
1688	{
1689	  tmps[i] = gen_reg_rtx (mode);
1690	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691	}
1692      else if (COMPLEX_MODE_P (mode)
1693	       && GET_MODE (src) == mode
1694	       && bytelen == GET_MODE_SIZE (mode))
1695	/* Let emit_move_complex do the bulk of the work.  */
1696	tmps[i] = src;
1697      else if (GET_CODE (src) == CONCAT)
1698	{
1699	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1700	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701
1702	  if ((bytepos == 0 && bytelen == slen0)
1703	      || (bytepos != 0 && bytepos + bytelen <= slen))
1704	    {
1705	      /* The following assumes that the concatenated objects all
1706		 have the same size.  In this case, a simple calculation
1707		 can be used to determine the object and the bit field
1708		 to be extracted.  */
1709	      tmps[i] = XEXP (src, bytepos / slen0);
1710	      if (! CONSTANT_P (tmps[i])
1711		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1712		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1713					     (bytepos % slen0) * BITS_PER_UNIT,
1714					     1, NULL_RTX, mode, mode);
1715	    }
1716	  else
1717	    {
1718	      rtx mem;
1719
1720	      gcc_assert (!bytepos);
1721	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
1722	      emit_move_insn (mem, src);
1723	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1724					   0, 1, NULL_RTX, mode, mode);
1725	    }
1726	}
1727      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1728	 SIMD register, which is currently broken.  While we get GCC
1729	 to emit proper RTL for these cases, let's dump to memory.  */
1730      else if (VECTOR_MODE_P (GET_MODE (dst))
1731	       && REG_P (src))
1732	{
1733	  int slen = GET_MODE_SIZE (GET_MODE (src));
1734	  rtx mem;
1735
1736	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737	  emit_move_insn (mem, src);
1738	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739	}
1740      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1741               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1743      else if (CONSTANT_P (src)
1744	       || (REG_P (src) && GET_MODE (src) == mode))
1745	tmps[i] = src;
1746      else
1747	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1748				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1749				     mode, mode);
1750
1751      if (shift)
1752	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1753				build_int_cst (NULL_TREE, shift), tmps[i], 0);
1754    }
1755}
1756
1757/* Emit code to move a block SRC of type TYPE to a block DST,
1758   where DST is non-consecutive registers represented by a PARALLEL.
1759   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1760   if not known.  */
1761
1762void
1763emit_group_load (rtx dst, rtx src, tree type, int ssize)
1764{
1765  rtx *tmps;
1766  int i;
1767
1768  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769  emit_group_load_1 (tmps, dst, src, type, ssize);
1770
1771  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1772  for (i = 0; i < XVECLEN (dst, 0); i++)
1773    {
1774      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1775      if (d == NULL)
1776	continue;
1777      emit_move_insn (d, tmps[i]);
1778    }
1779}
1780
1781/* Similar, but load SRC into new pseudos in a format that looks like
1782   PARALLEL.  This can later be fed to emit_group_move to get things
1783   in the right place.  */
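
/* A typical use (variable names illustrative) is during call expansion:

     temps = emit_group_load_into_temps (parallel, src, type, ssize);

   while SRC is still safe to read, followed later by

     emit_group_move (parallel, temps);

   once it is time to fill the (probable) hard registers.  */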
1784
1785rtx
1786emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1787{
1788  rtvec vec;
1789  int i;
1790
1791  vec = rtvec_alloc (XVECLEN (parallel, 0));
1792  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1793
1794  /* Convert the vector to look just like the original PARALLEL, except
1795     with the computed values.  */
1796  for (i = 0; i < XVECLEN (parallel, 0); i++)
1797    {
1798      rtx e = XVECEXP (parallel, 0, i);
1799      rtx d = XEXP (e, 0);
1800
1801      if (d)
1802	{
1803	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1805	}
1806      RTVEC_ELT (vec, i) = e;
1807    }
1808
1809  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1810}
1811
1812/* Emit code to move a block SRC to block DST, where SRC and DST are
1813   non-consecutive groups of registers, each represented by a PARALLEL.  */
1814
1815void
1816emit_group_move (rtx dst, rtx src)
1817{
1818  int i;
1819
1820  gcc_assert (GET_CODE (src) == PARALLEL
1821	      && GET_CODE (dst) == PARALLEL
1822	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1823
1824  /* Skip first entry if NULL.  */
1825  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827		    XEXP (XVECEXP (src, 0, i), 0));
1828}
1829
1830/* Move a group of registers represented by a PARALLEL into pseudos.  */
1831
1832rtx
1833emit_group_move_into_temps (rtx src)
1834{
1835  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1836  int i;
1837
1838  for (i = 0; i < XVECLEN (src, 0); i++)
1839    {
1840      rtx e = XVECEXP (src, 0, i);
1841      rtx d = XEXP (e, 0);
1842
1843      if (d)
1844	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845      RTVEC_ELT (vec, i) = e;
1846    }
1847
1848  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1849}
1850
1851/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852   where SRC is non-consecutive registers represented by a PARALLEL.
1853   SSIZE represents the total size of block ORIG_DST, or -1 if not
1854   known.  */
1855
1856void
1857emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1858{
1859  rtx *tmps, dst;
1860  int start, finish, i;
1861  enum machine_mode m = GET_MODE (orig_dst);
1862
1863  gcc_assert (GET_CODE (src) == PARALLEL);
1864
1865  if (!SCALAR_INT_MODE_P (m)
1866      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1867    {
1868      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869      if (imode == BLKmode)
1870        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1871      else
1872        dst = gen_reg_rtx (imode);
1873      emit_group_store (dst, src, type, ssize);
1874      if (imode != BLKmode)
1875        dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876      emit_move_insn (orig_dst, dst);
1877      return;
1878    }
1879
1880  /* Check for a NULL entry, used to indicate that the parameter goes
1881     both on the stack and in registers.  */
1882  if (XEXP (XVECEXP (src, 0, 0), 0))
1883    start = 0;
1884  else
1885    start = 1;
1886  finish = XVECLEN (src, 0);
1887
1888  tmps = alloca (sizeof (rtx) * finish);
1889
1890  /* Copy the (probable) hard regs into pseudos.  */
1891  for (i = start; i < finish; i++)
1892    {
1893      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1894      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1895	{
1896	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897	  emit_move_insn (tmps[i], reg);
1898	}
1899      else
1900	tmps[i] = reg;
1901    }
1902
1903  /* If we won't be storing directly into memory, protect the real destination
1904     from strange tricks we might play.  */
1905  dst = orig_dst;
1906  if (GET_CODE (dst) == PARALLEL)
1907    {
1908      rtx temp;
1909
1910      /* We can get a PARALLEL dst if there is a conditional expression in
1911	 a return statement.  In that case, the dst and src are the same,
1912	 so no action is necessary.  */
1913      if (rtx_equal_p (dst, src))
1914	return;
1915
1916      /* It is unclear if we can ever reach here, but we may as well handle
1917	 it.  Allocate a temporary, and split this into a store/load to/from
1918	 the temporary.  */
1919
1920      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1921      emit_group_store (temp, src, type, ssize);
1922      emit_group_load (dst, temp, type, ssize);
1923      return;
1924    }
1925  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1926    {
1927      enum machine_mode outer = GET_MODE (dst);
1928      enum machine_mode inner;
1929      HOST_WIDE_INT bytepos;
1930      bool done = false;
1931      rtx temp;
1932
1933      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1934	dst = gen_reg_rtx (outer);
1935
1936      /* Make life a bit easier for combine.  */
1937      /* If the first element of the vector is the low part
1938	 of the destination mode, use a paradoxical subreg to
1939	 initialize the destination.  */
1940      if (start < finish)
1941	{
1942	  inner = GET_MODE (tmps[start]);
1943	  bytepos = subreg_lowpart_offset (inner, outer);
1944	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1945	    {
1946	      temp = simplify_gen_subreg (outer, tmps[start],
1947					  inner, 0);
1948	      if (temp)
1949		{
1950		  emit_move_insn (dst, temp);
1951		  done = true;
1952		  start++;
1953		}
1954	    }
1955	}
1956
1957      /* If the first element wasn't the low part, try the last.  */
1958      if (!done
1959	  && start < finish - 1)
1960	{
1961	  inner = GET_MODE (tmps[finish - 1]);
1962	  bytepos = subreg_lowpart_offset (inner, outer);
1963	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1964	    {
1965	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
1966					  inner, 0);
1967	      if (temp)
1968		{
1969		  emit_move_insn (dst, temp);
1970		  done = true;
1971		  finish--;
1972		}
1973	    }
1974	}
1975
1976      /* Otherwise, simply initialize the result to zero.  */
1977      if (!done)
1978        emit_move_insn (dst, CONST0_RTX (outer));
1979    }
1980
1981  /* Process the pieces.  */
1982  for (i = start; i < finish; i++)
1983    {
1984      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1985      enum machine_mode mode = GET_MODE (tmps[i]);
1986      unsigned int bytelen = GET_MODE_SIZE (mode);
1987      rtx dest = dst;
1988
1989      /* Handle trailing fragments that run over the size of the struct.  */
1990      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1991	{
1992	  /* store_bit_field always takes its value from the lsb.
1993	     Move the fragment to the lsb if it's not already there.  */
1994	  if (
1995#ifdef BLOCK_REG_PADDING
1996	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1997	      == (BYTES_BIG_ENDIAN ? upward : downward)
1998#else
1999	      BYTES_BIG_ENDIAN
2000#endif
2001	      )
2002	    {
2003	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2004	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2005				      build_int_cst (NULL_TREE, shift),
2006				      tmps[i], 0);
2007	    }
2008	  bytelen = ssize - bytepos;
2009	}
2010
2011      if (GET_CODE (dst) == CONCAT)
2012	{
2013	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014	    dest = XEXP (dst, 0);
2015	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016	    {
2017	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018	      dest = XEXP (dst, 1);
2019	    }
2020	  else
2021	    {
2022	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023	      dest = assign_stack_temp (GET_MODE (dest),
2024				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2025	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2026			      tmps[i]);
2027	      dst = dest;
2028	      break;
2029	    }
2030	}
2031
2032      /* Optimize the access just a bit.  */
2033      if (MEM_P (dest)
2034	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2035	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2036	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2037	  && bytelen == GET_MODE_SIZE (mode))
2038	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2039      else
2040	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041			 mode, tmps[i]);
2042    }
2043
2044  /* Copy from the pseudo into the (probable) hard reg.  */
2045  if (orig_dst != dst)
2046    emit_move_insn (orig_dst, dst);
2047}
2048
2049/* Generate code to copy a BLKmode object of TYPE out of a
2050   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2051   is null, a stack temporary is created.  TGTBLK is returned.
2052
2053   The purpose of this routine is to handle functions that return
2054   BLKmode structures in registers.  Some machines (the PA for example)
2055   want to return all small structures in registers regardless of the
2056   structure's alignment.  */
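
/* For example (sizes illustrative): a 5-byte structure whose type is
   8-bit aligned is copied out in 8-bit pieces, since BITSIZE below is
   MIN (TYPE_ALIGN (type), BITS_PER_WORD).  */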
2057
2058rtx
2059copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2060{
2061  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062  rtx src = NULL, dst = NULL;
2063  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2064  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2065
2066  if (tgtblk == 0)
2067    {
2068      tgtblk = assign_temp (build_qualified_type (type,
2069						  (TYPE_QUALS (type)
2070						   | TYPE_QUAL_CONST)),
2071			    0, 1, 1);
2072      preserve_temp_slots (tgtblk);
2073    }
2074
2075  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2076     into a new pseudo which is a full word.  */
2077
2078  if (GET_MODE (srcreg) != BLKmode
2079      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2080    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2081
2082  /* If the structure doesn't take up a whole number of words, see whether
2083     SRCREG is padded on the left or on the right.  If it's on the left,
2084     set PADDING_CORRECTION to the number of bits to skip.
2085
     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
2088     targets and left padding on big-endian targets.  The opposite
2089     holds if the structure is returned at the most significant
2090     end of the register.  */
2091  if (bytes % UNITS_PER_WORD != 0
2092      && (targetm.calls.return_in_msb (type)
2093	  ? !BYTES_BIG_ENDIAN
2094	  : BYTES_BIG_ENDIAN))
2095    padding_correction
2096      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
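
  /* For instance (numbers illustrative): with 32-bit words, a 6-byte
     structure padded on the left leaves bytes % UNITS_PER_WORD == 2,
     so PADDING_CORRECTION is 32 - 2 * 8 = 16 bits.  */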
2097
  /* Copy the structure BITSIZE bits at a time.
2099
2100     We could probably emit more efficient code for machines which do not use
2101     strict alignment, but it doesn't seem worth the effort at the current
2102     time.  */
2103  for (bitpos = 0, xbitpos = padding_correction;
2104       bitpos < bytes * BITS_PER_UNIT;
2105       bitpos += bitsize, xbitpos += bitsize)
2106    {
2107      /* We need a new source operand each time xbitpos is on a
2108	 word boundary and when xbitpos == padding_correction
2109	 (the first time through).  */
2110      if (xbitpos % BITS_PER_WORD == 0
2111	  || xbitpos == padding_correction)
2112	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113				     GET_MODE (srcreg));
2114
2115      /* We need a new destination operand each time bitpos is on
2116	 a word boundary.  */
2117      if (bitpos % BITS_PER_WORD == 0)
2118	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2119
      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2122      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123		       extract_bit_field (src, bitsize,
2124					  xbitpos % BITS_PER_WORD, 1,
2125					  NULL_RTX, word_mode, word_mode));
2126    }
2127
2128  return tgtblk;
2129}
2130
2131/* Add a USE expression for REG to the (possibly empty) list pointed
2132   to by CALL_FUSAGE.  REG must denote a hard register.  */
2133
2134void
2135use_reg (rtx *call_fusage, rtx reg)
2136{
2137  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138
2139  *call_fusage
2140    = gen_rtx_EXPR_LIST (VOIDmode,
2141			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2142}
2143
2144/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145   starting at REGNO.  All of these registers must be hard registers.  */
2146
2147void
2148use_regs (rtx *call_fusage, int regno, int nregs)
2149{
2150  int i;
2151
2152  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2153
2154  for (i = 0; i < nregs; i++)
2155    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2156}
2157
2158/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2159   PARALLEL REGS.  This is for calls that pass values in multiple
2160   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2161
2162void
2163use_group_regs (rtx *call_fusage, rtx regs)
2164{
2165  int i;
2166
2167  for (i = 0; i < XVECLEN (regs, 0); i++)
2168    {
2169      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2170
2171      /* A NULL entry means the parameter goes both on the stack and in
2172	 registers.  This can also be a MEM for targets that pass values
2173	 partially on the stack and partially in registers.  */
2174      if (reg != 0 && REG_P (reg))
2175	use_reg (call_fusage, reg);
2176    }
2177}
2178
2179
2180/* Determine whether the LEN bytes generated by CONSTFUN can be
2181   stored to memory using several move instructions.  CONSTFUNDATA is
2182   a pointer which will be passed as argument in every CONSTFUN call.
2183   ALIGN is maximum alignment we can assume.  Return nonzero if a
2184   call to store_by_pieces should succeed.  */
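
/* The simplest such CONSTFUN is clear_by_pieces_1 below, which ignores
   its arguments and returns const0_rtx for every piece.  */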
2185
2186int
2187can_store_by_pieces (unsigned HOST_WIDE_INT len,
2188		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2189		     void *constfundata, unsigned int align)
2190{
2191  unsigned HOST_WIDE_INT l;
2192  unsigned int max_size;
2193  HOST_WIDE_INT offset = 0;
2194  enum machine_mode mode, tmode;
2195  enum insn_code icode;
2196  int reverse;
2197  rtx cst;
2198
2199  if (len == 0)
2200    return 1;
2201
2202  if (! STORE_BY_PIECES_P (len, align))
2203    return 0;
2204
2205  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2206  if (align >= GET_MODE_ALIGNMENT (tmode))
2207    align = GET_MODE_ALIGNMENT (tmode);
2208  else
2209    {
2210      enum machine_mode xmode;
2211
2212      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2213	   tmode != VOIDmode;
2214	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2215	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2216	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2217	  break;
2218
2219      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2220    }
2221
2222  /* We would first store what we can in the largest integer mode, then go to
2223     successively smaller modes.  */
2224
2225  for (reverse = 0;
2226       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2227       reverse++)
2228    {
2229      l = len;
2230      mode = VOIDmode;
2231      max_size = STORE_MAX_PIECES + 1;
2232      while (max_size > 1)
2233	{
2234	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2235	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2236	    if (GET_MODE_SIZE (tmode) < max_size)
2237	      mode = tmode;
2238
2239	  if (mode == VOIDmode)
2240	    break;
2241
2242	  icode = mov_optab->handlers[(int) mode].insn_code;
2243	  if (icode != CODE_FOR_nothing
2244	      && align >= GET_MODE_ALIGNMENT (mode))
2245	    {
2246	      unsigned int size = GET_MODE_SIZE (mode);
2247
2248	      while (l >= size)
2249		{
2250		  if (reverse)
2251		    offset -= size;
2252
2253		  cst = (*constfun) (constfundata, offset, mode);
2254		  if (!LEGITIMATE_CONSTANT_P (cst))
2255		    return 0;
2256
2257		  if (!reverse)
2258		    offset += size;
2259
2260		  l -= size;
2261		}
2262	    }
2263
2264	  max_size = GET_MODE_SIZE (mode);
2265	}
2266
2267      /* The code above should have handled everything.  */
2268      gcc_assert (!l);
2269    }
2270
2271  return 1;
2272}
2273
2274/* Generate several move instructions to store LEN bytes generated by
2275   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2276   pointer which will be passed as argument in every CONSTFUN call.
2277   ALIGN is maximum alignment we can assume.
   If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
   mempcpy; and if ENDP is 2, return memory at the end minus one byte, a la
   stpcpy.  */
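
/* For example (offsets illustrative): after storing 5 bytes at TO,
   ENDP == 1 yields memory at TO + 5, as mempcpy would return, while
   ENDP == 2 yields memory at TO + 4, as stpcpy would.  */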
2281
2282rtx
2283store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2284		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2285		 void *constfundata, unsigned int align, int endp)
2286{
2287  struct store_by_pieces data;
2288
2289  if (len == 0)
2290    {
2291      gcc_assert (endp != 2);
2292      return to;
2293    }
2294
2295  gcc_assert (STORE_BY_PIECES_P (len, align));
2296  data.constfun = constfun;
2297  data.constfundata = constfundata;
2298  data.len = len;
2299  data.to = to;
2300  store_by_pieces_1 (&data, align);
2301  if (endp)
2302    {
2303      rtx to1;
2304
2305      gcc_assert (!data.reverse);
2306      if (data.autinc_to)
2307	{
2308	  if (endp == 2)
2309	    {
2310	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2311		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2312	      else
2313		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2314								-1));
2315	    }
2316	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2317					   data.offset);
2318	}
2319      else
2320	{
2321	  if (endp == 2)
2322	    --data.offset;
2323	  to1 = adjust_address (data.to, QImode, data.offset);
2324	}
2325      return to1;
2326    }
2327  else
2328    return data.to;
2329}
2330
2331/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2332   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2333
2334static void
2335clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2336{
2337  struct store_by_pieces data;
2338
2339  if (len == 0)
2340    return;
2341
2342  data.constfun = clear_by_pieces_1;
2343  data.constfundata = NULL;
2344  data.len = len;
2345  data.to = to;
2346  store_by_pieces_1 (&data, align);
2347}
2348
2349/* Callback routine for clear_by_pieces.
2350   Return const0_rtx unconditionally.  */
2351
2352static rtx
2353clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2354		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2355		   enum machine_mode mode ATTRIBUTE_UNUSED)
2356{
2357  return const0_rtx;
2358}
2359
2360/* Subroutine of clear_by_pieces and store_by_pieces.
2361   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2362   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2363
2364static void
store_by_pieces_1 (struct store_by_pieces *data, unsigned int align)
2367{
2368  rtx to_addr = XEXP (data->to, 0);
2369  unsigned int max_size = STORE_MAX_PIECES + 1;
2370  enum machine_mode mode = VOIDmode, tmode;
2371  enum insn_code icode;
2372
2373  data->offset = 0;
2374  data->to_addr = to_addr;
2375  data->autinc_to
2376    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2377       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2378
2379  data->explicit_inc_to = 0;
2380  data->reverse
2381    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2382  if (data->reverse)
2383    data->offset = data->len;
2384
2385  /* If storing requires more than two move insns,
2386     copy addresses to registers (to make displacements shorter)
2387     and use post-increment if available.  */
2388  if (!data->autinc_to
2389      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2390    {
2391      /* Determine the main mode we'll be using.  */
2392      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2393	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2394	if (GET_MODE_SIZE (tmode) < max_size)
2395	  mode = tmode;
2396
2397      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2398	{
2399	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2400	  data->autinc_to = 1;
2401	  data->explicit_inc_to = -1;
2402	}
2403
2404      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2405	  && ! data->autinc_to)
2406	{
2407	  data->to_addr = copy_addr_to_reg (to_addr);
2408	  data->autinc_to = 1;
2409	  data->explicit_inc_to = 1;
2410	}
2411
      if (!data->autinc_to && CONSTANT_P (to_addr))
2413	data->to_addr = copy_addr_to_reg (to_addr);
2414    }
2415
2416  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2417  if (align >= GET_MODE_ALIGNMENT (tmode))
2418    align = GET_MODE_ALIGNMENT (tmode);
2419  else
2420    {
2421      enum machine_mode xmode;
2422
2423      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2424	   tmode != VOIDmode;
2425	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2426	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2427	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2428	  break;
2429
2430      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2431    }
2432
2433  /* First store what we can in the largest integer mode, then go to
2434     successively smaller modes.  */
2435
2436  while (max_size > 1)
2437    {
2438      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440	if (GET_MODE_SIZE (tmode) < max_size)
2441	  mode = tmode;
2442
2443      if (mode == VOIDmode)
2444	break;
2445
2446      icode = mov_optab->handlers[(int) mode].insn_code;
2447      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2448	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2449
2450      max_size = GET_MODE_SIZE (mode);
2451    }
2452
2453  /* The code above should have handled everything.  */
2454  gcc_assert (!data->len);
2455}
2456
2457/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2458   with move instructions for mode MODE.  GENFUN is the gen_... function
2459   to make a move insn for that mode.  DATA has all the other info.  */
2460
2461static void
2462store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2463		   struct store_by_pieces *data)
2464{
2465  unsigned int size = GET_MODE_SIZE (mode);
2466  rtx to1, cst;
2467
2468  while (data->len >= size)
2469    {
2470      if (data->reverse)
2471	data->offset -= size;
2472
2473      if (data->autinc_to)
2474	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2475					 data->offset);
2476      else
2477	to1 = adjust_address (data->to, mode, data->offset);
2478
2479      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2480	emit_insn (gen_add2_insn (data->to_addr,
2481				  GEN_INT (-(HOST_WIDE_INT) size)));
2482
2483      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2484      emit_insn ((*genfun) (to1, cst));
2485
2486      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2487	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2488
2489      if (! data->reverse)
2490	data->offset += size;
2491
2492      data->len -= size;
2493    }
2494}
2495
2496/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2497   its length in bytes.  */
2498
2499rtx
2500clear_storage (rtx object, rtx size, enum block_op_methods method)
2501{
2502  enum machine_mode mode = GET_MODE (object);
2503  unsigned int align;
2504
2505  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2506
2507  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2508     just move a zero.  Otherwise, do this a piece at a time.  */
2509  if (mode != BLKmode
2510      && GET_CODE (size) == CONST_INT
2511      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2512    {
2513      rtx zero = CONST0_RTX (mode);
2514      if (zero != NULL)
2515	{
2516	  emit_move_insn (object, zero);
2517	  return NULL;
2518	}
2519
2520      if (COMPLEX_MODE_P (mode))
2521	{
2522	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2523	  if (zero != NULL)
2524	    {
2525	      write_complex_part (object, zero, 0);
2526	      write_complex_part (object, zero, 1);
2527	      return NULL;
2528	    }
2529	}
2530    }
2531
2532  if (size == const0_rtx)
2533    return NULL;
2534
2535  align = MEM_ALIGN (object);
2536
2537  if (GET_CODE (size) == CONST_INT
2538      && CLEAR_BY_PIECES_P (INTVAL (size), align))
2539    clear_by_pieces (object, INTVAL (size), align);
2540  else if (set_storage_via_setmem (object, size, const0_rtx, align))
2541    ;
2542  else
2543    return clear_storage_via_libcall (object, size,
2544				      method == BLOCK_OP_TAILCALL);
2545
2546  return NULL;
2547}
2548
2549/* A subroutine of clear_storage.  Expand a call to memset.
2550   Return the return value of memset, 0 otherwise.  */
2551
2552static rtx
2553clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2554{
2555  tree call_expr, arg_list, fn, object_tree, size_tree;
2556  enum machine_mode size_mode;
2557  rtx retval;
2558
  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     wrap those pseudos in tree nodes and use them in the call below.  */
2561
2562  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2563
2564  size_mode = TYPE_MODE (sizetype);
2565  size = convert_to_mode (size_mode, size, 1);
2566  size = copy_to_mode_reg (size_mode, size);
2567
2568  /* It is incorrect to use the libcall calling conventions to call
2569     memset in this context.  This could be a user call to memset and
2570     the user may wish to examine the return value from memset.  For
2571     targets where libcalls and normal calls have different conventions
2572     for returning pointers, we could end up generating incorrect code.  */
2573
2574  object_tree = make_tree (ptr_type_node, object);
2575  size_tree = make_tree (sizetype, size);
2576
2577  fn = clear_storage_libcall_fn (true);
2578  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2579  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2580  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2581
2582  /* Now we have to build up the CALL_EXPR itself.  */
2583  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2584  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2585		      call_expr, arg_list, NULL_TREE);
2586  CALL_EXPR_TAILCALL (call_expr) = tailcall;
2587
2588  retval = expand_normal (call_expr);
2589
2590  return retval;
2591}
2592
2593/* A subroutine of clear_storage_via_libcall.  Create the tree node
2594   for the function we use for block clears.  The first time FOR_CALL
2595   is true, we call assemble_external.  */
2596
2597static GTY(()) tree block_clear_fn;
2598
2599void
2600init_block_clear_fn (const char *asmspec)
2601{
2602  if (!block_clear_fn)
2603    {
2604      tree fn, args;
2605
2606      fn = get_identifier ("memset");
2607      args = build_function_type_list (ptr_type_node, ptr_type_node,
2608				       integer_type_node, sizetype,
2609				       NULL_TREE);
2610
2611      fn = build_decl (FUNCTION_DECL, fn, args);
2612      DECL_EXTERNAL (fn) = 1;
2613      TREE_PUBLIC (fn) = 1;
2614      DECL_ARTIFICIAL (fn) = 1;
2615      TREE_NOTHROW (fn) = 1;
2616      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617      DECL_VISIBILITY_SPECIFIED (fn) = 1;
2618
2619      block_clear_fn = fn;
2620    }
2621
2622  if (asmspec)
2623    set_user_assembler_name (block_clear_fn, asmspec);
2624}
2625
2626static tree
2627clear_storage_libcall_fn (int for_call)
2628{
2629  static bool emitted_extern;
2630
2631  if (!block_clear_fn)
2632    init_block_clear_fn (NULL);
2633
2634  if (for_call && !emitted_extern)
2635    {
2636      emitted_extern = true;
2637      make_decl_rtl (block_clear_fn);
2638      assemble_external (block_clear_fn);
2639    }
2640
2641  return block_clear_fn;
2642}
2643
2644/* Expand a setmem pattern; return true if successful.  */
2645
2646bool
2647set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2648{
2649  /* Try the most limited insn first, because there's no point
2650     including more than one in the machine description unless
2651     the more limited one has some advantage.  */
2652
2653  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654  enum machine_mode mode;
2655
2656  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657       mode = GET_MODE_WIDER_MODE (mode))
2658    {
2659      enum insn_code code = setmem_optab[(int) mode];
2660      insn_operand_predicate_fn pred;
2661
2662      if (code != CODE_FOR_nothing
2663	  /* We don't need MODE to be narrower than
2664	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665	     the mode mask, as it is returned by the macro, it will
2666	     definitely be less than the actual mode mask.  */
2667	  && ((GET_CODE (size) == CONST_INT
2668	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669		   <= (GET_MODE_MASK (mode) >> 1)))
2670	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672	      || (*pred) (object, BLKmode))
2673	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674	      || (*pred) (opalign, VOIDmode)))
2675	{
2676	  rtx opsize, opchar;
2677	  enum machine_mode char_mode;
2678	  rtx last = get_last_insn ();
2679	  rtx pat;
2680
2681	  opsize = convert_to_mode (mode, size, 1);
2682	  pred = insn_data[(int) code].operand[1].predicate;
2683	  if (pred != 0 && ! (*pred) (opsize, mode))
2684	    opsize = copy_to_mode_reg (mode, opsize);
2685
2686	  opchar = val;
2687	  char_mode = insn_data[(int) code].operand[2].mode;
2688	  if (char_mode != VOIDmode)
2689	    {
2690	      opchar = convert_to_mode (char_mode, opchar, 1);
2691	      pred = insn_data[(int) code].operand[2].predicate;
2692	      if (pred != 0 && ! (*pred) (opchar, char_mode))
2693		opchar = copy_to_mode_reg (char_mode, opchar);
2694	    }
2695
2696	  pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2697	  if (pat)
2698	    {
2699	      emit_insn (pat);
2700	      return true;
2701	    }
2702	  else
2703	    delete_insns_since (last);
2704	}
2705    }
2706
2707  return false;
2708}
2709
2710
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and to the imaginary part if it's true.  */
2713
2714static void
2715write_complex_part (rtx cplx, rtx val, bool imag_p)
2716{
2717  enum machine_mode cmode;
2718  enum machine_mode imode;
2719  unsigned ibitsize;
2720
2721  if (GET_CODE (cplx) == CONCAT)
2722    {
2723      emit_move_insn (XEXP (cplx, imag_p), val);
2724      return;
2725    }
2726
2727  cmode = GET_MODE (cplx);
2728  imode = GET_MODE_INNER (cmode);
2729  ibitsize = GET_MODE_BITSIZE (imode);
2730
2731  /* For MEMs simplify_gen_subreg may generate an invalid new address
2732     because, e.g., the original address is considered mode-dependent
2733     by the target, which restricts simplify_subreg from invoking
2734     adjust_address_nv.  Instead of preparing fallback support for an
2735     invalid address, we call adjust_address_nv directly.  */
2736  if (MEM_P (cplx))
2737    {
2738      emit_move_insn (adjust_address_nv (cplx, imode,
2739					 imag_p ? GET_MODE_SIZE (imode) : 0),
2740		      val);
2741      return;
2742    }
2743
2744  /* If the sub-object is at least word sized, then we know that subregging
2745     will work.  This special case is important, since store_bit_field
2746     wants to operate on integer modes, and there's rarely an OImode to
2747     correspond to TCmode.  */
2748  if (ibitsize >= BITS_PER_WORD
2749      /* For hard regs we have exact predicates.  Assume we can split
2750	 the original object if it spans an even number of hard regs.
2751	 This special case is important for SCmode on 64-bit platforms
2752	 where the natural size of floating-point regs is 32-bit.  */
2753      || (REG_P (cplx)
2754	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2756    {
2757      rtx part = simplify_gen_subreg (imode, cplx, cmode,
2758				      imag_p ? GET_MODE_SIZE (imode) : 0);
2759      if (part)
2760        {
2761	  emit_move_insn (part, val);
2762	  return;
2763	}
2764      else
2765	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2766	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2767    }
2768
2769  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2770}
2771
2772/* Extract one of the components of the complex value CPLX.  Extract the
2773   real part if IMAG_P is false, and the imaginary part if it's true.  */
2774
2775static rtx
2776read_complex_part (rtx cplx, bool imag_p)
2777{
2778  enum machine_mode cmode, imode;
2779  unsigned ibitsize;
2780
2781  if (GET_CODE (cplx) == CONCAT)
2782    return XEXP (cplx, imag_p);
2783
2784  cmode = GET_MODE (cplx);
2785  imode = GET_MODE_INNER (cmode);
2786  ibitsize = GET_MODE_BITSIZE (imode);
2787
2788  /* Special case reads from complex constants that got spilled to memory.  */
2789  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2790    {
2791      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2792      if (decl && TREE_CODE (decl) == COMPLEX_CST)
2793	{
2794	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2795	  if (CONSTANT_CLASS_P (part))
2796	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2797	}
2798    }
2799
2800  /* For MEMs simplify_gen_subreg may generate an invalid new address
2801     because, e.g., the original address is considered mode-dependent
2802     by the target, which restricts simplify_subreg from invoking
2803     adjust_address_nv.  Instead of preparing fallback support for an
2804     invalid address, we call adjust_address_nv directly.  */
2805  if (MEM_P (cplx))
2806    return adjust_address_nv (cplx, imode,
2807			      imag_p ? GET_MODE_SIZE (imode) : 0);
2808
2809  /* If the sub-object is at least word sized, then we know that subregging
2810     will work.  This special case is important, since extract_bit_field
2811     wants to operate on integer modes, and there's rarely an OImode to
2812     correspond to TCmode.  */
2813  if (ibitsize >= BITS_PER_WORD
2814      /* For hard regs we have exact predicates.  Assume we can split
2815	 the original object if it spans an even number of hard regs.
2816	 This special case is important for SCmode on 64-bit platforms
2817	 where the natural size of floating-point regs is 32-bit.  */
2818      || (REG_P (cplx)
2819	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2820	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2821    {
2822      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2823				     imag_p ? GET_MODE_SIZE (imode) : 0);
2824      if (ret)
2825        return ret;
2826      else
2827	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2828	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2829    }
2830
2831  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2832			    true, NULL_RTX, imode, imode);
2833}
2834
2835/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2836   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2837   represented in NEW_MODE.  If FORCE is true, this will never happen, as
2838   we'll force-create a SUBREG if needed.  */
2839
2840static rtx
2841emit_move_change_mode (enum machine_mode new_mode,
2842		       enum machine_mode old_mode, rtx x, bool force)
2843{
2844  rtx ret;
2845
2846  if (MEM_P (x))
2847    {
2848      /* We don't have to worry about changing the address since the
2849	 size in bytes is supposed to be the same.  */
2850      if (reload_in_progress)
2851	{
2852	  /* Copy the MEM to change the mode and move any
2853	     substitutions from the old MEM to the new one.  */
2854	  ret = adjust_address_nv (x, new_mode, 0);
2855	  copy_replacements (x, ret);
2856	}
2857      else
2858	ret = adjust_address (x, new_mode, 0);
2859    }
2860  else
2861    {
2862      /* Note that we do want simplify_subreg's behavior of validating
2863	 that the new mode is ok for a hard register.  If we were to use
2864	 simplify_gen_subreg, we would create the subreg, but would
2865	 probably run into the target not being able to implement it.  */
2866      /* Except, of course, when FORCE is true, when this is exactly what
2867	 we want.  Which is needed for CCmodes on some targets.  */
2868      if (force)
2869	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2870      else
2871	ret = simplify_subreg (new_mode, x, old_mode, 0);
2872    }
2873
2874  return ret;
2875}
2876
2877/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
2878   an integer mode of the same size as MODE.  Returns the instruction
2879   emitted, or NULL if such a move could not be generated.  */
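
/* For example (hypothetical target): an SFmode move on a target that
   lacks a movsf pattern can be performed as an SImode move, assuming
   SImode has the same size and a working move pattern.  */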
2880
2881static rtx
2882emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2883{
2884  enum machine_mode imode;
2885  enum insn_code code;
2886
2887  /* There must exist a mode of the exact size we require.  */
2888  imode = int_mode_for_mode (mode);
2889  if (imode == BLKmode)
2890    return NULL_RTX;
2891
2892  /* The target must support moves in this mode.  */
2893  code = mov_optab->handlers[imode].insn_code;
2894  if (code == CODE_FOR_nothing)
2895    return NULL_RTX;
2896
2897  x = emit_move_change_mode (imode, mode, x, force);
2898  if (x == NULL_RTX)
2899    return NULL_RTX;
2900  y = emit_move_change_mode (imode, mode, y, force);
2901  if (y == NULL_RTX)
2902    return NULL_RTX;
2903  return emit_insn (GEN_FCN (code) (x, y));
2904}
2905
2906/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
2907   Return an equivalent MEM that does not use an auto-increment.  */
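
/* For example (assuming 4-byte SImode and no PUSH_ROUNDING): given
   (mem:SI (pre_dec (reg sp))), emit an explicit sp = sp - 4 and
   return (mem:SI (reg sp)).  */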
2908
2909static rtx
2910emit_move_resolve_push (enum machine_mode mode, rtx x)
2911{
2912  enum rtx_code code = GET_CODE (XEXP (x, 0));
2913  HOST_WIDE_INT adjust;
2914  rtx temp;
2915
2916  adjust = GET_MODE_SIZE (mode);
2917#ifdef PUSH_ROUNDING
2918  adjust = PUSH_ROUNDING (adjust);
2919#endif
2920  if (code == PRE_DEC || code == POST_DEC)
2921    adjust = -adjust;
2922  else if (code == PRE_MODIFY || code == POST_MODIFY)
2923    {
2924      rtx expr = XEXP (XEXP (x, 0), 1);
2925      HOST_WIDE_INT val;
2926
2927      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2928      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2929      val = INTVAL (XEXP (expr, 1));
2930      if (GET_CODE (expr) == MINUS)
2931	val = -val;
2932      gcc_assert (adjust == val || adjust == -val);
2933      adjust = val;
2934    }
2935
2936  /* Do not use anti_adjust_stack, since we don't want to update
2937     stack_pointer_delta.  */
2938  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2939			      GEN_INT (adjust), stack_pointer_rtx,
2940			      0, OPTAB_LIB_WIDEN);
2941  if (temp != stack_pointer_rtx)
2942    emit_move_insn (stack_pointer_rtx, temp);
2943
2944  switch (code)
2945    {
2946    case PRE_INC:
2947    case PRE_DEC:
2948    case PRE_MODIFY:
2949      temp = stack_pointer_rtx;
2950      break;
2951    case POST_INC:
2952    case POST_DEC:
2953    case POST_MODIFY:
2954      temp = plus_constant (stack_pointer_rtx, -adjust);
2955      break;
2956    default:
2957      gcc_unreachable ();
2958    }
2959
2960  return replace_equiv_address (x, temp);
2961}
2962
2963/* A subroutine of emit_move_complex.  Generate a move from Y into X.
2964   X is known to satisfy push_operand, and MODE is known to be complex.
2965   Returns the last instruction emitted.  */
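
/* For example, with a decrementing stack pointer (PRE_DEC or
   POST_DEC), the imaginary part is pushed first so that the real part
   ends up at the lower address, preserving the in-memory layout.  */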
2966
2967static rtx
2968emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2969{
2970  enum machine_mode submode = GET_MODE_INNER (mode);
2971  bool imag_first;
2972
2973#ifdef PUSH_ROUNDING
2974  unsigned int submodesize = GET_MODE_SIZE (submode);
2975
  /* If we would push to the stack, but each part is smaller than what
     the machine can push exactly, we must use move instructions.  */
2978  if (PUSH_ROUNDING (submodesize) != submodesize)
2979    {
2980      x = emit_move_resolve_push (mode, x);
2981      return emit_move_insn (x, y);
2982    }
2983#endif
2984
2985  /* Note that the real part always precedes the imag part in memory
2986     regardless of machine's endianness.  */
2987  switch (GET_CODE (XEXP (x, 0)))
2988    {
2989    case PRE_DEC:
2990    case POST_DEC:
2991      imag_first = true;
2992      break;
2993    case PRE_INC:
2994    case POST_INC:
2995      imag_first = false;
2996      break;
2997    default:
2998      gcc_unreachable ();
2999    }
3000
3001  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3002		  read_complex_part (y, imag_first));
3003  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004			 read_complex_part (y, !imag_first));
3005}
3006
3007/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3008   MODE is known to be complex.  Returns the last instruction emitted.  */
3009
3010static rtx
3011emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3012{
3013  bool try_int;
3014
3015  /* Need to take special care for pushes, to maintain proper ordering
3016     of the data, and possibly extra padding.  */
3017  if (push_operand (x, mode))
3018    return emit_move_complex_push (mode, x, y);
3019
3020  /* See if we can coerce the target into moving both values at once.  */
3021
3022  /* Move floating point as parts.  */
3023  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3024      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3025    try_int = false;
3026  /* Not possible if the values are inherently not adjacent.  */
3027  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3028    try_int = false;
  /* It is possible if both are registers (or subregs of registers).  */
3030  else if (register_operand (x, mode) && register_operand (y, mode))
3031    try_int = true;
3032  /* If one of the operands is a memory, and alignment constraints
3033     are friendly enough, we may be able to do combined memory operations.
3034     We do not attempt this if Y is a constant because that combination is
     usually handled better by the by-parts copy below.  */
3036  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3037	   && (!STRICT_ALIGNMENT
3038	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3039    try_int = true;
3040  else
3041    try_int = false;
3042
3043  if (try_int)
3044    {
3045      rtx ret;
3046
3047      /* For memory to memory moves, optimal behavior can be had with the
3048	 existing block move logic.  */
3049      if (MEM_P (x) && MEM_P (y))
3050	{
3051	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3052			   BLOCK_OP_NO_LIBCALL);
3053	  return get_last_insn ();
3054	}
3055
3056      ret = emit_move_via_integer (mode, x, y, true);
3057      if (ret)
3058	return ret;
3059    }
3060
3061  /* Show the output dies here.  This is necessary for SUBREGs
3062     of pseudos since we cannot track their lifetimes correctly;
3063     hard regs shouldn't appear here except as return values.  */
3064  if (!reload_completed && !reload_in_progress
3065      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3066    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3067
3068  write_complex_part (x, read_complex_part (y, false), false);
3069  write_complex_part (x, read_complex_part (y, true), true);
3070  return get_last_insn ();
3071}
3072
3073/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3074   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3075
3076static rtx
3077emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3078{
3079  rtx ret;
3080
3081  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3082  if (mode != CCmode)
3083    {
3084      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3085      if (code != CODE_FOR_nothing)
3086	{
3087	  x = emit_move_change_mode (CCmode, mode, x, true);
3088	  y = emit_move_change_mode (CCmode, mode, y, true);
3089	  return emit_insn (GEN_FCN (code) (x, y));
3090	}
3091    }
3092
3093  /* Otherwise, find the MODE_INT mode of the same width.  */
3094  ret = emit_move_via_integer (mode, x, y, false);
3095  gcc_assert (ret != NULL);
3096  return ret;
3097}
3098
3099/* Return true if word I of OP lies entirely in the
3100   undefined bits of a paradoxical subreg.  */
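
/* For example (modes illustrative): on a little-endian target with
   32-bit words, word 1 of the paradoxical (subreg:DI (reg:SI 60) 0)
   lies entirely outside the SImode source, so its bits are undefined.  */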
3101
3102static bool
3103undefined_operand_subword_p (rtx op, int i)
3104{
3105  enum machine_mode innermode, innermostmode;
3106  int offset;
3107  if (GET_CODE (op) != SUBREG)
3108    return false;
3109  innermode = GET_MODE (op);
3110  innermostmode = GET_MODE (SUBREG_REG (op));
3111  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents the offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
3116  if (SUBREG_BYTE (op) == 0
3117      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3118    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
3120      if (WORDS_BIG_ENDIAN)
3121	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3122      if (BYTES_BIG_ENDIAN)
3123	offset += difference % UNITS_PER_WORD;
3124    }
3125  if (offset >= GET_MODE_SIZE (innermostmode)
3126      || offset <= -GET_MODE_SIZE (word_mode))
3127    return true;
3128  return false;
3129}
3130
3131/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3132   MODE is any multi-word or full-word mode that lacks a move_insn
3133   pattern.  Note that you will get better code if you define such
3134   patterns, even if they must turn into multiple assembler instructions.  */
3135
3136static rtx
3137emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3138{
3139  rtx last_insn = 0;
3140  rtx seq, inner;
3141  bool need_clobber;
3142  int i;
3143
3144  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3145
3146  /* If X is a push on the stack, do the push now and replace
3147     X with a reference to the stack pointer.  */
3148  if (push_operand (x, mode))
3149    x = emit_move_resolve_push (mode, x);
3150
3151  /* If we are in reload, see if either operand is a MEM whose address
3152     is scheduled for replacement.  */
3153  if (reload_in_progress && MEM_P (x)
3154      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3155    x = replace_equiv_address_nv (x, inner);
3156  if (reload_in_progress && MEM_P (y)
3157      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3158    y = replace_equiv_address_nv (y, inner);
3159
3160  start_sequence ();
3161
3162  need_clobber = false;
3163  for (i = 0;
3164       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3165       i++)
3166    {
3167      rtx xpart = operand_subword (x, i, 1, mode);
3168      rtx ypart;
3169
3170      /* Do not generate code for a move if it would come entirely
3171	 from the undefined bits of a paradoxical subreg.  */
3172      if (undefined_operand_subword_p (y, i))
3173	continue;
3174
3175      ypart = operand_subword (y, i, 1, mode);
3176
3177      /* If we can't get a part of Y, put Y into memory if it is a
3178	 constant.  Otherwise, force it into a register.  Then we must
3179	 be able to get a part of Y.  */
3180      if (ypart == 0 && CONSTANT_P (y))
3181	{
3182	  y = use_anchored_address (force_const_mem (mode, y));
3183	  ypart = operand_subword (y, i, 1, mode);
3184	}
3185      else if (ypart == 0)
3186	ypart = operand_subword_force (y, i, mode);
3187
3188      gcc_assert (xpart && ypart);
3189
3190      need_clobber |= (GET_CODE (xpart) == SUBREG);
3191
3192      last_insn = emit_move_insn (xpart, ypart);
3193    }
3194
3195  seq = get_insns ();
3196  end_sequence ();
3197
3198  /* Show the output dies here.  This is necessary for SUBREGs
3199     of pseudos since we cannot track their lifetimes correctly;
3200     hard regs shouldn't appear here except as return values.
3201     We never want to emit such a clobber after reload.  */
3202  if (x != y
3203      && ! (reload_in_progress || reload_completed)
3204      && need_clobber != 0)
3205    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3206
3207  emit_insn (seq);
3208
3209  return last_insn;
3210}
3211
3212/* Low level part of emit_move_insn.
3213   Called just like emit_move_insn, but assumes X and Y
3214   are basically valid.  */
3215
3216rtx
3217emit_move_insn_1 (rtx x, rtx y)
3218{
3219  enum machine_mode mode = GET_MODE (x);
3220  enum insn_code code;
3221
3222  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3223
3224  code = mov_optab->handlers[mode].insn_code;
3225  if (code != CODE_FOR_nothing)
3226    return emit_insn (GEN_FCN (code) (x, y));
3227
3228  /* Expand complex moves by moving real part and imag part.  */
3229  if (COMPLEX_MODE_P (mode))
3230    return emit_move_complex (mode, x, y);
3231
3232  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3233    {
3234      rtx result = emit_move_via_integer (mode, x, y, true);
3235
3236      /* If we can't find an integer mode, use multi words.  */
3237      if (result)
3238	return result;
3239      else
3240	return emit_move_multi_word (mode, x, y);
3241    }
3242
3243  if (GET_MODE_CLASS (mode) == MODE_CC)
3244    return emit_move_ccmode (mode, x, y);
3245
3246  /* Try using a move pattern for the corresponding integer mode.  This is
3247     only safe when simplify_subreg can convert MODE constants into integer
3248     constants.  At present, it can only do this reliably if the value
3249     fits within a HOST_WIDE_INT.  */
3250  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3251    {
3252      rtx ret = emit_move_via_integer (mode, x, y, false);
3253      if (ret)
3254	return ret;
3255    }
3256
3257  return emit_move_multi_word (mode, x, y);
3258}
3259
3260/* Generate code to copy Y into X.
3261   Both Y and X must have the same mode, except that
3262   Y can be a constant with VOIDmode.
3263   This mode cannot be BLKmode; use emit_block_move for that.
3264
3265   Return the last instruction emitted.  */
3266
3267rtx
3268emit_move_insn (rtx x, rtx y)
3269{
3270  enum machine_mode mode = GET_MODE (x);
3271  rtx y_cst = NULL_RTX;
3272  rtx last_insn, set;
3273
3274  gcc_assert (mode != BLKmode
3275	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3276
3277  if (CONSTANT_P (y))
3278    {
3279      if (optimize
3280	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3281	  && (last_insn = compress_float_constant (x, y)))
3282	return last_insn;
3283
3284      y_cst = y;
3285
3286      if (!LEGITIMATE_CONSTANT_P (y))
3287	{
3288	  y = force_const_mem (mode, y);
3289
3290	  /* If the target's cannot_force_const_mem prevented the spill,
3291	     assume that the target's move expanders will also take care
3292	     of the non-legitimate constant.  */
3293	  if (!y)
3294	    y = y_cst;
3295	  else
3296	    y = use_anchored_address (y);
3297	}
3298    }
3299
3300  /* If X or Y are memory references, verify that their addresses are valid
3301     for the machine.  */
3302  if (MEM_P (x)
3303      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3304	   && ! push_operand (x, GET_MODE (x)))
3305	  || (flag_force_addr
3306	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3307    x = validize_mem (x);
3308
3309  if (MEM_P (y)
3310      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3311	  || (flag_force_addr
3312	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3313    y = validize_mem (y);
3314
3315  gcc_assert (mode != BLKmode);
3316
3317  last_insn = emit_move_insn_1 (x, y);
3318
3319  if (y_cst && REG_P (x)
3320      && (set = single_set (last_insn)) != NULL_RTX
3321      && SET_DEST (set) == x
3322      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3323    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3324
3325  return last_insn;
3326}
3327
3328/* If Y is representable exactly in a narrower mode, and the target can
3329   perform the extension directly from constant or memory, then emit the
3330   move as an extension.  */
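
/* For example: the DFmode constant 1.0 is exactly representable in
   SFmode, so on a target with a suitable extendsfdf2 pattern it can be
   loaded as an SFmode constant and extended, often saving a wider
   constant-pool entry.  */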
3331
3332static rtx
3333compress_float_constant (rtx x, rtx y)
3334{
3335  enum machine_mode dstmode = GET_MODE (x);
3336  enum machine_mode orig_srcmode = GET_MODE (y);
3337  enum machine_mode srcmode;
3338  REAL_VALUE_TYPE r;
3339  int oldcost, newcost;
3340
3341  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3342
3343  if (LEGITIMATE_CONSTANT_P (y))
3344    oldcost = rtx_cost (y, SET);
3345  else
3346    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3347
3348  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3349       srcmode != orig_srcmode;
3350       srcmode = GET_MODE_WIDER_MODE (srcmode))
3351    {
3352      enum insn_code ic;
3353      rtx trunc_y, last_insn;
3354
3355      /* Skip if the target can't extend this way.  */
3356      ic = can_extend_p (dstmode, srcmode, 0);
3357      if (ic == CODE_FOR_nothing)
3358	continue;
3359
3360      /* Skip if the narrowed value isn't exact.  */
3361      if (! exact_real_truncate (srcmode, &r))
3362	continue;
3363
3364      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3365
3366      if (LEGITIMATE_CONSTANT_P (trunc_y))
3367	{
3368	  /* Skip if the target needs extra instructions to perform
3369	     the extension.  */
3370	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3371	    continue;
3372	  /* This is valid, but may not be cheaper than the original. */
3373	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3374	  if (oldcost < newcost)
3375	    continue;
3376	}
3377      else if (float_extend_from_mem[dstmode][srcmode])
3378	{
3379	  trunc_y = force_const_mem (srcmode, trunc_y);
3380	  /* This is valid, but may not be cheaper than the original. */
3381	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3382	  if (oldcost < newcost)
3383	    continue;
3384	  trunc_y = validize_mem (trunc_y);
3385	}
3386      else
3387	continue;
3388
3389      /* For CSE's benefit, force the compressed constant pool entry
3390	 into a new pseudo.  This constant may be used in different modes,
3391	 and if not, combine will put things back together for us.  */
3392      trunc_y = force_reg (srcmode, trunc_y);
3393      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3394      last_insn = get_last_insn ();
3395
3396      if (REG_P (x))
3397	set_unique_reg_note (last_insn, REG_EQUAL, y);
3398
3399      return last_insn;
3400    }
3401
3402  return NULL_RTX;
3403}
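
/* Illustrative sketch, not part of the compiler: the host-side analogue
   of the exact_real_truncate test above.  A wide float constant may be
   loaded in a narrower mode and extended exactly when narrowing and
   re-widening round-trips; representable_in_float is a made-up name.  */
#if 0
static int
representable_in_float (double d)
{
  /* 0.5 round-trips exactly; 0.1 does not, so it must stay double.  */
  return (double) (float) d == d;
}
#endif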
3404
3405/* Pushing data onto the stack.  */
3406
3407/* Push a block of length SIZE (perhaps variable)
3408   and return an rtx to address the beginning of the block.
3409   The value may be virtual_outgoing_args_rtx.
3410
3411   EXTRA is the number of bytes of padding to push in addition to SIZE.
3412   BELOW nonzero means this padding comes at low addresses;
3413   otherwise, the padding comes at high addresses.  */
3414
3415rtx
3416push_block (rtx size, int extra, int below)
3417{
3418  rtx temp;
3419
3420  size = convert_modes (Pmode, ptr_mode, size, 1);
3421  if (CONSTANT_P (size))
3422    anti_adjust_stack (plus_constant (size, extra));
3423  else if (REG_P (size) && extra == 0)
3424    anti_adjust_stack (size);
3425  else
3426    {
3427      temp = copy_to_mode_reg (Pmode, size);
3428      if (extra != 0)
3429	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3430			     temp, 0, OPTAB_LIB_WIDEN);
3431      anti_adjust_stack (temp);
3432    }
3433
3434#ifndef STACK_GROWS_DOWNWARD
3435  if (0)
3436#else
3437  if (1)
3438#endif
3439    {
3440      temp = virtual_outgoing_args_rtx;
3441      if (extra != 0 && below)
3442	temp = plus_constant (temp, extra);
3443    }
3444  else
3445    {
3446      if (GET_CODE (size) == CONST_INT)
3447	temp = plus_constant (virtual_outgoing_args_rtx,
3448			      -INTVAL (size) - (below ? 0 : extra));
3449      else if (extra != 0 && !below)
3450	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3451			     negate_rtx (Pmode, plus_constant (size, extra)));
3452      else
3453	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3454			     negate_rtx (Pmode, size));
3455    }
3456
3457  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3458}
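
/* Illustrative sketch, not part of the compiler: the offset arithmetic
   push_block performs for a constant SIZE, collapsed into one helper.
   The parameter names are ours; offsets are in bytes relative to
   virtual_outgoing_args_rtx.  */
#if 0
static long
push_block_base_offset (long size, long extra, int below,
			int stack_grows_down)
{
  if (stack_grows_down)
    /* The block starts at the outgoing-args pointer itself; padding
       at low addresses moves the usable start up by EXTRA.  */
    return (extra != 0 && below) ? extra : 0;

  /* Upward-growing stack: the block lies below the adjusted pointer,
     with high-address padding subtracted when BELOW is false.  */
  return -size - (below ? 0 : extra);
}
#endif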
3459
3460#ifdef PUSH_ROUNDING
3461
3462/* Emit a single push insn.  */
3463
3464static void
3465emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3466{
3467  rtx dest_addr;
3468  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3469  rtx dest;
3470  enum insn_code icode;
3471  insn_operand_predicate_fn pred;
3472
3473  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3474  /* If there is a push pattern, use it.  Otherwise fall back to the old
3475     way: hand a MEM representing the push operation to the move expander.  */
3476  icode = push_optab->handlers[(int) mode].insn_code;
3477  if (icode != CODE_FOR_nothing)
3478    {
3479      if (((pred = insn_data[(int) icode].operand[0].predicate)
3480	   && !((*pred) (x, mode))))
3481	x = force_reg (mode, x);
3482      emit_insn (GEN_FCN (icode) (x));
3483      return;
3484    }
3485  if (GET_MODE_SIZE (mode) == rounded_size)
3486    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3487  /* If we are to pad downward, adjust the stack pointer first and
3488     then store X into the stack location using an offset.  This is
3489     because emit_move_insn does not know how to pad; it does not have
3490     access to type.  */
3491  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3492    {
3493      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3494      HOST_WIDE_INT offset;
3495
3496      emit_move_insn (stack_pointer_rtx,
3497		      expand_binop (Pmode,
3498#ifdef STACK_GROWS_DOWNWARD
3499				    sub_optab,
3500#else
3501				    add_optab,
3502#endif
3503				    stack_pointer_rtx,
3504				    GEN_INT (rounded_size),
3505				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3506
3507      offset = (HOST_WIDE_INT) padding_size;
3508#ifdef STACK_GROWS_DOWNWARD
3509      if (STACK_PUSH_CODE == POST_DEC)
3510	/* We have already decremented the stack pointer, so get the
3511	   previous value.  */
3512	offset += (HOST_WIDE_INT) rounded_size;
3513#else
3514      if (STACK_PUSH_CODE == POST_INC)
3515	/* We have already incremented the stack pointer, so get the
3516	   previous value.  */
3517	offset -= (HOST_WIDE_INT) rounded_size;
3518#endif
3519      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3520    }
3521  else
3522    {
3523#ifdef STACK_GROWS_DOWNWARD
3524      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3525      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3526				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3527#else
3528      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3529      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3530				GEN_INT (rounded_size));
3531#endif
3532      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3533    }
3534
3535  dest = gen_rtx_MEM (mode, dest_addr);
3536
3537  if (type != 0)
3538    {
3539      set_mem_attributes (dest, type, 1);
3540
3541      if (flag_optimize_sibling_calls)
3542	/* Function incoming arguments may overlap with sibling call
3543	   outgoing arguments and we cannot allow reordering of reads
3544	   from function arguments with stores to outgoing arguments
3545	   of sibling calls.  */
3546	set_mem_alias_set (dest, 0);
3547    }
3548  emit_move_insn (dest, x);
3549}
3550#endif
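
/* Illustrative sketch, not part of the compiler: the offset computation
   for the downward-padding case above, on a STACK_GROWS_DOWNWARD target.
   The helper name and parameters are ours.  */
#if 0
static long
padded_push_offset (long mode_size, long rounded_size, int post_dec)
{
  /* The value sits above the padding within its rounded slot.  */
  long offset = rounded_size - mode_size;

  /* Under POST_DEC the stack pointer was already decremented
     explicitly, so the slot must be addressed relative to the
     pre-adjust value.  */
  if (post_dec)
    offset += rounded_size;
  return offset;
}
#endif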
3551
3552/* Generate code to push X onto the stack, assuming it has mode MODE and
3553   type TYPE.
3554   MODE is redundant except when X is a CONST_INT (since they don't
3555   carry mode info).
3556   SIZE is an rtx for the size of data to be copied (in bytes),
3557   needed only if X is BLKmode.
3558
3559   ALIGN (in bits) is the maximum alignment we can assume.
3560
3561   If PARTIAL and REG are both nonzero, then copy that many of the first
3562   bytes of X into registers starting with REG, and push the rest of X.
3563   The amount of space pushed is decreased by PARTIAL bytes.
3564   REG must be a hard register in this case.
3565   If REG is zero but PARTIAL is not, take all other actions for an
3566   argument partially in registers, but do not actually load any
3567   registers.
3568
3569   EXTRA is the amount in bytes of extra space to leave next to this arg.
3570   This is ignored if an argument block has already been allocated.
3571
3572   On a machine that lacks real push insns, ARGS_ADDR is the address of
3573   the bottom of the argument block for this call.  We use indexing off there
3574   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3575   argument block has not been preallocated.
3576
3577   ARGS_SO_FAR is the size of args previously pushed for this call.
3578
3579   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3580   for arguments passed in registers.  If nonzero, it will be the number
3581   of bytes required.  */
3582
3583void
3584emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3585		unsigned int align, int partial, rtx reg, int extra,
3586		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3587		rtx alignment_pad)
3588{
3589  rtx xinner;
3590  enum direction stack_direction
3591#ifdef STACK_GROWS_DOWNWARD
3592    = downward;
3593#else
3594    = upward;
3595#endif
3596
3597  /* Decide where to pad the argument: `downward' for below,
3598     `upward' for above, or `none' for no padding.
3599     Default is below for small data on big-endian machines; else above.  */
3600  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3601
3602  /* Invert direction if stack is post-decrement.
3603     FIXME: why?  */
3604  if (STACK_PUSH_CODE == POST_DEC)
3605    if (where_pad != none)
3606      where_pad = (where_pad == downward ? upward : downward);
3607
3608  xinner = x;
3609
3610  if (mode == BLKmode)
3611    {
3612      /* Copy a block into the stack, entirely or partially.  */
3613
3614      rtx temp;
3615      int used;
3616      int offset;
3617      int skip;
3618
3619      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3620      used = partial - offset;
3621
3622      gcc_assert (size);
3623
3624      /* USED is now the # of bytes we need not copy to the stack
3625	 because registers will take care of them.  */
3626
3627      if (partial != 0)
3628	xinner = adjust_address (xinner, BLKmode, used);
3629
3630      /* If the partial register-part of the arg counts in its stack size,
3631	 skip the part of stack space corresponding to the registers.
3632	 Otherwise, start copying to the beginning of the stack space,
3633	 by setting SKIP to 0.  */
3634      skip = (reg_parm_stack_space == 0) ? 0 : used;
3635
3636#ifdef PUSH_ROUNDING
3637      /* Do it with several push insns if that doesn't take lots of insns
3638	 and if there is no difficulty with push insns that skip bytes
3639	 on the stack for alignment purposes.  */
3640      if (args_addr == 0
3641	  && PUSH_ARGS
3642	  && GET_CODE (size) == CONST_INT
3643	  && skip == 0
3644	  && MEM_ALIGN (xinner) >= align
3645	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3646	  /* Here we avoid the case of a structure whose weak alignment
3647	     forces many pushes of a small amount of data,
3648	     and such small pushes do rounding that causes trouble.  */
3649	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3650	      || align >= BIGGEST_ALIGNMENT
3651	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3652		  == (align / BITS_PER_UNIT)))
3653	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3654	{
3655	  /* Push padding now if padding above and stack grows down,
3656	     or if padding below and stack grows up.
3657	     But if space already allocated, this has already been done.  */
3658	  if (extra && args_addr == 0
3659	      && where_pad != none && where_pad != stack_direction)
3660	    anti_adjust_stack (GEN_INT (extra));
3661
3662	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3663	}
3664      else
3665#endif /* PUSH_ROUNDING  */
3666	{
3667	  rtx target;
3668
3669	  /* Otherwise make space on the stack and copy the data
3670	     to the address of that space.  */
3671
3672	  /* Deduct words put into registers from the size we must copy.  */
3673	  if (partial != 0)
3674	    {
3675	      if (GET_CODE (size) == CONST_INT)
3676		size = GEN_INT (INTVAL (size) - used);
3677	      else
3678		size = expand_binop (GET_MODE (size), sub_optab, size,
3679				     GEN_INT (used), NULL_RTX, 0,
3680				     OPTAB_LIB_WIDEN);
3681	    }
3682
3683	  /* Get the address of the stack space.
3684	     In this case, we do not deal with EXTRA separately.
3685	     A single stack adjust will do.  */
3686	  if (! args_addr)
3687	    {
3688	      temp = push_block (size, extra, where_pad == downward);
3689	      extra = 0;
3690	    }
3691	  else if (GET_CODE (args_so_far) == CONST_INT)
3692	    temp = memory_address (BLKmode,
3693				   plus_constant (args_addr,
3694						  skip + INTVAL (args_so_far)));
3695	  else
3696	    temp = memory_address (BLKmode,
3697				   plus_constant (gen_rtx_PLUS (Pmode,
3698								args_addr,
3699								args_so_far),
3700						  skip));
3701
3702	  if (!ACCUMULATE_OUTGOING_ARGS)
3703	    {
3704	      /* If the source is referenced relative to the stack pointer,
3705		 copy it to another register to stabilize it.  We do not need
3706		 to do this if we know that we won't be changing sp.  */
3707
3708	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3709		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3710		temp = copy_to_reg (temp);
3711	    }
3712
3713	  target = gen_rtx_MEM (BLKmode, temp);
3714
3715	  /* We do *not* set_mem_attributes here, because incoming arguments
3716	     may overlap with sibling call outgoing arguments and we cannot
3717	     allow reordering of reads from function arguments with stores
3718	     to outgoing arguments of sibling calls.  We do, however, want
3719	     to record the alignment of the stack slot.  */
3720	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3721	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3722	  set_mem_align (target, align);
3723
3724	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3725	}
3726    }
3727  else if (partial > 0)
3728    {
3729      /* Scalar partly in registers.  */
3730
3731      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3732      int i;
3733      int not_stack;
3734      /* # bytes of start of argument
3735	 that we must make space for but need not store.  */
3736      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3737      int args_offset = INTVAL (args_so_far);
3738      int skip;
3739
3740      /* Push padding now if padding above and stack grows down,
3741	 or if padding below and stack grows up.
3742	 But if space already allocated, this has already been done.  */
3743      if (extra && args_addr == 0
3744	  && where_pad != none && where_pad != stack_direction)
3745	anti_adjust_stack (GEN_INT (extra));
3746
3747      /* If we make space by pushing it, we might as well push
3748	 the real data.  Otherwise, we can leave OFFSET nonzero
3749	 and leave the space uninitialized.  */
3750      if (args_addr == 0)
3751	offset = 0;
3752
3753      /* Now NOT_STACK gets the number of words that we don't need to
3754	 allocate on the stack.  Convert OFFSET to words too.  */
3755      not_stack = (partial - offset) / UNITS_PER_WORD;
3756      offset /= UNITS_PER_WORD;
3757
3758      /* If the partial register-part of the arg counts in its stack size,
3759	 skip the part of stack space corresponding to the registers.
3760	 Otherwise, start copying to the beginning of the stack space,
3761	 by setting SKIP to 0.  */
3762      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3763
3764      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3765	x = validize_mem (force_const_mem (mode, x));
3766
3767      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3768	 SUBREGs of such registers are not allowed.  */
3769      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3770	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3771	x = copy_to_reg (x);
3772
3773      /* Loop over all the words allocated on the stack for this arg.  */
3774      /* We can do it by words, because any scalar bigger than a word
3775	 has a size that is a multiple of a word.  */
3776#ifndef PUSH_ARGS_REVERSED
3777      for (i = not_stack; i < size; i++)
3778#else
3779      for (i = size - 1; i >= not_stack; i--)
3780#endif
3781	if (i >= not_stack + offset)
3782	  emit_push_insn (operand_subword_force (x, i, mode),
3783			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3784			  0, args_addr,
3785			  GEN_INT (args_offset + ((i - not_stack + skip)
3786						  * UNITS_PER_WORD)),
3787			  reg_parm_stack_space, alignment_pad);
3788    }
3789  else
3790    {
3791      rtx addr;
3792      rtx dest;
3793
3794      /* Push padding now if padding above and stack grows down,
3795	 or if padding below and stack grows up.
3796	 But if space already allocated, this has already been done.  */
3797      if (extra && args_addr == 0
3798	  && where_pad != none && where_pad != stack_direction)
3799	anti_adjust_stack (GEN_INT (extra));
3800
3801#ifdef PUSH_ROUNDING
3802      if (args_addr == 0 && PUSH_ARGS)
3803	emit_single_push_insn (mode, x, type);
3804      else
3805#endif
3806	{
3807	  if (GET_CODE (args_so_far) == CONST_INT)
3808	    addr
3809	      = memory_address (mode,
3810				plus_constant (args_addr,
3811					       INTVAL (args_so_far)));
3812	  else
3813	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3814						       args_so_far));
3815	  dest = gen_rtx_MEM (mode, addr);
3816
3817	  /* We do *not* set_mem_attributes here, because incoming arguments
3818	     may overlap with sibling call outgoing arguments and we cannot
3819	     allow reordering of reads from function arguments with stores
3820	     to outgoing arguments of sibling calls.  We do, however, want
3821	     to record the alignment of the stack slot.  */
3822	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3823	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3824	  set_mem_align (dest, align);
3825
3826	  emit_move_insn (dest, x);
3827	}
3828    }
3829
3830  /* If part should go in registers, copy that part
3831     into the appropriate registers.  Do this now, at the end,
3832     since mem-to-mem copies above may do function calls.  */
3833  if (partial > 0 && reg != 0)
3834    {
3835      /* Handle calls that pass values in multiple non-contiguous locations.
3836	 The Irix 6 ABI has examples of this.  */
3837      if (GET_CODE (reg) == PARALLEL)
3838	emit_group_load (reg, x, type, -1);
3839      else
3840	{
3841	  gcc_assert (partial % UNITS_PER_WORD == 0);
3842	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3843	}
3844    }
3845
3846  if (extra && args_addr == 0 && where_pad == stack_direction)
3847    anti_adjust_stack (GEN_INT (extra));
3848
3849  if (alignment_pad && args_addr == 0)
3850    anti_adjust_stack (alignment_pad);
3851}
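
/* Illustrative sketch, not part of the compiler: the bookkeeping above
   for a scalar passed partly in registers.  With PARTIAL == 6, a 4-byte
   PARM_BOUNDARY and 4-byte words, 2 bytes spill past the boundary
   (OFFSET) and one word needs no stack store (NOT_STACK).  All names
   here are ours.  */
#if 0
static void
partial_arg_layout (int partial, int parm_boundary_bytes, int word_bytes,
		    int *offset, int *not_stack)
{
  /* Bytes of the argument that overhang the parameter boundary.  */
  *offset = partial % parm_boundary_bytes;
  /* Whole words fully covered by registers.  */
  *not_stack = (partial - *offset) / word_bytes;
  /* OFFSET is used as a word count from here on.  */
  *offset /= word_bytes;
}
#endif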
3852
3853/* Return X if X can be used as a subtarget in a sequence of arithmetic
3854   operations.  */
3855
3856static rtx
3857get_subtarget (rtx x)
3858{
3859  return (optimize
3860	  || x == 0
3861	  /* Only registers can be subtargets.  */
3862	  || !REG_P (x)
3863	  /* Don't use hard regs to avoid extending their life.  */
3864	  || REGNO (x) < FIRST_PSEUDO_REGISTER
3865	  ? 0 : x);
3866}
3867
3868/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
3869   FIELD is a bitfield.  Returns true if the optimization was successful,
3870   and there's nothing else to do.  */
3871
3872static bool
3873optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3874				 unsigned HOST_WIDE_INT bitpos,
3875				 enum machine_mode mode1, rtx str_rtx,
3876				 tree to, tree src)
3877{
3878  enum machine_mode str_mode = GET_MODE (str_rtx);
3879  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3880  tree op0, op1;
3881  rtx value, result;
3882  optab binop;
3883
3884  if (mode1 != VOIDmode
3885      || bitsize >= BITS_PER_WORD
3886      || str_bitsize > BITS_PER_WORD
3887      || TREE_SIDE_EFFECTS (to)
3888      || TREE_THIS_VOLATILE (to))
3889    return false;
3890
3891  STRIP_NOPS (src);
3892  if (!BINARY_CLASS_P (src)
3893      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3894    return false;
3895
3896  op0 = TREE_OPERAND (src, 0);
3897  op1 = TREE_OPERAND (src, 1);
3898  STRIP_NOPS (op0);
3899
3900  if (!operand_equal_p (to, op0, 0))
3901    return false;
3902
3903  if (MEM_P (str_rtx))
3904    {
3905      unsigned HOST_WIDE_INT offset1;
3906
3907      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3908	str_mode = word_mode;
3909      str_mode = get_best_mode (bitsize, bitpos,
3910				MEM_ALIGN (str_rtx), str_mode, 0);
3911      if (str_mode == VOIDmode)
3912	return false;
3913      str_bitsize = GET_MODE_BITSIZE (str_mode);
3914
3915      offset1 = bitpos;
3916      bitpos %= str_bitsize;
3917      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3918      str_rtx = adjust_address (str_rtx, str_mode, offset1);
3919    }
3920  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3921    return false;
3922
3923  /* If the bit field covers the whole REG/MEM, store_field
3924     will likely generate better code.  */
3925  if (bitsize >= str_bitsize)
3926    return false;
3927
3928  /* We can't handle fields split across multiple entities.  */
3929  if (bitpos + bitsize > str_bitsize)
3930    return false;
3931
3932  if (BYTES_BIG_ENDIAN)
3933    bitpos = str_bitsize - bitpos - bitsize;
3934
3935  switch (TREE_CODE (src))
3936    {
3937    case PLUS_EXPR:
3938    case MINUS_EXPR:
3939      /* For now, just optimize the case of the topmost bitfield
3940	 where we don't need to do any masking and also
3941	 1-bit bitfields where xor can be used.
3942	 We might win by one instruction for the other bitfields
3943	 too if insv/extv instructions aren't used, so that
3944	 can be added later.  */
3945      if (bitpos + bitsize != str_bitsize
3946	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3947	break;
3948
3949      value = expand_expr (op1, NULL_RTX, str_mode, 0);
3950      value = convert_modes (str_mode,
3951			     TYPE_MODE (TREE_TYPE (op1)), value,
3952			     TYPE_UNSIGNED (TREE_TYPE (op1)));
3953
3954      /* We may be accessing data outside the field, which means
3955	 we can alias adjacent data.  */
3956      if (MEM_P (str_rtx))
3957	{
3958	  str_rtx = shallow_copy_rtx (str_rtx);
3959	  set_mem_alias_set (str_rtx, 0);
3960	  set_mem_expr (str_rtx, 0);
3961	}
3962
3963      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3964      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3965	{
3966	  value = expand_and (str_mode, value, const1_rtx, NULL);
3967	  binop = xor_optab;
3968	}
3969      value = expand_shift (LSHIFT_EXPR, str_mode, value,
3970			    build_int_cst (NULL_TREE, bitpos),
3971			    NULL_RTX, 1);
3972      result = expand_binop (str_mode, binop, str_rtx,
3973			     value, str_rtx, 1, OPTAB_WIDEN);
3974      if (result != str_rtx)
3975	emit_move_insn (str_rtx, result);
3976      return true;
3977
3978    case BIT_IOR_EXPR:
3979    case BIT_XOR_EXPR:
3980      if (TREE_CODE (op1) != INTEGER_CST)
3981	break;
3982      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3983      value = convert_modes (GET_MODE (str_rtx),
3984			     TYPE_MODE (TREE_TYPE (op1)), value,
3985			     TYPE_UNSIGNED (TREE_TYPE (op1)));
3986
3987      /* We may be accessing data outside the field, which means
3988	 we can alias adjacent data.  */
3989      if (MEM_P (str_rtx))
3990	{
3991	  str_rtx = shallow_copy_rtx (str_rtx);
3992	  set_mem_alias_set (str_rtx, 0);
3993	  set_mem_expr (str_rtx, 0);
3994	}
3995
3996      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3997      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3998	{
3999	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4000			      - 1);
4001	  value = expand_and (GET_MODE (str_rtx), value, mask,
4002			      NULL_RTX);
4003	}
4004      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4005			    build_int_cst (NULL_TREE, bitpos),
4006			    NULL_RTX, 1);
4007      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4008			     value, str_rtx, 1, OPTAB_WIDEN);
4009      if (result != str_rtx)
4010	emit_move_insn (str_rtx, result);
4011      return true;
4012
4013    default:
4014      break;
4015    }
4016
4017  return false;
4018}
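
/* Illustrative sketch, not part of the compiler: the word-level shape
   of the BIT_IOR_EXPR case above for a little-endian layout, ignoring
   the no-mask shortcut for a topmost field.  The helper is ours.  */
#if 0
static unsigned int
bitfield_or_in_word (unsigned int word, unsigned int value,
		     int bitpos, int bitsize)
{
  /* Keep only BITSIZE bits of VALUE; assumes bitsize < 32.  */
  unsigned int mask = (1u << bitsize) - 1;

  /* Shift into place and combine with one logical operation, the
     same mask/shift/or sequence the code above emits.  */
  return word | ((value & mask) << bitpos);
}
#endif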
4019
4020
4021/* Expand an assignment that stores the value of FROM into TO.  */
4022
4023void
4024expand_assignment (tree to, tree from)
4025{
4026  rtx to_rtx = 0;
4027  rtx result;
4028
4029  /* Don't crash if the lhs of the assignment was erroneous.  */
4030  if (TREE_CODE (to) == ERROR_MARK)
4031    {
4032      result = expand_normal (from);
4033      return;
4034    }
4035
4036  /* Optimize away no-op moves without side-effects.  */
4037  if (operand_equal_p (to, from, 0))
4038    return;
4039
4040  /* Assignment of a structure component needs special treatment
4041     if the structure component's rtx is not simply a MEM.
4042     Assignment of an array element at a constant index, and assignment of
4043     an array element in an unaligned packed structure field, have the same
4044     problem.  */
4045  if (handled_component_p (to)
4046      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4047    {
4048      enum machine_mode mode1;
4049      HOST_WIDE_INT bitsize, bitpos;
4050      tree offset;
4051      int unsignedp;
4052      int volatilep = 0;
4053      tree tem;
4054
4055      push_temp_slots ();
4056      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4057				 &unsignedp, &volatilep, true);
4058
4059      /* If we are going to use store_bit_field and extract_bit_field,
4060	 make sure to_rtx will be safe for multiple use.  */
4061
4062      to_rtx = expand_normal (tem);
4063
4064      if (offset != 0)
4065	{
4066	  rtx offset_rtx;
4067
4068	  if (!MEM_P (to_rtx))
4069	    {
4070	      /* We can get constant negative offsets into arrays with broken
4071		 user code.  Translate this to a trap instead of ICEing.  */
4072	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4073	      expand_builtin_trap ();
4074	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4075	    }
4076
4077	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4078#ifdef POINTERS_EXTEND_UNSIGNED
4079	  if (GET_MODE (offset_rtx) != Pmode)
4080	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4081#else
4082	  if (GET_MODE (offset_rtx) != ptr_mode)
4083	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4084#endif
4085
4086	  /* A constant address in TO_RTX can have VOIDmode; we must not
4087	     try to call force_reg in that case, so avoid it.  */
4088	  if (MEM_P (to_rtx)
4089	      && GET_MODE (to_rtx) == BLKmode
4090	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4091	      && bitsize > 0
4092	      && (bitpos % bitsize) == 0
4093	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4094	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4095	    {
4096	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4097	      bitpos = 0;
4098	    }
4099
4100	  to_rtx = offset_address (to_rtx, offset_rtx,
4101				   highest_pow2_factor_for_target (to,
4102				   				   offset));
4103	}
4104
4105      /* Handle expand_expr of a complex value returning a CONCAT.  */
4106      if (GET_CODE (to_rtx) == CONCAT)
4107	{
4108	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4109	    {
4110	      gcc_assert (bitpos == 0);
4111	      result = store_expr (from, to_rtx, false);
4112	    }
4113	  else
4114	    {
4115	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4116	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4117	    }
4118	}
4119      else
4120	{
4121	  if (MEM_P (to_rtx))
4122	    {
4123	      /* If the field is at offset zero, we could have been given the
4124		 DECL_RTX of the parent struct.  Don't munge it.  */
4125	      to_rtx = shallow_copy_rtx (to_rtx);
4126
4127	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4128
4129	      /* Deal with volatile and readonly fields.  The former is only
4130		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4131	      if (volatilep)
4132		MEM_VOLATILE_P (to_rtx) = 1;
4133	      if (component_uses_parent_alias_set (to))
4134		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4135	    }
4136
4137	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4138					       to_rtx, to, from))
4139	    result = NULL;
4140	  else
4141	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4142				  TREE_TYPE (tem), get_alias_set (to));
4143	}
4144
4145      if (result)
4146	preserve_temp_slots (result);
4147      free_temp_slots ();
4148      pop_temp_slots ();
4149      return;
4150    }
4151
4152  /* If the rhs is a function call and its value is not an aggregate,
4153     call the function before we start to compute the lhs.
4154     This is needed for correct code for cases such as
4155     val = setjmp (buf) on machines where a reference to val
4156     requires loading up part of an address in a separate insn.
4157
4158     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4159     since it might be a promoted variable where the zero- or sign- extension
4160     needs to be done.  Handling this in the normal way is safe because no
4161     computation is done before the call.  */
4162  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4163      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4164      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4165	    && REG_P (DECL_RTL (to))))
4166    {
4167      rtx value;
4168
4169      push_temp_slots ();
4170      value = expand_normal (from);
4171      if (to_rtx == 0)
4172	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4173
4174      /* Handle calls that return values in multiple non-contiguous locations.
4175	 The Irix 6 ABI has examples of this.  */
4176      if (GET_CODE (to_rtx) == PARALLEL)
4177	emit_group_load (to_rtx, value, TREE_TYPE (from),
4178			 int_size_in_bytes (TREE_TYPE (from)));
4179      else if (GET_MODE (to_rtx) == BLKmode)
4180	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4181      else
4182	{
4183	  if (POINTER_TYPE_P (TREE_TYPE (to)))
4184	    value = convert_memory_address (GET_MODE (to_rtx), value);
4185	  emit_move_insn (to_rtx, value);
4186	}
4187      preserve_temp_slots (to_rtx);
4188      free_temp_slots ();
4189      pop_temp_slots ();
4190      return;
4191    }
4192
4193  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4194     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4195
4196  if (to_rtx == 0)
4197    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4198
4199  /* Don't move directly into a return register.  */
4200  if (TREE_CODE (to) == RESULT_DECL
4201      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4202    {
4203      rtx temp;
4204
4205      push_temp_slots ();
4206      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4207
4208      if (GET_CODE (to_rtx) == PARALLEL)
4209	emit_group_load (to_rtx, temp, TREE_TYPE (from),
4210			 int_size_in_bytes (TREE_TYPE (from)));
4211      else
4212	emit_move_insn (to_rtx, temp);
4213
4214      preserve_temp_slots (to_rtx);
4215      free_temp_slots ();
4216      pop_temp_slots ();
4217      return;
4218    }
4219
4220  /* In case we are returning the contents of an object which overlaps
4221     the place the value is being stored, use a safe function when copying
4222     a value through a pointer into a structure value return block.  */
4223  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4224      && current_function_returns_struct
4225      && !current_function_returns_pcc_struct)
4226    {
4227      rtx from_rtx, size;
4228
4229      push_temp_slots ();
4230      size = expr_size (from);
4231      from_rtx = expand_normal (from);
4232
4233      emit_library_call (memmove_libfunc, LCT_NORMAL,
4234			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4235			 XEXP (from_rtx, 0), Pmode,
4236			 convert_to_mode (TYPE_MODE (sizetype),
4237					  size, TYPE_UNSIGNED (sizetype)),
4238			 TYPE_MODE (sizetype));
4239
4240      preserve_temp_slots (to_rtx);
4241      free_temp_slots ();
4242      pop_temp_slots ();
4243      return;
4244    }
4245
4246  /* Compute FROM and store the value in the rtx we got.  */
4247
4248  push_temp_slots ();
4249  result = store_expr (from, to_rtx, 0);
4250  preserve_temp_slots (result);
4251  free_temp_slots ();
4252  pop_temp_slots ();
4253  return;
4254}
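
/* Illustrative sketch, not part of the compiler: why the structure-return
   case above calls the memmove libfunc instead of a plain block copy.
   Source and destination may overlap, and of the two library routines
   only memmove is defined for overlapping operands.  */
#if 0
#include <string.h>

static void
copy_possibly_overlapping (void *to, const void *from, size_t n)
{
  /* memcpy would invoke undefined behavior if the ranges overlap;
     memmove copies as if through a temporary buffer.  */
  memmove (to, from, n);
}
#endif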
4255
4256/* Generate code for computing expression EXP,
4257   and storing the value into TARGET.
4258
4259   If the mode is BLKmode then we may return TARGET itself.
4260   It turns out that in BLKmode it doesn't cause a problem,
4261   because C has no operators that could combine two different
4262   assignments into the same BLKmode object with different values
4263   with no sequence point.  Will other languages need this to
4264   be more thorough?
4265
4266   If CALL_PARAM_P is nonzero, this is a store into a call param on the
4267   stack, and block moves may need to be treated specially.  */
4268
4269rtx
4270store_expr (tree exp, rtx target, int call_param_p)
4271{
4272  rtx temp;
4273  rtx alt_rtl = NULL_RTX;
4274  int dont_return_target = 0;
4275
4276  if (VOID_TYPE_P (TREE_TYPE (exp)))
4277    {
4278      /* C++ can generate ?: expressions with a throw expression in one
4279	 branch and an rvalue in the other. Here, we resolve attempts to
4280	 store the throw expression's nonexistent result.  */
4281      gcc_assert (!call_param_p);
4282      expand_expr (exp, const0_rtx, VOIDmode, 0);
4283      return NULL_RTX;
4284    }
4285  if (TREE_CODE (exp) == COMPOUND_EXPR)
4286    {
4287      /* Perform first part of compound expression, then assign from second
4288	 part.  */
4289      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4290		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4291      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4292    }
4293  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4294    {
4295      /* For conditional expression, get safe form of the target.  Then
4296	 test the condition, doing the appropriate assignment on either
4297	 side.  This avoids the creation of unnecessary temporaries.
4298	 For non-BLKmode, it is more efficient not to do this.  */
4299
4300      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4301
4302      do_pending_stack_adjust ();
4303      NO_DEFER_POP;
4304      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4305      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4306      emit_jump_insn (gen_jump (lab2));
4307      emit_barrier ();
4308      emit_label (lab1);
4309      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4310      emit_label (lab2);
4311      OK_DEFER_POP;
4312
4313      return NULL_RTX;
4314    }
4315  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4316    /* If this is a scalar in a register that is stored in a wider mode
4317       than the declared mode, compute the result into its declared mode
4318       and then convert to the wider mode.  Our value is the computed
4319       expression.  */
4320    {
4321      rtx inner_target = 0;
4322
4323      /* We can do the conversion inside EXP, which will often result
4324	 in some optimizations.  Do the conversion in two steps: first
4325	 change the signedness, if needed, then the extend.  But don't
4326	 do this if the type of EXP is a subtype of something else
4327	 since then the conversion might involve more than just
4328	 converting modes.  */
4329      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4330	  && TREE_TYPE (TREE_TYPE (exp)) == 0
4331	  && (!lang_hooks.reduce_bit_field_operations
4332	      || (GET_MODE_PRECISION (GET_MODE (target))
4333		  == TYPE_PRECISION (TREE_TYPE (exp)))))
4334	{
4335	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
4336	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4337	    exp = fold_convert
4338	      (lang_hooks.types.signed_or_unsigned_type
4339	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4340
4341	  exp = fold_convert (lang_hooks.types.type_for_mode
4342				(GET_MODE (SUBREG_REG (target)),
4343				 SUBREG_PROMOTED_UNSIGNED_P (target)),
4344			      exp);
4345
4346	  inner_target = SUBREG_REG (target);
4347	}
4348
4349      temp = expand_expr (exp, inner_target, VOIDmode,
4350			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4351
4352      /* If TEMP is a VOIDmode constant, use convert_modes to make
4353	 sure that we properly convert it.  */
4354      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4355	{
4356	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4357				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4358	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4359			        GET_MODE (target), temp,
4360			        SUBREG_PROMOTED_UNSIGNED_P (target));
4361	}
4362
4363      convert_move (SUBREG_REG (target), temp,
4364		    SUBREG_PROMOTED_UNSIGNED_P (target));
4365
4366      return NULL_RTX;
4367    }
4368  else
4369    {
4370      temp = expand_expr_real (exp, target, GET_MODE (target),
4371			       (call_param_p
4372				? EXPAND_STACK_PARM : EXPAND_NORMAL),
4373			       &alt_rtl);
4374      /* Return TARGET if it's a specified hardware register.
4375	 If TARGET is a volatile mem ref, either return TARGET
4376	 or return a reg copied *from* TARGET; ANSI requires this.
4377
4378	 Otherwise, if TEMP is not TARGET, return TEMP
4379	 if it is constant (for efficiency),
4380	 or if we really want the correct value.  */
4381      if (!(target && REG_P (target)
4382	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4383	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
4384	  && ! rtx_equal_p (temp, target)
4385	  && CONSTANT_P (temp))
4386	dont_return_target = 1;
4387    }
4388
4389  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4390     the same as that of TARGET, adjust the constant.  This is needed, for
4391     example, in case it is a CONST_DOUBLE and we want only a word-sized
4392     value.  */
4393  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4394      && TREE_CODE (exp) != ERROR_MARK
4395      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4396    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4397			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4398
4399  /* If value was not generated in the target, store it there.
4400     Convert the value to TARGET's type first if necessary and emit the
4401     pending incrementations that have been queued when expanding EXP.
4402     Note that we cannot emit the whole queue blindly because this will
4403     effectively disable the POST_INC optimization later.
4404
4405     If TEMP and TARGET compare equal according to rtx_equal_p, but
4406     one or both of them are volatile memory refs, we have to distinguish
4407     two cases:
4408     - expand_expr has used TARGET.  In this case, we must not generate
4409       another copy.  This can be detected by TARGET being equal according
4410       to == .
4411     - expand_expr has not used TARGET - that means that the source just
4412       happens to have the same RTX form.  Since temp will have been created
4413       by expand_expr, it will compare unequal according to == .
4414       We must generate a copy in this case, to reach the correct number
4415       of volatile memory references.  */
4416
4417  if ((! rtx_equal_p (temp, target)
4418       || (temp != target && (side_effects_p (temp)
4419			      || side_effects_p (target))))
4420      && TREE_CODE (exp) != ERROR_MARK
4421      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4422	 but TARGET is not a valid memory reference, TEMP will differ
4423	 from TARGET although it is really the same location.  */
4424      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4425      /* If there's nothing to copy, don't bother.  Don't call
4426	 expr_size unless necessary, because the expr_size hook of
4427	 some front ends (C++) must not be given objects that are not
4428	 supposed to be bit-copied or bit-initialized.  */
4429      && expr_size (exp) != const0_rtx)
4430    {
4431      if (GET_MODE (temp) != GET_MODE (target)
4432	  && GET_MODE (temp) != VOIDmode)
4433	{
4434	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4435	  if (dont_return_target)
4436	    {
4437	      /* In this case, we will return TEMP,
4438		 so make sure it has the proper mode.
4439		 But don't forget to store the value into TARGET.  */
4440	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4441	      emit_move_insn (target, temp);
4442	    }
4443	  else
4444	    convert_move (target, temp, unsignedp);
4445	}
4446
4447      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4448	{
4449	  /* Handle copying a string constant into an array.  The string
4450	     constant may be shorter than the array.  So copy just the string's
4451	     actual length, and clear the rest.  First get the size of the data
4452	     type of the string, which is actually the size of the target.  */
4453	  rtx size = expr_size (exp);
4454
4455	  if (GET_CODE (size) == CONST_INT
4456	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4457	    emit_block_move (target, temp, size,
4458			     (call_param_p
4459			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4460	  else
4461	    {
4462	      /* Compute the size of the data to copy from the string.  */
4463	      tree copy_size
4464		= size_binop (MIN_EXPR,
4465			      make_tree (sizetype, size),
4466			      size_int (TREE_STRING_LENGTH (exp)));
4467	      rtx copy_size_rtx
4468		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4469			       (call_param_p
4470				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4471	      rtx label = 0;
4472
4473	      /* Copy that much.  */
4474	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4475					       TYPE_UNSIGNED (sizetype));
4476	      emit_block_move (target, temp, copy_size_rtx,
4477			       (call_param_p
4478				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4479
4480	      /* Figure out how much is left in TARGET that we have to clear.
4481		 Do all calculations in ptr_mode.  */
4482	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4483		{
4484		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4485		  target = adjust_address (target, BLKmode,
4486					   INTVAL (copy_size_rtx));
4487		}
4488	      else
4489		{
4490		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4491				       copy_size_rtx, NULL_RTX, 0,
4492				       OPTAB_LIB_WIDEN);
4493
4494#ifdef POINTERS_EXTEND_UNSIGNED
4495		  if (GET_MODE (copy_size_rtx) != Pmode)
4496		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4497						     TYPE_UNSIGNED (sizetype));
4498#endif
4499
4500		  target = offset_address (target, copy_size_rtx,
4501					   highest_pow2_factor (copy_size));
4502		  label = gen_label_rtx ();
4503		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4504					   GET_MODE (size), 0, label);
4505		}
4506
4507	      if (size != const0_rtx)
4508		clear_storage (target, size, BLOCK_OP_NORMAL);
4509
4510	      if (label)
4511		emit_label (label);
4512	    }
4513	}
4514      /* Handle calls that return values in multiple non-contiguous locations.
4515	 The Irix 6 ABI has examples of this.  */
4516      else if (GET_CODE (target) == PARALLEL)
4517	emit_group_load (target, temp, TREE_TYPE (exp),
4518			 int_size_in_bytes (TREE_TYPE (exp)));
4519      else if (GET_MODE (temp) == BLKmode)
4520	emit_block_move (target, temp, expr_size (exp),
4521			 (call_param_p
4522			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4523      else
4524	{
4525	  temp = force_operand (temp, target);
4526	  if (temp != target)
4527	    emit_move_insn (target, temp);
4528	}
4529    }
4530
4531  return NULL_RTX;
4532}
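
/* Illustrative sketch, not part of the compiler: the semantics of the
   STRING_CST case in store_expr above.  Copy the smaller of the string
   and the target, then clear whatever remains; the helper is ours.  */
#if 0
#include <string.h>

static void
store_string_into_array (char *target, size_t target_size,
			 const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);
  /* Like the clear_storage call above, zero the uninitialized tail.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}
#endif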
4533
4534/* Helper for categorize_ctor_elements.  Identical interface.  */
4535
4536static bool
4537categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4538			    HOST_WIDE_INT *p_elt_count,
4539			    bool *p_must_clear)
4540{
4541  unsigned HOST_WIDE_INT idx;
4542  HOST_WIDE_INT nz_elts, elt_count;
4543  tree value, purpose;
4544
4545  /* Whether CTOR is a valid constant initializer, in accordance with what
4546     initializer_constant_valid_p does.  If inferred from the constructor
4547     elements, true until proven otherwise.  */
4548  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4549  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4550
4551  nz_elts = 0;
4552  elt_count = 0;
4553
4554  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4555    {
4556      HOST_WIDE_INT mult;
4557
4558      mult = 1;
4559      if (TREE_CODE (purpose) == RANGE_EXPR)
4560	{
4561	  tree lo_index = TREE_OPERAND (purpose, 0);
4562	  tree hi_index = TREE_OPERAND (purpose, 1);
4563
4564	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4565	    mult = (tree_low_cst (hi_index, 1)
4566		    - tree_low_cst (lo_index, 1) + 1);
4567	}
4568
4569      switch (TREE_CODE (value))
4570	{
4571	case CONSTRUCTOR:
4572	  {
4573	    HOST_WIDE_INT nz = 0, ic = 0;
4574
4575	    bool const_elt_p
4576	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4577
4578	    nz_elts += mult * nz;
4579	    elt_count += mult * ic;
4580
4581	    if (const_from_elts_p && const_p)
4582	      const_p = const_elt_p;
4583	  }
4584	  break;
4585
4586	case INTEGER_CST:
4587	case REAL_CST:
4588	  if (!initializer_zerop (value))
4589	    nz_elts += mult;
4590	  elt_count += mult;
4591	  break;
4592
4593	case STRING_CST:
4594	  nz_elts += mult * TREE_STRING_LENGTH (value);
4595	  elt_count += mult * TREE_STRING_LENGTH (value);
4596	  break;
4597
4598	case COMPLEX_CST:
4599	  if (!initializer_zerop (TREE_REALPART (value)))
4600	    nz_elts += mult;
4601	  if (!initializer_zerop (TREE_IMAGPART (value)))
4602	    nz_elts += mult;
4603	  elt_count += mult;
4604	  break;
4605
4606	case VECTOR_CST:
4607	  {
4608	    tree v;
4609	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4610	      {
4611		if (!initializer_zerop (TREE_VALUE (v)))
4612		  nz_elts += mult;
4613		elt_count += mult;
4614	      }
4615	  }
4616	  break;
4617
4618	default:
4619	  nz_elts += mult;
4620	  elt_count += mult;
4621
4622	  if (const_from_elts_p && const_p)
4623	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4624		      != NULL_TREE;
4625	  break;
4626	}
4627    }
4628
4629  if (!*p_must_clear
4630      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4631	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4632    {
4633      tree init_sub_type;
4634      bool clear_this = true;
4635
4636      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4637	{
4638	  /* We don't expect more than one element of the union to be
4639	     initialized.  Not sure what we should do otherwise... */
4640          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4641		      == 1);
4642
4643          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4644						CONSTRUCTOR_ELTS (ctor),
4645						0)->value);
4646
4647	  /* ??? We could look at each element of the union, and find the
4648	     largest element.  Which would avoid comparing the size of the
4649	     initialized element against any tail padding in the union.
4650	     Doesn't seem worth the effort...  */
4651	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4652				TYPE_SIZE (init_sub_type)) == 1)
4653	    {
4654	      /* And now we have to find out if the element itself is fully
4655		 constructed.  E.g. for union { struct { int a, b; } s; } u
4656		 = { .s = { .a = 1 } }.  */
4657	      if (elt_count == count_type_elements (init_sub_type, false))
4658		clear_this = false;
4659	    }
4660	}
4661
4662      *p_must_clear = clear_this;
4663    }
4664
4665  *p_nz_elts += nz_elts;
4666  *p_elt_count += elt_count;
4667
4668  return const_p;
4669}
4670
4671/* Examine CTOR to discover:
4672   * how many scalar fields are set to nonzero values,
4673     and place it in *P_NZ_ELTS;
4674   * how many scalar fields in total are in CTOR,
4675     and place it in *P_ELT_COUNT;
4676   * if a type is a union, and the initializer from the constructor
4677     is not the largest element in the union, then set *p_must_clear.
4678
4679   Return whether or not CTOR is a valid static constant initializer, the same
4680   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
4681
4682bool
4683categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4684			  HOST_WIDE_INT *p_elt_count,
4685			  bool *p_must_clear)
4686{
4687  *p_nz_elts = 0;
4688  *p_elt_count = 0;
4689  *p_must_clear = false;
4690
4691  return
4692    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4693}
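
/* Illustrative usage sketch, not part of the compiler: a caller using
   the categorization above to decide whether to pre-clear the target,
   in the spirit of mostly_zeros_p below.  The helper name and the
   particular threshold shown are ours.  */
#if 0
static bool
worth_clearing_first (tree ctor)
{
  HOST_WIDE_INT nz_elts, elt_count;
  bool must_clear;

  categorize_ctor_elements (ctor, &nz_elts, &elt_count, &must_clear);

  /* Clear when a union member forces it, or when most elements are
     zero anyway and a block clear is cheaper than many stores.  */
  return must_clear || nz_elts < elt_count / 4;
}
#endif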
4694
4695/* Count the number of scalars in TYPE.  Return -1 on overflow or
4696   if TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
4697   flexible array member at the end of the structure.  */
4698
4699HOST_WIDE_INT
4700count_type_elements (tree type, bool allow_flexarr)
4701{
4702  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4703  switch (TREE_CODE (type))
4704    {
4705    case ARRAY_TYPE:
4706      {
4707	tree telts = array_type_nelts (type);
4708	if (telts && host_integerp (telts, 1))
4709	  {
4710	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4711	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4712	    if (n == 0)
4713	      return 0;
4714	    else if (max / n > m)
4715	      return n * m;
4716	  }
4717	return -1;
4718      }
4719
4720    case RECORD_TYPE:
4721      {
4722	HOST_WIDE_INT n = 0, t;
4723	tree f;
4724
4725	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4726	  if (TREE_CODE (f) == FIELD_DECL)
4727	    {
4728	      t = count_type_elements (TREE_TYPE (f), false);
4729	      if (t < 0)
4730		{
4731		  /* Check for structures with flexible array member.  */
4732		  tree tf = TREE_TYPE (f);
4733		  if (allow_flexarr
4734		      && TREE_CHAIN (f) == NULL
4735		      && TREE_CODE (tf) == ARRAY_TYPE
4736		      && TYPE_DOMAIN (tf)
4737		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4738		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4739		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4740		      && int_size_in_bytes (type) >= 0)
4741		    break;
4742
4743		  return -1;
4744		}
4745	      n += t;
4746	    }
4747
4748	return n;
4749      }
4750
4751    case UNION_TYPE:
4752    case QUAL_UNION_TYPE:
4753      return -1;
4754
4755    case COMPLEX_TYPE:
4756      return 2;
4757
4758    case VECTOR_TYPE:
4759      return TYPE_VECTOR_SUBPARTS (type);
4760
4761    case INTEGER_TYPE:
4762    case REAL_TYPE:
4763    case ENUMERAL_TYPE:
4764    case BOOLEAN_TYPE:
4765    case POINTER_TYPE:
4766    case OFFSET_TYPE:
4767    case REFERENCE_TYPE:
4768      return 1;
4769
4770    case VOID_TYPE:
4771    case METHOD_TYPE:
4772    case FUNCTION_TYPE:
4773    case LANG_TYPE:
4774    default:
4775      gcc_unreachable ();
4776    }
4777}
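
/* Illustrative sketch, not part of the compiler: the ARRAY_TYPE overflow
   guard above in isolation.  N elements of M scalars each count as N * M
   only when the product provably fits; the helper is ours.  */
#if 0
static long
checked_scalar_count (long n, long m, long max)
{
  if (n == 0)
    return 0;
  /* Same conservative test as above: demand max / n > m, so the
     multiplication below cannot overflow.  */
  return (max / n > m) ? n * m : -1;
}
#endif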
4778
4779/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4780
4781static int
4782mostly_zeros_p (tree exp)
4783{
4784  if (TREE_CODE (exp) == CONSTRUCTOR)
4786    {
4787      HOST_WIDE_INT nz_elts, count, elts;
4788      bool must_clear;
4789
4790      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4791      if (must_clear)
4792	return 1;
4793
4794      elts = count_type_elements (TREE_TYPE (exp), false);
4795
4796      return nz_elts < elts / 4;
4797    }
4798
4799  return initializer_zerop (exp);
4800}
4801
4802/* Return 1 if EXP contains all zeros.  */
4803
4804static int
4805all_zeros_p (tree exp)
4806{
4807  if (TREE_CODE (exp) == CONSTRUCTOR)
4809    {
4810      HOST_WIDE_INT nz_elts, count;
4811      bool must_clear;
4812
4813      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4814      return nz_elts == 0;
4815    }
4816
4817  return initializer_zerop (exp);
4818}
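
/* Illustrative sketch, not part of the compiler: the threshold the two
   predicates above apply, restated over a host array.  For
   int a[8] = { 0, 0, 1, 0, 0, 0, 0, 0 } this yields nz == 1 < 8 / 4,
   so the initializer counts as mostly zeros.  The helper is ours.  */
#if 0
static int
mostly_zeros_demo (const int *a, int n)
{
  int i, nz = 0;

  for (i = 0; i < n; i++)
    nz += (a[i] != 0);
  /* Mirrors the nz_elts < elts / 4 test in mostly_zeros_p.  */
  return nz < n / 4;
}
#endif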
4819
4820/* Helper function for store_constructor.
4821   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4822   TYPE is the type of the CONSTRUCTOR, not the element type.
4823   CLEARED is as for store_constructor.
4824   ALIAS_SET is the alias set to use for any stores.
4825
4826   This provides a recursive shortcut back to store_constructor when it isn't
4827   necessary to go through store_field.  This is so that we can pass through
4828   the cleared field to let store_constructor know that we may not have to
4829   clear a substructure if the outer structure has already been cleared.  */
4830
4831static void
4832store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4833			 HOST_WIDE_INT bitpos, enum machine_mode mode,
4834			 tree exp, tree type, int cleared, int alias_set)
4835{
4836  if (TREE_CODE (exp) == CONSTRUCTOR
4837      /* We can only call store_constructor recursively if the size and
4838	 bit position are on a byte boundary.  */
4839      && bitpos % BITS_PER_UNIT == 0
4840      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4841      /* If we have a nonzero bitpos for a register target, then we just
4842	 let store_field do the bitfield handling.  This is unlikely to
4843	 generate unnecessary clear instructions anyway.  */
4844      && (bitpos == 0 || MEM_P (target)))
4845    {
4846      if (MEM_P (target))
4847	target
4848	  = adjust_address (target,
4849			    GET_MODE (target) == BLKmode
4850			    || 0 != (bitpos
4851				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4852			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4853
4855      /* Update the alias set, if required.  */
4856      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4857	  && MEM_ALIAS_SET (target) != 0)
4858	{
4859	  target = copy_rtx (target);
4860	  set_mem_alias_set (target, alias_set);
4861	}
4862
4863      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4864    }
4865  else
4866    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4867}
4868
4869/* Store the value of constructor EXP into the rtx TARGET.
4870   TARGET is either a REG or a MEM; we know it cannot conflict, since
4871   safe_from_p has been called.
4872   CLEARED is true if TARGET is known to have been zero'd.
4873   SIZE is the number of bytes of TARGET we are allowed to modify: this
4874   may not be the same as the size of EXP if we are assigning to a field
4875   which has been packed to exclude padding bits.  */
4876
4877static void
4878store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4879{
4880  tree type = TREE_TYPE (exp);
4881#ifdef WORD_REGISTER_OPERATIONS
4882  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4883#endif
4884
4885  switch (TREE_CODE (type))
4886    {
4887    case RECORD_TYPE:
4888    case UNION_TYPE:
4889    case QUAL_UNION_TYPE:
4890      {
4891	unsigned HOST_WIDE_INT idx;
4892	tree field, value;
4893
4894	/* If size is zero or the target is already cleared, do nothing.  */
4895	if (size == 0 || cleared)
4896	  cleared = 1;
4897	/* We either clear the aggregate or indicate the value is dead.  */
4898	else if ((TREE_CODE (type) == UNION_TYPE
4899		  || TREE_CODE (type) == QUAL_UNION_TYPE)
4900		 && ! CONSTRUCTOR_ELTS (exp))
4901	  /* If the constructor is empty, clear the union.  */
4902	  {
4903	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4904	    cleared = 1;
4905	  }
4906
4907	/* If we are building a static constructor into a register,
4908	   set the initial value as zero so we can fold the value into
4909	   a constant.  But if more than one register is involved,
4910	   this probably loses.  */
4911	else if (REG_P (target) && TREE_STATIC (exp)
4912		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4913	  {
4914	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4915	    cleared = 1;
4916	  }
4917
4918        /* If the constructor has fewer fields than the structure or
4919	   if we are initializing the structure to mostly zeros, clear
4920	   the whole structure first.  Don't do this if TARGET is a
4921	   register whose mode size isn't equal to SIZE since
4922	   clear_storage can't handle this case.  */
4923	else if (size > 0
4924		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4925		      != fields_length (type))
4926		     || mostly_zeros_p (exp))
4927		 && (!REG_P (target)
4928		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4929			 == size)))
4930	  {
4931	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4932	    cleared = 1;
4933	  }
4934
4935	if (! cleared)
4936	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4937
4938	/* Store each element of the constructor into the
4939	   corresponding field of TARGET.  */
4940	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4941	  {
4942	    enum machine_mode mode;
4943	    HOST_WIDE_INT bitsize;
4944	    HOST_WIDE_INT bitpos = 0;
4945	    tree offset;
4946	    rtx to_rtx = target;
4947
4948	    /* Just ignore missing fields.  We cleared the whole
4949	       structure, above, if any fields are missing.  */
4950	    if (field == 0)
4951	      continue;
4952
4953	    if (cleared && initializer_zerop (value))
4954	      continue;
4955
4956	    if (host_integerp (DECL_SIZE (field), 1))
4957	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
4958	    else
4959	      bitsize = -1;
4960
4961	    mode = DECL_MODE (field);
4962	    if (DECL_BIT_FIELD (field))
4963	      mode = VOIDmode;
4964
4965	    offset = DECL_FIELD_OFFSET (field);
4966	    if (host_integerp (offset, 0)
4967		&& host_integerp (bit_position (field), 0))
4968	      {
4969		bitpos = int_bit_position (field);
4970		offset = 0;
4971	      }
4972	    else
4973	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4974
4975	    if (offset)
4976	      {
4977		rtx offset_rtx;
4978
4979		offset
4980		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4981						    make_tree (TREE_TYPE (exp),
4982							       target));
4983
4984		offset_rtx = expand_normal (offset);
4985		gcc_assert (MEM_P (to_rtx));
4986
4987#ifdef POINTERS_EXTEND_UNSIGNED
4988		if (GET_MODE (offset_rtx) != Pmode)
4989		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4990#else
4991		if (GET_MODE (offset_rtx) != ptr_mode)
4992		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4993#endif
4994
4995		to_rtx = offset_address (to_rtx, offset_rtx,
4996					 highest_pow2_factor (offset));
4997	      }
4998
4999#ifdef WORD_REGISTER_OPERATIONS
5000	    /* If this initializes a field that is smaller than a
5001	       word, at the start of a word, try to widen it to a full
5002	       word.  This special case allows us to output C++ member
5003	       function initializations in a form that the optimizers
5004	       can understand.  */
5005	    if (REG_P (target)
5006		&& bitsize < BITS_PER_WORD
5007		&& bitpos % BITS_PER_WORD == 0
5008		&& GET_MODE_CLASS (mode) == MODE_INT
5009		&& TREE_CODE (value) == INTEGER_CST
5010		&& exp_size >= 0
5011		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5012	      {
5013		tree type = TREE_TYPE (value);
5014
5015		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5016		  {
5017		    type = lang_hooks.types.type_for_size
5018		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5019		    value = fold_convert (type, value);
5020		  }
5021
5022		if (BYTES_BIG_ENDIAN)
5023		  value
5024		   = fold_build2 (LSHIFT_EXPR, type, value,
5025				   build_int_cst (type,
5026						  BITS_PER_WORD - bitsize));
5027		bitsize = BITS_PER_WORD;
5028		mode = word_mode;
5029	      }
5030#endif
5031
5032	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5033		&& DECL_NONADDRESSABLE_P (field))
5034	      {
5035		to_rtx = copy_rtx (to_rtx);
5036		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5037	      }
5038
5039	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5040				     value, type, cleared,
5041				     get_alias_set (TREE_TYPE (field)));
5042	  }
5043	break;
5044      }
5045    case ARRAY_TYPE:
5046      {
5047	tree value, index;
5048	unsigned HOST_WIDE_INT i;
5049	int need_to_clear;
5050	tree domain;
5051	tree elttype = TREE_TYPE (type);
5052	int const_bounds_p;
5053	HOST_WIDE_INT minelt = 0;
5054	HOST_WIDE_INT maxelt = 0;
5055
5056	domain = TYPE_DOMAIN (type);
5057	const_bounds_p = (TYPE_MIN_VALUE (domain)
5058			  && TYPE_MAX_VALUE (domain)
5059			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5060			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
5061
5062	/* If we have constant bounds for the range of the type, get them.  */
5063	if (const_bounds_p)
5064	  {
5065	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5066	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5067	  }
5068
5069	/* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is a static
           constructor of a non-BLKmode object.  */
5072	if (cleared)
5073	  need_to_clear = 0;
5074	else if (REG_P (target) && TREE_STATIC (exp))
5075	  need_to_clear = 1;
5076	else
5077	  {
5078	    unsigned HOST_WIDE_INT idx;
5079	    tree index, value;
5080	    HOST_WIDE_INT count = 0, zero_count = 0;
5081	    need_to_clear = ! const_bounds_p;
5082
5083	    /* This loop is a more accurate version of the loop in
5084	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5085	       is also needed to check for missing elements.  */
5086	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5087	      {
5088		HOST_WIDE_INT this_node_count;
5089
5090		if (need_to_clear)
5091		  break;
5092
5093		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5094		  {
5095		    tree lo_index = TREE_OPERAND (index, 0);
5096		    tree hi_index = TREE_OPERAND (index, 1);
5097
5098		    if (! host_integerp (lo_index, 1)
5099			|| ! host_integerp (hi_index, 1))
5100		      {
5101			need_to_clear = 1;
5102			break;
5103		      }
5104
5105		    this_node_count = (tree_low_cst (hi_index, 1)
5106				       - tree_low_cst (lo_index, 1) + 1);
5107		  }
5108		else
5109		  this_node_count = 1;
5110
5111		count += this_node_count;
5112		if (mostly_zeros_p (value))
5113		  zero_count += this_node_count;
5114	      }
5115
5116	    /* Clear the entire array first if there are any missing
5117	       elements, or if the incidence of zero elements is >=
5118	       75%.  */
5119	    if (! need_to_clear
5120		&& (count < maxelt - minelt + 1
5121		    || 4 * zero_count >= 3 * count))
5122	      need_to_clear = 1;
5123	  }
5124
5125	if (need_to_clear && size > 0)
5126	  {
5127	    if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5129	    else
5130	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5131	    cleared = 1;
5132	  }
5133
5134	if (!cleared && REG_P (target))
5135	  /* Inform later passes that the old value is dead.  */
5136	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5137
5138	/* Store each element of the constructor into the
5139	   corresponding element of TARGET, determined by counting the
5140	   elements.  */
5141	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5142	  {
5143	    enum machine_mode mode;
5144	    HOST_WIDE_INT bitsize;
5145	    HOST_WIDE_INT bitpos;
5146	    int unsignedp;
5147	    rtx xtarget = target;
5148
5149	    if (cleared && initializer_zerop (value))
5150	      continue;
5151
5152	    unsignedp = TYPE_UNSIGNED (elttype);
5153	    mode = TYPE_MODE (elttype);
5154	    if (mode == BLKmode)
5155	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5156			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5157			 : -1);
5158	    else
5159	      bitsize = GET_MODE_BITSIZE (mode);
5160
5161	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5162	      {
5163		tree lo_index = TREE_OPERAND (index, 0);
5164		tree hi_index = TREE_OPERAND (index, 1);
5165		rtx index_r, pos_rtx;
5166		HOST_WIDE_INT lo, hi, count;
5167		tree position;
5168
		/* If the range is constant and "small" (at most two
		   iterations, or at most 40 bytes of data in total),
		   unroll the loop.  */
5170		if (const_bounds_p
5171		    && host_integerp (lo_index, 0)
5172		    && host_integerp (hi_index, 0)
5173		    && (lo = tree_low_cst (lo_index, 0),
5174			hi = tree_low_cst (hi_index, 0),
5175			count = hi - lo + 1,
5176			(!MEM_P (target)
5177			 || count <= 2
5178			 || (host_integerp (TYPE_SIZE (elttype), 1)
5179			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5180				 <= 40 * 8)))))
5181		  {
5182		    lo -= minelt;  hi -= minelt;
5183		    for (; lo <= hi; lo++)
5184		      {
5185			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5186
5187			if (MEM_P (target)
5188			    && !MEM_KEEP_ALIAS_SET_P (target)
5189			    && TREE_CODE (type) == ARRAY_TYPE
5190			    && TYPE_NONALIASED_COMPONENT (type))
5191			  {
5192			    target = copy_rtx (target);
5193			    MEM_KEEP_ALIAS_SET_P (target) = 1;
5194			  }
5195
5196			store_constructor_field
5197			  (target, bitsize, bitpos, mode, value, type, cleared,
5198			   get_alias_set (elttype));
5199		      }
5200		  }
5201		else
5202		  {
5203		    rtx loop_start = gen_label_rtx ();
5204		    rtx loop_end = gen_label_rtx ();
5205		    tree exit_cond;
5206
5207		    expand_normal (hi_index);
5208		    unsignedp = TYPE_UNSIGNED (domain);
5209
5210		    index = build_decl (VAR_DECL, NULL_TREE, domain);
5211
5212		    index_r
5213		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5214						   &unsignedp, 0));
5215		    SET_DECL_RTL (index, index_r);
5216		    store_expr (lo_index, index_r, 0);
5217
5218		    /* Build the head of the loop.  */
5219		    do_pending_stack_adjust ();
5220		    emit_label (loop_start);
5221
5222		    /* Assign value to element index.  */
5223		    position =
5224		      fold_convert (ssizetype,
5225				    fold_build2 (MINUS_EXPR,
5226						 TREE_TYPE (index),
5227						 index,
5228						 TYPE_MIN_VALUE (domain)));
5229
5230		    position =
5231			size_binop (MULT_EXPR, position,
5232				    fold_convert (ssizetype,
5233						  TYPE_SIZE_UNIT (elttype)));
5234
5235		    pos_rtx = expand_normal (position);
5236		    xtarget = offset_address (target, pos_rtx,
5237					      highest_pow2_factor (position));
5238		    xtarget = adjust_address (xtarget, mode, 0);
5239		    if (TREE_CODE (value) == CONSTRUCTOR)
5240		      store_constructor (value, xtarget, cleared,
5241					 bitsize / BITS_PER_UNIT);
5242		    else
5243		      store_expr (value, xtarget, 0);
5244
5245		    /* Generate a conditional jump to exit the loop.  */
5246		    exit_cond = build2 (LT_EXPR, integer_type_node,
5247					index, hi_index);
5248		    jumpif (exit_cond, loop_end);
5249
5250		    /* Update the loop counter, and jump to the head of
5251		       the loop.  */
5252		    expand_assignment (index,
5253				       build2 (PLUS_EXPR, TREE_TYPE (index),
5254					       index, integer_one_node));
5255
5256		    emit_jump (loop_start);
5257
5258		    /* Build the end of the loop.  */
5259		    emit_label (loop_end);
5260		  }
5261	      }
5262	    else if ((index != 0 && ! host_integerp (index, 0))
5263		     || ! host_integerp (TYPE_SIZE (elttype), 1))
5264	      {
5265		tree position;
5266
5267		if (index == 0)
5268		  index = ssize_int (1);
5269
5270		if (minelt)
5271		  index = fold_convert (ssizetype,
5272					fold_build2 (MINUS_EXPR,
5273						     TREE_TYPE (index),
5274						     index,
5275						     TYPE_MIN_VALUE (domain)));
5276
5277		position =
5278		  size_binop (MULT_EXPR, index,
5279			      fold_convert (ssizetype,
5280					    TYPE_SIZE_UNIT (elttype)));
5281		xtarget = offset_address (target,
5282					  expand_normal (position),
5283					  highest_pow2_factor (position));
5284		xtarget = adjust_address (xtarget, mode, 0);
5285		store_expr (value, xtarget, 0);
5286	      }
5287	    else
5288	      {
5289		if (index != 0)
5290		  bitpos = ((tree_low_cst (index, 0) - minelt)
5291			    * tree_low_cst (TYPE_SIZE (elttype), 1));
5292		else
5293		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5294
5295		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5296		    && TREE_CODE (type) == ARRAY_TYPE
5297		    && TYPE_NONALIASED_COMPONENT (type))
5298		  {
5299		    target = copy_rtx (target);
5300		    MEM_KEEP_ALIAS_SET_P (target) = 1;
5301		  }
5302		store_constructor_field (target, bitsize, bitpos, mode, value,
5303					 type, cleared, get_alias_set (elttype));
5304	      }
5305	  }
5306	break;
5307      }
5308
5309    case VECTOR_TYPE:
5310      {
5311	unsigned HOST_WIDE_INT idx;
5312	constructor_elt *ce;
5313	int i;
5314	int need_to_clear;
5315	int icode = 0;
5316	tree elttype = TREE_TYPE (type);
5317	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5318	enum machine_mode eltmode = TYPE_MODE (elttype);
5319	HOST_WIDE_INT bitsize;
5320	HOST_WIDE_INT bitpos;
5321	rtvec vector = NULL;
5322	unsigned n_elts;
5323
5324	gcc_assert (eltmode != BLKmode);
5325
5326	n_elts = TYPE_VECTOR_SUBPARTS (type);
5327	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5328	  {
5329	    enum machine_mode mode = GET_MODE (target);
5330
5331	    icode = (int) vec_init_optab->handlers[mode].insn_code;
5332	    if (icode != CODE_FOR_nothing)
5333	      {
5334		unsigned int i;
5335
5336		vector = rtvec_alloc (n_elts);
5337		for (i = 0; i < n_elts; i++)
5338		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5339	      }
5340	  }
5341
5342	/* If the constructor has fewer elements than the vector,
	   clear the whole vector first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
5345	if (cleared)
5346	  need_to_clear = 0;
5347	else if (REG_P (target) && TREE_STATIC (exp))
5348	  need_to_clear = 1;
5349	else
5350	  {
5351	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5352	    tree value;
5353
5354	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5355	      {
5356		int n_elts_here = tree_low_cst
5357		  (int_const_binop (TRUNC_DIV_EXPR,
5358				    TYPE_SIZE (TREE_TYPE (value)),
5359				    TYPE_SIZE (elttype), 0), 1);
5360
5361		count += n_elts_here;
5362		if (mostly_zeros_p (value))
5363		  zero_count += n_elts_here;
5364	      }
5365
5366	    /* Clear the entire vector first if there are any missing elements,
5367	       or if the incidence of zero elements is >= 75%.  */
5368	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5369	  }
5370
5371	if (need_to_clear && size > 0 && !vector)
5372	  {
5373	    if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5375	    else
5376	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5377	    cleared = 1;
5378	  }
5379
5380	/* Inform later passes that the old value is dead.  */
5381	if (!cleared && !vector && REG_P (target))
5382	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5383
5384        /* Store each element of the constructor into the corresponding
5385	   element of TARGET, determined by counting the elements.  */
5386	for (idx = 0, i = 0;
5387	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5388	     idx++, i += bitsize / elt_size)
5389	  {
5390	    HOST_WIDE_INT eltpos;
5391	    tree value = ce->value;
5392
5393	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5394	    if (cleared && initializer_zerop (value))
5395	      continue;
5396
5397	    if (ce->index)
5398	      eltpos = tree_low_cst (ce->index, 1);
5399	    else
5400	      eltpos = i;
5401
5402	    if (vector)
5403	      {
5404	        /* Vector CONSTRUCTORs should only be built from smaller
5405		   vectors in the case of BLKmode vectors.  */
5406		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5407		RTVEC_ELT (vector, eltpos)
5408		  = expand_normal (value);
5409	      }
5410	    else
5411	      {
5412		enum machine_mode value_mode =
5413		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5414		  ? TYPE_MODE (TREE_TYPE (value))
5415		  : eltmode;
5416		bitpos = eltpos * elt_size;
5417		store_constructor_field (target, bitsize, bitpos,
5418					 value_mode, value, type,
5419					 cleared, get_alias_set (elttype));
5420	      }
5421	  }
5422
5423	if (vector)
5424	  emit_insn (GEN_FCN (icode)
5425		     (target,
5426		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5427	break;
5428      }
5429
5430    default:
5431      gcc_unreachable ();
5432    }
5433}
5434
5435/* Store the value of EXP (an expression tree)
5436   into a subfield of TARGET which has mode MODE and occupies
5437   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5438   If MODE is VOIDmode, it means that we are storing into a bit-field.
5439
5440   Always return const0_rtx unless we have something particular to
5441   return.
5442
5443   TYPE is the type of the underlying object,
5444
5445   ALIAS_SET is the alias set for the destination.  This value will
5446   (in general) be different from that for TARGET, since TARGET is a
5447   reference to the containing structure.  */
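
/* An illustrative example (the numbers are a sketch; layout is
   target-dependent): for

     struct s { unsigned int x : 3; };
     void f (struct s *p) { p->x = 5; }

   the assignment arrives here with BITSIZE 3, BITPOS 0 and MODE
   VOIDmode, which selects the bit-field path below and ends in
   store_bit_field.  */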
5448
5449static rtx
5450store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5451	     enum machine_mode mode, tree exp, tree type, int alias_set)
5452{
5453  HOST_WIDE_INT width_mask = 0;
5454
5455  if (TREE_CODE (exp) == ERROR_MARK)
5456    return const0_rtx;
5457
5458  /* If we have nothing to store, do nothing unless the expression has
5459     side-effects.  */
5460  if (bitsize == 0)
5461    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5462  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5463    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5464
5465  /* If we are storing into an unaligned field of an aligned union that is
5466     in a register, we may have the mode of TARGET being an integer mode but
5467     MODE == BLKmode.  In that case, get an aligned object whose size and
5468     alignment are the same as TARGET and store TARGET into it (we can avoid
5469     the store if the field being stored is the entire width of TARGET).  Then
5470     call ourselves recursively to store the field into a BLKmode version of
5471     that object.  Finally, load from the object into TARGET.  This is not
5472     very efficient in general, but should only be slightly more expensive
5473     than the otherwise-required unaligned accesses.  Perhaps this can be
5474     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5475     twice, once with emit_move_insn and once via store_field.  */
5476
5477  if (mode == BLKmode
5478      && (REG_P (target) || GET_CODE (target) == SUBREG))
5479    {
5480      rtx object = assign_temp (type, 0, 1, 1);
5481      rtx blk_object = adjust_address (object, BLKmode, 0);
5482
5483      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5484	emit_move_insn (object, target);
5485
5486      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5487
5488      emit_move_insn (target, object);
5489
5490      /* We want to return the BLKmode version of the data.  */
5491      return blk_object;
5492    }
5493
5494  if (GET_CODE (target) == CONCAT)
5495    {
5496      /* We're storing into a struct containing a single __complex.  */
5497
5498      gcc_assert (!bitpos);
5499      return store_expr (exp, target, 0);
5500    }
5501
5502  /* If the structure is in a register or if the component
5503     is a bit field, we cannot use addressing to access it.
5504     Use bit-field techniques or SUBREG to store in it.  */
5505
5506  if (mode == VOIDmode
5507      || (mode != BLKmode && ! direct_store[(int) mode]
5508	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5509	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5510      || REG_P (target)
5511      || GET_CODE (target) == SUBREG
5512      /* If the field isn't aligned enough to store as an ordinary memref,
5513	 store it as a bit field.  */
5514      || (mode != BLKmode
5515	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5516		|| bitpos % GET_MODE_ALIGNMENT (mode))
5517	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5518	      || (bitpos % BITS_PER_UNIT != 0)))
5519      /* If the RHS and field are a constant size and the size of the
5520	 RHS isn't the same size as the bitfield, we must use bitfield
5521	 operations.  */
5522      || (bitsize >= 0
5523	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5524	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5525    {
5526      rtx temp;
5527
5528      /* If EXP is a NOP_EXPR of precision less than its mode, then that
5529	 implies a mask operation.  If the precision is the same size as
5530	 the field we're storing into, that mask is redundant.  This is
5531	 particularly common with bit field assignments generated by the
5532	 C front end.  */
5533      if (TREE_CODE (exp) == NOP_EXPR)
5534	{
5535	  tree type = TREE_TYPE (exp);
5536	  if (INTEGRAL_TYPE_P (type)
5537	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5538	      && bitsize == TYPE_PRECISION (type))
5539	    {
5540	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
5541	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5542		exp = TREE_OPERAND (exp, 0);
5543	    }
5544	}
5545
5546      temp = expand_normal (exp);
5547
5548      /* If BITSIZE is narrower than the size of the type of EXP
5549	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5552      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5553	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5554	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5555	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5556			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5557				       - bitsize),
5558			     NULL_RTX, 1);
5559
5560      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5561	 MODE.  */
5562      if (mode != VOIDmode && mode != BLKmode
5563	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5564	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5565
5566      /* If the modes of TARGET and TEMP are both BLKmode, both
5567	 must be in memory and BITPOS must be aligned on a byte
5568	 boundary.  If so, we simply do a block copy.  */
5569      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5570	{
5571	  gcc_assert (MEM_P (target) && MEM_P (temp)
5572		      && !(bitpos % BITS_PER_UNIT));
5573
5574	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5575	  emit_block_move (target, temp,
5576			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5577				    / BITS_PER_UNIT),
5578			   BLOCK_OP_NORMAL);
5579
5580	  return const0_rtx;
5581	}
5582
5583      /* Store the value in the bitfield.  */
5584      store_bit_field (target, bitsize, bitpos, mode, temp);
5585
5586      return const0_rtx;
5587    }
5588  else
5589    {
5590      /* Now build a reference to just the desired component.  */
5591      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5592
5593      if (to_rtx == target)
5594	to_rtx = copy_rtx (to_rtx);
5595
5596      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5597      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5598	set_mem_alias_set (to_rtx, alias_set);
5599
5600      return store_expr (exp, to_rtx, 0);
5601    }
5602}
5603
5604/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5605   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5606   codes and find the ultimate containing object, which we return.
5607
5608   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5609   bit position, and *PUNSIGNEDP to the signedness of the field.
5610   If the position of the field is variable, we store a tree
5611   giving the variable offset (in units) in *POFFSET.
5612   This offset is in addition to the bit position.
5613   If the position is not variable, we store 0 in *POFFSET.
5614
5615   If any of the extraction expressions is volatile,
5616   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5617
5618   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5619   is a mode that can be used to access the field.  In that case, *PBITSIZE
5620   is redundant.
5621
5622   If the field describes a variable-sized object, *PMODE is set to
5623   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5624   this case, but the address of the object can be found.
5625
5626   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5627   look through nodes that serve as markers of a greater alignment than
5628   the one that can be deduced from the expression.  These nodes make it
5629   possible for front-ends to prevent temporaries from being created by
5630   the middle-end on alignment considerations.  For that purpose, the
5631   normal operating mode at high-level is to always pass FALSE so that
5632   the ultimate containing object is really returned; moreover, the
5633   associated predicate handled_component_p will always return TRUE
5634   on these nodes, thus indicating that they are essentially handled
5635   by get_inner_reference.  TRUE should only be passed when the caller
5636   is scanning the expression in order to build another representation
5637   and specifically knows how to handle these nodes; as such, this is
5638   the normal operating mode in the RTL expanders.  */
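
/* A worked example (illustrative; the numbers assume a 32-bit target):
   for the reference

     struct s { int pad; int arr[10]; };
     ... ((struct s *) p)->arr[i] ...

   the loop below peels off the ARRAY_REF and COMPONENT_REF and returns
   the INDIRECT_REF of P, with *PBITSIZE 32, *PMODE SImode, *PBITPOS 32
   (the constant displacement of ARR) and *POFFSET a sizetype tree for
   I scaled by the element size.  */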
5639
5640tree
5641get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5642		     HOST_WIDE_INT *pbitpos, tree *poffset,
5643		     enum machine_mode *pmode, int *punsignedp,
5644		     int *pvolatilep, bool keep_aligning)
5645{
5646  tree size_tree = 0;
5647  enum machine_mode mode = VOIDmode;
5648  tree offset = size_zero_node;
5649  tree bit_offset = bitsize_zero_node;
5650
5651  /* First get the mode, signedness, and size.  We do this from just the
5652     outermost expression.  */
5653  if (TREE_CODE (exp) == COMPONENT_REF)
5654    {
5655      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5656      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5657	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5658
5659      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5660    }
5661  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5662    {
5663      size_tree = TREE_OPERAND (exp, 1);
5664      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5665    }
5666  else
5667    {
5668      mode = TYPE_MODE (TREE_TYPE (exp));
5669      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5670
5671      if (mode == BLKmode)
5672	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5673      else
5674	*pbitsize = GET_MODE_BITSIZE (mode);
5675    }
5676
5677  if (size_tree != 0)
5678    {
5679      if (! host_integerp (size_tree, 1))
5680	mode = BLKmode, *pbitsize = -1;
5681      else
5682	*pbitsize = tree_low_cst (size_tree, 1);
5683    }
5684
5685  *pmode = mode;
5686
5687  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5688     and find the ultimate containing object.  */
5689  while (1)
5690    {
5691      switch (TREE_CODE (exp))
5692	{
5693	case BIT_FIELD_REF:
5694	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5695				   TREE_OPERAND (exp, 2));
5696	  break;
5697
5698	case COMPONENT_REF:
5699	  {
5700	    tree field = TREE_OPERAND (exp, 1);
5701	    tree this_offset = component_ref_field_offset (exp);
5702
5703	    /* If this field hasn't been filled in yet, don't go past it.
5704	       This should only happen when folding expressions made during
5705	       type construction.  */
5706	    if (this_offset == 0)
5707	      break;
5708
5709	    offset = size_binop (PLUS_EXPR, offset, this_offset);
5710	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
5711				     DECL_FIELD_BIT_OFFSET (field));
5712
5713	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5714	  }
5715	  break;
5716
5717	case ARRAY_REF:
5718	case ARRAY_RANGE_REF:
5719	  {
5720	    tree index = TREE_OPERAND (exp, 1);
5721	    tree low_bound = array_ref_low_bound (exp);
5722	    tree unit_size = array_ref_element_size (exp);
5723
5724	    /* We assume all arrays have sizes that are a multiple of a byte.
5725	       First subtract the lower bound, if any, in the type of the
5726	       index, then convert to sizetype and multiply by the size of
5727	       the array element.  */
5728	    if (! integer_zerop (low_bound))
5729	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5730				   index, low_bound);
5731
5732	    offset = size_binop (PLUS_EXPR, offset,
5733			         size_binop (MULT_EXPR,
5734					     fold_convert (sizetype, index),
5735					     unit_size));
5736	  }
5737	  break;
5738
5739	case REALPART_EXPR:
5740	  break;
5741
5742	case IMAGPART_EXPR:
5743	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5744				   bitsize_int (*pbitsize));
5745	  break;
5746
5747	case VIEW_CONVERT_EXPR:
5748	  if (keep_aligning && STRICT_ALIGNMENT
5749	      && (TYPE_ALIGN (TREE_TYPE (exp))
5750	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5751	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5752		  < BIGGEST_ALIGNMENT)
5753	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5754		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5755	    goto done;
5756	  break;
5757
5758	default:
5759	  goto done;
5760	}
5761
5762      /* If any reference in the chain is volatile, the effect is volatile.  */
5763      if (TREE_THIS_VOLATILE (exp))
5764	*pvolatilep = 1;
5765
5766      exp = TREE_OPERAND (exp, 0);
5767    }
5768 done:
5769
5770  /* If OFFSET is constant, see if we can return the whole thing as a
5771     constant bit position.  Make sure to handle overflow during
5772     this conversion.  */
5773  if (host_integerp (offset, 0))
5774    {
5775      double_int tem = double_int_mul (tree_to_double_int (offset),
5776				       uhwi_to_double_int (BITS_PER_UNIT));
5777      tem = double_int_add (tem, tree_to_double_int (bit_offset));
5778      if (double_int_fits_in_shwi_p (tem))
5779	{
5780	  *pbitpos = double_int_to_shwi (tem);
5781	  *poffset = NULL_TREE;
5782	  return exp;
5783	}
5784    }
5785
5786  /* Otherwise, split it up.  */
5787  *pbitpos = tree_low_cst (bit_offset, 0);
5788  *poffset = offset;
5789
5790  return exp;
5791}
5792
5793/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5794   look for whether EXP or any nested component-refs within EXP is marked
5795   as PACKED.  */
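
/* An illustrative example:

     struct __attribute__ ((packed)) s { char c; int i; };
     ... ((struct s *) p)->i ...

   returns true, since the FIELD_DECL for I is marked DECL_PACKED.  */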
5796
5797bool
5798contains_packed_reference (tree exp)
5799{
5800  bool packed_p = false;
5801
5802  while (1)
5803    {
5804      switch (TREE_CODE (exp))
5805	{
5806	case COMPONENT_REF:
5807	  {
5808	    tree field = TREE_OPERAND (exp, 1);
5809	    packed_p = DECL_PACKED (field)
5810		       || TYPE_PACKED (TREE_TYPE (field))
5811		       || TYPE_PACKED (TREE_TYPE (exp));
5812	    if (packed_p)
5813	      goto done;
5814	  }
5815	  break;
5816
5817	case BIT_FIELD_REF:
5818	case ARRAY_REF:
5819	case ARRAY_RANGE_REF:
5820	case REALPART_EXPR:
5821	case IMAGPART_EXPR:
5822	case VIEW_CONVERT_EXPR:
5823	  break;
5824
5825	default:
5826	  goto done;
5827	}
5828      exp = TREE_OPERAND (exp, 0);
5829    }
5830 done:
5831  return packed_p;
5832}
5833
5834/* Return a tree of sizetype representing the size, in bytes, of the element
5835   of EXP, an ARRAY_REF.  */
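
/* E.g. for

     int a[10];
     ... a[i] ...

   operand 3 of the ARRAY_REF is normally absent, so this returns
   TYPE_SIZE_UNIT of the element type -- a sizetype constant equal to
   sizeof (int) on the target (an illustrative sketch).  */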
5836
5837tree
5838array_ref_element_size (tree exp)
5839{
5840  tree aligned_size = TREE_OPERAND (exp, 3);
5841  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5842
5843  /* If a size was specified in the ARRAY_REF, it's the size measured
5844     in alignment units of the element type.  So multiply by that value.  */
5845  if (aligned_size)
5846    {
5847      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5848	 sizetype from another type of the same width and signedness.  */
5849      if (TREE_TYPE (aligned_size) != sizetype)
5850	aligned_size = fold_convert (sizetype, aligned_size);
5851      return size_binop (MULT_EXPR, aligned_size,
5852		         size_int (TYPE_ALIGN_UNIT (elmt_type)));
5853    }
5854
5855  /* Otherwise, take the size from that of the element type.  Substitute
5856     any PLACEHOLDER_EXPR that we have.  */
5857  else
5858    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5859}
5860
5861/* Return a tree representing the lower bound of the array mentioned in
5862   EXP, an ARRAY_REF.  */
5863
5864tree
5865array_ref_low_bound (tree exp)
5866{
5867  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5868
5869  /* If a lower bound is specified in EXP, use it.  */
5870  if (TREE_OPERAND (exp, 2))
5871    return TREE_OPERAND (exp, 2);
5872
5873  /* Otherwise, if there is a domain type and it has a lower bound, use it,
5874     substituting for a PLACEHOLDER_EXPR as needed.  */
5875  if (domain_type && TYPE_MIN_VALUE (domain_type))
5876    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5877
5878  /* Otherwise, return a zero of the appropriate type.  */
5879  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5880}
5881
5882/* Return a tree representing the upper bound of the array mentioned in
5883   EXP, an ARRAY_REF.  */
5884
5885tree
5886array_ref_up_bound (tree exp)
5887{
5888  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5889
5890  /* If there is a domain type and it has an upper bound, use it, substituting
5891     for a PLACEHOLDER_EXPR as needed.  */
5892  if (domain_type && TYPE_MAX_VALUE (domain_type))
5893    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5894
5895  /* Otherwise fail.  */
5896  return NULL_TREE;
5897}
5898
5899/* Return a tree representing the offset, in bytes, of the field referenced
5900   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
5901
5902tree
5903component_ref_field_offset (tree exp)
5904{
5905  tree aligned_offset = TREE_OPERAND (exp, 2);
5906  tree field = TREE_OPERAND (exp, 1);
5907
5908  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5909     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
5910     value.  */
5911  if (aligned_offset)
5912    {
5913      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5914	 sizetype from another type of the same width and signedness.  */
5915      if (TREE_TYPE (aligned_offset) != sizetype)
5916	aligned_offset = fold_convert (sizetype, aligned_offset);
5917      return size_binop (MULT_EXPR, aligned_offset,
5918		         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5919    }
5920
5921  /* Otherwise, take the offset from that of the field.  Substitute
5922     any PLACEHOLDER_EXPR that we have.  */
5923  else
5924    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5925}
5926
5927/* Return 1 if T is an expression that get_inner_reference handles.  */
5928
5929int
5930handled_component_p (tree t)
5931{
5932  switch (TREE_CODE (t))
5933    {
5934    case BIT_FIELD_REF:
5935    case COMPONENT_REF:
5936    case ARRAY_REF:
5937    case ARRAY_RANGE_REF:
5938    case VIEW_CONVERT_EXPR:
5939    case REALPART_EXPR:
5940    case IMAGPART_EXPR:
5941      return 1;
5942
5943    default:
5944      return 0;
5945    }
5946}
5947
5948/* Given an rtx VALUE that may contain additions and multiplications, return
5949   an equivalent value that just refers to a register, memory, or constant.
5950   This is done by generating instructions to perform the arithmetic and
5951   returning a pseudo-register containing the value.
5952
5953   The returned value may be a REG, SUBREG, MEM or constant.  */
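
/* A minimal sketch of the effect: given (plus (reg 60) (const_int 4))
   and a zero TARGET, this emits an add into a fresh pseudo and returns
   that pseudo, so the caller sees a plain REG rather than an arithmetic
   expression.  (The register number is made up for illustration.)  */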
5954
5955rtx
5956force_operand (rtx value, rtx target)
5957{
5958  rtx op1, op2;
5959  /* Use subtarget as the target for operand 0 of a binary operation.  */
5960  rtx subtarget = get_subtarget (target);
5961  enum rtx_code code = GET_CODE (value);
5962
5963  /* Check for subreg applied to an expression produced by loop optimizer.  */
5964  if (code == SUBREG
5965      && !REG_P (SUBREG_REG (value))
5966      && !MEM_P (SUBREG_REG (value)))
5967    {
5968      value = simplify_gen_subreg (GET_MODE (value),
5969				   force_reg (GET_MODE (SUBREG_REG (value)),
5970					      force_operand (SUBREG_REG (value),
5971							     NULL_RTX)),
5972				   GET_MODE (SUBREG_REG (value)),
5973				   SUBREG_BYTE (value));
5974      code = GET_CODE (value);
5975    }
5976
5977  /* Check for a PIC address load.  */
5978  if ((code == PLUS || code == MINUS)
5979      && XEXP (value, 0) == pic_offset_table_rtx
5980      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5981	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5982	  || GET_CODE (XEXP (value, 1)) == CONST))
5983    {
5984      if (!subtarget)
5985	subtarget = gen_reg_rtx (GET_MODE (value));
5986      emit_move_insn (subtarget, value);
5987      return subtarget;
5988    }
5989
5990  if (ARITHMETIC_P (value))
5991    {
5992      op2 = XEXP (value, 1);
5993      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5994	subtarget = 0;
5995      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5996	{
5997	  code = PLUS;
5998	  op2 = negate_rtx (GET_MODE (value), op2);
5999	}
6000
6001      /* Check for an addition with OP2 a constant integer and our first
6002         operand a PLUS of a virtual register and something else.  In that
6003         case, we want to emit the sum of the virtual register and the
6004         constant first and then add the other value.  This allows virtual
6005         register instantiation to simply modify the constant rather than
6006         creating another one around this addition.  */
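      /* Concretely (an illustrative sketch): given
         (plus (plus (reg virtual-stack-vars) (reg 61)) (const_int 8))
         we first compute virtual-stack-vars + 8, which instantiation can
         later fold into a single frame-pointer offset, and only then add
         (reg 61).  */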
6007      if (code == PLUS && GET_CODE (op2) == CONST_INT
6008	  && GET_CODE (XEXP (value, 0)) == PLUS
6009	  && REG_P (XEXP (XEXP (value, 0), 0))
6010	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6011	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6012	{
6013	  rtx temp = expand_simple_binop (GET_MODE (value), code,
6014					  XEXP (XEXP (value, 0), 0), op2,
6015					  subtarget, 0, OPTAB_LIB_WIDEN);
6016	  return expand_simple_binop (GET_MODE (value), code, temp,
6017				      force_operand (XEXP (XEXP (value,
6018								 0), 1), 0),
6019				      target, 0, OPTAB_LIB_WIDEN);
6020	}
6021
6022      op1 = force_operand (XEXP (value, 0), subtarget);
6023      op2 = force_operand (op2, NULL_RTX);
6024      switch (code)
6025	{
6026	case MULT:
6027	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
6028	case DIV:
6029	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
6030	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
6031					target, 1, OPTAB_LIB_WIDEN);
6032	  else
6033	    return expand_divmod (0,
6034				  FLOAT_MODE_P (GET_MODE (value))
6035				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
6036				  GET_MODE (value), op1, op2, target, 0);
6037	  break;
6038	case MOD:
6039	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6040				target, 0);
6041	  break;
6042	case UDIV:
6043	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6044				target, 1);
6045	  break;
6046	case UMOD:
6047	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6048				target, 1);
6049	  break;
6050	case ASHIFTRT:
6051	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6052				      target, 0, OPTAB_LIB_WIDEN);
6053	  break;
6054	default:
6055	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6056				      target, 1, OPTAB_LIB_WIDEN);
6057	}
6058    }
6059  if (UNARY_P (value))
6060    {
6061      if (!target)
6062	target = gen_reg_rtx (GET_MODE (value));
6063      op1 = force_operand (XEXP (value, 0), NULL_RTX);
6064      switch (code)
6065	{
6066	case ZERO_EXTEND:
6067	case SIGN_EXTEND:
6068	case TRUNCATE:
6069	case FLOAT_EXTEND:
6070	case FLOAT_TRUNCATE:
6071	  convert_move (target, op1, code == ZERO_EXTEND);
6072	  return target;
6073
6074	case FIX:
6075	case UNSIGNED_FIX:
6076	  expand_fix (target, op1, code == UNSIGNED_FIX);
6077	  return target;
6078
6079	case FLOAT:
6080	case UNSIGNED_FLOAT:
6081	  expand_float (target, op1, code == UNSIGNED_FLOAT);
6082	  return target;
6083
6084	default:
6085	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6086	}
6087    }
6088
6089#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with paradoxical SUBREGs of MEMs
     here.  */
6092  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6093      && (GET_MODE_SIZE (GET_MODE (value))
6094	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6095    value
6096      = simplify_gen_subreg (GET_MODE (value),
6097			     force_reg (GET_MODE (SUBREG_REG (value)),
6098					force_operand (SUBREG_REG (value),
6099						       NULL_RTX)),
6100			     GET_MODE (SUBREG_REG (value)),
6101			     SUBREG_BYTE (value));
6102#endif
6103
6104  return value;
6105}
6106
6107/* Subroutine of expand_expr: return nonzero iff there is no way that
6108   EXP can reference X, which is being modified.  TOP_P is nonzero if this
6109   call is going to be used to determine whether we need a temporary
6110   for EXP, as opposed to a recursive call to this function.
6111
6112   It is always safe for this routine to return zero since it merely
6113   searches for optimization opportunities.  */
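
/* For instance (illustrative): when expand_operands considers reusing
   its suggested target rtx for the first operand, it first asks whether
   that rtx is safe from the second operand's tree; if the two might
   overlap (say, both refer to the same MEM), we return 0 and the caller
   falls back to a fresh temporary.  */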
6114
6115int
6116safe_from_p (rtx x, tree exp, int top_p)
6117{
6118  rtx exp_rtl = 0;
6119  int i, nops;
6120
6121  if (x == 0
6122      /* If EXP has varying size, we MUST use a target since we currently
6123	 have no way of allocating temporaries of variable size
6124	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6125	 So we assume here that something at a higher level has prevented a
6126	 clash.  This is somewhat bogus, but the best we can do.  Only
6127	 do this when X is BLKmode and when we are at the top level.  */
6128      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6129	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6130	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6131	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6132	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6133	      != INTEGER_CST)
6134	  && GET_MODE (x) == BLKmode)
6135      /* If X is in the outgoing argument area, it is always safe.  */
6136      || (MEM_P (x)
6137	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
6138	      || (GET_CODE (XEXP (x, 0)) == PLUS
6139		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6140    return 1;
6141
6142  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6143     find the underlying pseudo.  */
6144  if (GET_CODE (x) == SUBREG)
6145    {
6146      x = SUBREG_REG (x);
6147      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6148	return 0;
6149    }
6150
6151  /* Now look at our tree code and possibly recurse.  */
6152  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6153    {
6154    case tcc_declaration:
6155      exp_rtl = DECL_RTL_IF_SET (exp);
6156      break;
6157
6158    case tcc_constant:
6159      return 1;
6160
6161    case tcc_exceptional:
6162      if (TREE_CODE (exp) == TREE_LIST)
6163	{
6164	  while (1)
6165	    {
6166	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6167		return 0;
6168	      exp = TREE_CHAIN (exp);
6169	      if (!exp)
6170		return 1;
6171	      if (TREE_CODE (exp) != TREE_LIST)
6172		return safe_from_p (x, exp, 0);
6173	    }
6174	}
6175      else if (TREE_CODE (exp) == CONSTRUCTOR)
6176	{
6177	  constructor_elt *ce;
6178	  unsigned HOST_WIDE_INT idx;
6179
6180	  for (idx = 0;
6181	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6182	       idx++)
6183	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6184		|| !safe_from_p (x, ce->value, 0))
6185	      return 0;
6186	  return 1;
6187	}
6188      else if (TREE_CODE (exp) == ERROR_MARK)
6189	return 1;	/* An already-visited SAVE_EXPR? */
6190      else
6191	return 0;
6192
6193    case tcc_statement:
6194      /* The only case we look at here is the DECL_INITIAL inside a
6195	 DECL_EXPR.  */
6196      return (TREE_CODE (exp) != DECL_EXPR
6197	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6198	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6199	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6200
6201    case tcc_binary:
6202    case tcc_comparison:
6203      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6204	return 0;
6205      /* Fall through.  */
6206
6207    case tcc_unary:
6208      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6209
6210    case tcc_expression:
6211    case tcc_reference:
6212      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6213	 the expression.  If it is set, we conflict iff we are that rtx or
6214	 both are in memory.  Otherwise, we check all operands of the
6215	 expression recursively.  */
6216
6217      switch (TREE_CODE (exp))
6218	{
6219	case ADDR_EXPR:
6220	  /* If the operand is static or we are static, we can't conflict.
6221	     Likewise if we don't conflict with the operand at all.  */
6222	  if (staticp (TREE_OPERAND (exp, 0))
6223	      || TREE_STATIC (exp)
6224	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6225	    return 1;
6226
6227	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
6230	  exp = TREE_OPERAND (exp, 0);
6231	  if (DECL_P (exp))
6232	    {
6233	      if (!DECL_RTL_SET_P (exp)
6234		  || !MEM_P (DECL_RTL (exp)))
6235		return 0;
6236	      else
6237		exp_rtl = XEXP (DECL_RTL (exp), 0);
6238	    }
6239	  break;
6240
6241	case MISALIGNED_INDIRECT_REF:
6242	case ALIGN_INDIRECT_REF:
6243	case INDIRECT_REF:
6244	  if (MEM_P (x)
6245	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6246					get_alias_set (exp)))
6247	    return 0;
6248	  break;
6249
6250	case CALL_EXPR:
6251	  /* Assume that the call will clobber all hard registers and
6252	     all of memory.  */
6253	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6254	      || MEM_P (x))
6255	    return 0;
6256	  break;
6257
6258	case WITH_CLEANUP_EXPR:
6259	case CLEANUP_POINT_EXPR:
6260	  /* Lowered by gimplify.c.  */
6261	  gcc_unreachable ();
6262
6263	case SAVE_EXPR:
6264	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6265
6266	default:
6267	  break;
6268	}
6269
6270      /* If we have an rtx, we do not need to scan our operands.  */
6271      if (exp_rtl)
6272	break;
6273
6274      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6275      for (i = 0; i < nops; i++)
6276	if (TREE_OPERAND (exp, i) != 0
6277	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6278	  return 0;
6279
6280      /* If this is a language-specific tree code, it may require
6281	 special handling.  */
6282      if ((unsigned int) TREE_CODE (exp)
6283	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6284	  && !lang_hooks.safe_from_p (x, exp))
6285	return 0;
6286      break;
6287
6288    case tcc_type:
6289      /* Should never get a type here.  */
6290      gcc_unreachable ();
6291    }
6292
6293  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6294     with it.  */
6295  if (exp_rtl)
6296    {
6297      if (GET_CODE (exp_rtl) == SUBREG)
6298	{
6299	  exp_rtl = SUBREG_REG (exp_rtl);
6300	  if (REG_P (exp_rtl)
6301	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6302	    return 0;
6303	}
6304
6305      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6306	 are memory and they conflict.  */
6307      return ! (rtx_equal_p (x, exp_rtl)
6308		|| (MEM_P (x) && MEM_P (exp_rtl)
6309		    && true_dependence (exp_rtl, VOIDmode, x,
6310					rtx_addr_varies_p)));
6311    }
6312
6313  /* If we reach here, it is safe.  */
6314  return 1;
6315}
6316
6317
6318/* Return the highest power of two that EXP is known to be a multiple of.
6319   This is used in updating alignment of MEMs in array references.  */
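
/* E.g. (illustrative): an offset expression of the form I * 8 yields 8,
   since the product is a multiple of 8 whatever I is; I * 12 yields 4,
   the largest power of two dividing 12.  */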
6320
6321unsigned HOST_WIDE_INT
6322highest_pow2_factor (tree exp)
6323{
6324  unsigned HOST_WIDE_INT c0, c1;
6325
6326  switch (TREE_CODE (exp))
6327    {
6328    case INTEGER_CST:
6329      /* We can find the lowest bit that's a one.  If the low
6330	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6331	 We need to handle this case since we can find it in a COND_EXPR,
6332	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6333	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6334	 later ICE.  */
6335      if (TREE_CONSTANT_OVERFLOW (exp))
6336	return BIGGEST_ALIGNMENT;
6337      else
6338	{
	  /* Note: tree_low_cst is intentionally not used here;
	     we don't care about the upper bits.  */
6341	  c0 = TREE_INT_CST_LOW (exp);
6342	  c0 &= -c0;
6343	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6344	}
6345      break;
6346
6347    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6348      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6349      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6350      return MIN (c0, c1);
6351
6352    case MULT_EXPR:
6353      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6354      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6355      return c0 * c1;
6356
6357    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6358    case CEIL_DIV_EXPR:
6359      if (integer_pow2p (TREE_OPERAND (exp, 1))
6360	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6361	{
6362	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6363	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6364	  return MAX (1, c0 / c1);
6365	}
6366      break;
6367
6368    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6369    case SAVE_EXPR:
6370      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6371
6372    case COMPOUND_EXPR:
6373      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6374
6375    case COND_EXPR:
6376      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6377      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6378      return MIN (c0, c1);
6379
6380    default:
6381      break;
6382    }
6383
6384  return 1;
6385}
6386
6387/* Similar, except that the alignment requirements of TARGET are
6388   taken into account.  Assume it is at least as aligned as its
6389   type, unless it is a COMPONENT_REF in which case the layout of
6390   the structure gives the alignment.  */
6391
6392static unsigned HOST_WIDE_INT
6393highest_pow2_factor_for_target (tree target, tree exp)
6394{
6395  unsigned HOST_WIDE_INT target_align, factor;
6396
6397  factor = highest_pow2_factor (exp);
6398  if (TREE_CODE (target) == COMPONENT_REF)
6399    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6400  else
6401    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6402  return MAX (factor, target_align);
6403}
6404
6405/* Expands variable VAR.  */
6406
6407void
6408expand_var (tree var)
6409{
6410  if (DECL_EXTERNAL (var))
6411    return;
6412
6413  if (TREE_STATIC (var))
6414    /* If this is an inlined copy of a static local variable,
6415       look up the original decl.  */
6416    var = DECL_ORIGIN (var);
6417
6418  if (TREE_STATIC (var)
6419      ? !TREE_ASM_WRITTEN (var)
6420      : !DECL_RTL_SET_P (var))
6421    {
6422      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6423	/* Should be ignored.  */;
6424      else if (lang_hooks.expand_decl (var))
6425	/* OK.  */;
6426      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6427	expand_decl (var);
6428      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6429	rest_of_decl_compilation (var, 0, 0);
6430      else
6431	/* No expansion needed.  */
6432	gcc_assert (TREE_CODE (var) == TYPE_DECL
6433		    || TREE_CODE (var) == CONST_DECL
6434		    || TREE_CODE (var) == FUNCTION_DECL
6435		    || TREE_CODE (var) == LABEL_DECL);
6436    }
6437}
6438
6439/* Subroutine of expand_expr.  Expand the two operands of a binary
6440   expression EXP0 and EXP1 placing the results in OP0 and OP1.
6441   The value may be stored in TARGET if TARGET is nonzero.  The
6442   MODIFIER argument is as documented by expand_expr.  */
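
/* For example (illustrative): for X * X, operand_equal_p fires and X is
   expanded only once, *OP1 becoming a copy_rtx of *OP0; for distinct
   operands both are expanded, the second with no suggested target.  */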
6443
6444static void
6445expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6446		 enum expand_modifier modifier)
6447{
6448  if (! safe_from_p (target, exp1, 1))
6449    target = 0;
6450  if (operand_equal_p (exp0, exp1, 0))
6451    {
6452      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6453      *op1 = copy_rtx (*op0);
6454    }
6455  else
6456    {
6457      /* If we need to preserve evaluation order, copy exp0 into its own
6458	 temporary variable so that it can't be clobbered by exp1.  */
6459      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6460	exp0 = save_expr (exp0);
6461      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6462      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6463    }
6464}
6465
6466
6467/* Return a MEM that contains constant EXP.  DEFER is as for
6468   output_constant_def and MODIFIER is as for expand_expr.  */
6469
6470static rtx
6471expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6472{
6473  rtx mem;
6474
6475  mem = output_constant_def (exp, defer);
6476  if (modifier != EXPAND_INITIALIZER)
6477    mem = use_anchored_address (mem);
6478  return mem;
6479}
6480
6481/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6482   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6483
6484static rtx
6485expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6486		         enum expand_modifier modifier)
6487{
6488  rtx result, subtarget;
6489  tree inner, offset;
6490  HOST_WIDE_INT bitsize, bitpos;
6491  int volatilep, unsignedp;
6492  enum machine_mode mode1;
6493
6494  /* If we are taking the address of a constant and are at the top level,
6495     we have to use output_constant_def since we can't call force_const_mem
6496     at top level.  */
6497  /* ??? This should be considered a front-end bug.  We should not be
6498     generating ADDR_EXPR of something that isn't an LVALUE.  The only
6499     exception here is STRING_CST.  */
6500  if (TREE_CODE (exp) == CONSTRUCTOR
6501      || CONSTANT_CLASS_P (exp))
6502    return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6503
6504  /* Everything must be something allowed by is_gimple_addressable.  */
6505  switch (TREE_CODE (exp))
6506    {
6507    case INDIRECT_REF:
6508      /* This case will happen via recursion for &a->b.  */
6509      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6510
6511    case CONST_DECL:
6512      /* Recurse and make the output_constant_def clause above handle this.  */
6513      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6514				      tmode, modifier);
6515
6516    case REALPART_EXPR:
6517      /* The real part of the complex number is always first, therefore
6518	 the address is the same as the address of the parent object.  */
6519      offset = 0;
6520      bitpos = 0;
6521      inner = TREE_OPERAND (exp, 0);
6522      break;
6523
6524    case IMAGPART_EXPR:
6525      /* The imaginary part of the complex number is always second.
6526	 The expression is therefore always offset by the size of the
6527	 scalar type.  */
6528      offset = 0;
6529      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6530      inner = TREE_OPERAND (exp, 0);
6531      break;
6532
6533    default:
6534      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6535	 expand_expr, as that can have various side effects; LABEL_DECLs for
6536	 example, may not have their DECL_RTL set yet.  Assume language
6537	 specific tree nodes can be expanded in some interesting way.  */
6538      if (DECL_P (exp)
6539	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6540	{
6541	  result = expand_expr (exp, target, tmode,
6542				modifier == EXPAND_INITIALIZER
6543				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6544
6545	  /* If the DECL isn't in memory, then the DECL wasn't properly
6546	     marked TREE_ADDRESSABLE, which will be either a front-end
6547	     or a tree optimizer bug.  */
6548	  gcc_assert (MEM_P (result));
6549	  result = XEXP (result, 0);
6550
6551	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp))
6553	    {
6554	      assemble_external (exp);
6555	      TREE_USED (exp) = 1;
6556	    }
6557
6558	  if (modifier != EXPAND_INITIALIZER
6559	      && modifier != EXPAND_CONST_ADDRESS)
6560	    result = force_operand (result, target);
6561	  return result;
6562	}
6563
6564      /* Pass FALSE as the last argument to get_inner_reference although
6565	 we are expanding to RTL.  The rationale is that we know how to
6566	 handle "aligning nodes" here: we can just bypass them because
6567	 they won't change the final object whose address will be returned
6568	 (they actually exist only for that purpose).  */
6569      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6570				   &mode1, &unsignedp, &volatilep, false);
6571      break;
6572    }
6573
6574  /* We must have made progress.  */
6575  gcc_assert (inner != exp);
6576
6577  subtarget = offset || bitpos ? NULL_RTX : target;
6578  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6579
6580  if (offset)
6581    {
6582      rtx tmp;
6583
6584      if (modifier != EXPAND_NORMAL)
6585	result = force_operand (result, NULL);
6586      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6587
6588      result = convert_memory_address (tmode, result);
6589      tmp = convert_memory_address (tmode, tmp);
6590
6591      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6592	result = gen_rtx_PLUS (tmode, result, tmp);
6593      else
6594	{
6595	  subtarget = bitpos ? NULL_RTX : target;
6596	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6597					1, OPTAB_LIB_WIDEN);
6598	}
6599    }
6600
6601  if (bitpos)
6602    {
6603      /* Someone beforehand should have rejected taking the address
6604	 of such an object.  */
6605      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6606
6607      result = plus_constant (result, bitpos / BITS_PER_UNIT);
6608      if (modifier < EXPAND_SUM)
6609	result = force_operand (result, target);
6610    }
6611
6612  return result;
6613}
6614
6615/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
6616   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6617
6618static rtx
6619expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6620		       enum expand_modifier modifier)
6621{
6622  enum machine_mode rmode;
6623  rtx result;
6624
6625  /* Target mode of VOIDmode says "whatever's natural".  */
6626  if (tmode == VOIDmode)
6627    tmode = TYPE_MODE (TREE_TYPE (exp));
6628
6629  /* We can get called with some Weird Things if the user does silliness
6630     like "(short) &a".  In that case, convert_memory_address won't do
6631     the right thing, so ignore the given target mode.  */
6632  if (tmode != Pmode && tmode != ptr_mode)
6633    tmode = Pmode;
6634
6635  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6636				    tmode, modifier);
6637
6638  /* Despite expand_expr's claims about ignoring TMODE when it is not
6639     strictly convenient, stuff breaks if we don't honor it.  Note
6640     that combined with the above, we only do this for pointer modes.  */
6641  rmode = GET_MODE (result);
6642  if (rmode == VOIDmode)
6643    rmode = tmode;
6644  if (rmode != tmode)
6645    result = convert_memory_address (tmode, result);
6646
6647  return result;
6648}
6649
6650
6651/* expand_expr: generate code for computing expression EXP.
6652   An rtx for the computed value is returned.  The value is never null.
6653   In the case of a void EXP, const0_rtx is returned.
6654
6655   The value may be stored in TARGET if TARGET is nonzero.
6656   TARGET is just a suggestion; callers must assume that
6657   the rtx returned may not be the same as TARGET.
6658
6659   If TARGET is CONST0_RTX, it means that the value will be ignored.
6660
6661   If TMODE is not VOIDmode, it suggests generating the
6662   result in mode TMODE.  But this is done only when convenient.
6663   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6664   TMODE is just a suggestion; callers must assume that
6665   the rtx returned may not have mode TMODE.
6666
6667   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6668   probably will not be used.
6669
6670   If MODIFIER is EXPAND_SUM then when EXP is an addition
6671   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6672   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6673   products as above, or REG or MEM, or constant.
6674   Ordinarily in such cases we would output mul or add instructions
6675   and then return a pseudo reg containing the sum.
6676
6677   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6678   it also marks a label as absolutely required (it can't be dead).
6679   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6680   This is used for outputting expressions used in initializers.
6681
6682   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6683   with a constant address even if that address is not normally legitimate.
6684   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6685
6686   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6687   a call parameter.  Such targets require special care as we haven't yet
6688   marked TARGET so that it's safe from being trashed by libcalls.  We
6689   don't want to use TARGET for anything but the final result;
6690   intermediate values must go elsewhere.  Additionally, calls to
6691   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6692
6693   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6694   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6695   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
6696   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6697   recursively.  */
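/* An illustrative sketch of the EXPAND_SUM behavior described above,
   assuming a hypothetical 32-bit target where "a" is a global array of
   4-byte ints: expanding "&a[i]" with EXPAND_SUM may simply return

	(plus:SI (mult:SI (reg:SI i) (const_int 4)) (symbol_ref:SI "a"))

   instead of emitting the multiply and add insns and returning a pseudo
   register holding the sum.  */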
6698
6699static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6700			       enum expand_modifier, rtx *);
6701
6702rtx
6703expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6704		  enum expand_modifier modifier, rtx *alt_rtl)
6705{
6706  int rn = -1;
6707  rtx ret, last = NULL;
6708
6709  /* Handle ERROR_MARK before anybody tries to access its type.  */
6710  if (TREE_CODE (exp) == ERROR_MARK
6711      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6712    {
6713      ret = CONST0_RTX (tmode);
6714      return ret ? ret : const0_rtx;
6715    }
6716
6717  if (flag_non_call_exceptions)
6718    {
6719      rn = lookup_stmt_eh_region (exp);
6720      /* If rn < 0, then either (1) tree-ssa is not used or (2) EXP cannot throw.  */
6721      if (rn >= 0)
6722	last = get_last_insn ();
6723    }
6724
6725  /* If this is an expression of some kind and it has an associated line
6726     number, then emit the line number before expanding the expression.
6727
6728     We need to save and restore the file and line information so that
6729     errors discovered during expansion are emitted with the right
6730     information.  It would be better if the diagnostic routines
6731     used the file/line information embedded in the tree nodes rather
6732     than globals.  */
6733  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6734    {
6735      location_t saved_location = input_location;
6736      input_location = EXPR_LOCATION (exp);
6737      emit_line_note (input_location);
6738
6739      /* Record where the insns produced belong.  */
6740      record_block_change (TREE_BLOCK (exp));
6741
6742      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6743
6744      input_location = saved_location;
6745    }
6746  else
6747    {
6748      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6749    }
6750
6751  /* If using non-call exceptions, mark all insns that may trap.
6752     expand_call() will mark CALL_INSNs before we get to this code,
6753     but it doesn't handle libcalls, and these may trap.  */
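  /* An illustrative (hypothetical) case: a division whose expansion
     falls back to a libcall yields a CALL_INSN that expand_call never
     saw; the loop below attaches the REG_EH_REGION note to it so the
     exception region information is not lost.  */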
6754  if (rn >= 0)
6755    {
6756      rtx insn;
6757      for (insn = next_real_insn (last); insn;
6758	   insn = next_real_insn (insn))
6759	{
6760	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6761	      /* If we want exceptions for non-call insns, any
6762		 may_trap_p instruction may throw.  */
6763	      && GET_CODE (PATTERN (insn)) != CLOBBER
6764	      && GET_CODE (PATTERN (insn)) != USE
6765	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6766	    {
6767	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6768						  REG_NOTES (insn));
6769	    }
6770	}
6771    }
6772
6773  return ret;
6774}
6775
6776static rtx
6777expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6778		    enum expand_modifier modifier, rtx *alt_rtl)
6779{
6780  rtx op0, op1, temp, decl_rtl;
6781  tree type = TREE_TYPE (exp);
6782  int unsignedp;
6783  enum machine_mode mode;
6784  enum tree_code code = TREE_CODE (exp);
6785  optab this_optab;
6786  rtx subtarget, original_target;
6787  int ignore;
6788  tree context, subexp0, subexp1;
6789  bool reduce_bit_field = false;
6790#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
6791				 ? reduce_to_bit_field_precision ((expr), \
6792								  target, \
6793								  type)	  \
6794				 : (expr))
6795
6796  mode = TYPE_MODE (type);
6797  unsignedp = TYPE_UNSIGNED (type);
6798  if (lang_hooks.reduce_bit_field_operations
6799      && TREE_CODE (type) == INTEGER_TYPE
6800      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6801    {
6802      /* An operation in what may be a bit-field type needs the
6803	 result to be reduced to the precision of the bit-field type,
6804	 which is narrower than that of the type's mode.  */
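      /* For example (illustrative only): a language-defined integer
	 type with TYPE_PRECISION 24 whose TYPE_MODE is SImode; every
	 arithmetic result must be truncated back to 24 bits, which is
	 what REDUCE_BIT_FIELD does below.  */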
6805      reduce_bit_field = true;
6806      if (modifier == EXPAND_STACK_PARM)
6807	target = 0;
6808    }
6809
6810  /* Use subtarget as the target for operand 0 of a binary operation.  */
6811  subtarget = get_subtarget (target);
6812  original_target = target;
6813  ignore = (target == const0_rtx
6814	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6815		 || code == CONVERT_EXPR || code == COND_EXPR
6816		 || code == VIEW_CONVERT_EXPR)
6817		&& TREE_CODE (type) == VOID_TYPE));
6818
6819  /* If we are going to ignore this result, we need only do something
6820     if there is a side-effect somewhere in the expression.  If there
6821     is, short-circuit the most common cases here.  Note that we must
6822     not call expand_expr with anything but const0_rtx in case this
6823     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6824
6825  if (ignore)
6826    {
6827      if (! TREE_SIDE_EFFECTS (exp))
6828	return const0_rtx;
6829
6830      /* Ensure we reference a volatile object even if the value is ignored,
6831	 but don't do this if all we are doing is taking its address.  */
6832      if (TREE_THIS_VOLATILE (exp)
6833	  && TREE_CODE (exp) != FUNCTION_DECL
6834	  && mode != VOIDmode && mode != BLKmode
6835	  && modifier != EXPAND_CONST_ADDRESS)
6836	{
6837	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6838	  if (MEM_P (temp))
6839	    temp = copy_to_reg (temp);
6840	  return const0_rtx;
6841	}
6842
6843      if (TREE_CODE_CLASS (code) == tcc_unary
6844	  || code == COMPONENT_REF || code == INDIRECT_REF)
6845	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6846			    modifier);
6847
6848      else if (TREE_CODE_CLASS (code) == tcc_binary
6849	       || TREE_CODE_CLASS (code) == tcc_comparison
6850	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6851	{
6852	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6853	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6854	  return const0_rtx;
6855	}
6856      else if (code == BIT_FIELD_REF)
6857	{
6858	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6859	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6860	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6861	  return const0_rtx;
6862	}
6863
6864      target = 0;
6865    }
6866
6868  switch (code)
6869    {
6870    case LABEL_DECL:
6871      {
6872	tree function = decl_function_context (exp);
6873
6874	temp = label_rtx (exp);
6875	temp = gen_rtx_LABEL_REF (Pmode, temp);
6876
6877	if (function != current_function_decl
6878	    && function != 0)
6879	  LABEL_REF_NONLOCAL_P (temp) = 1;
6880
6881	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6882	return temp;
6883      }
6884
6885    case SSA_NAME:
6886      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6887				 NULL);
6888
6889    case PARM_DECL:
6890    case VAR_DECL:
6891      /* If a static var's type was incomplete when the decl was written,
6892	 but the type is complete now, lay out the decl now.  */
6893      if (DECL_SIZE (exp) == 0
6894	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6895	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6896	layout_decl (exp, 0);
6897
6898      /* ... fall through ...  */
6899
6900    case FUNCTION_DECL:
6901    case RESULT_DECL:
6902      decl_rtl = DECL_RTL (exp);
6903      gcc_assert (decl_rtl);
6904
6905      /* Ensure the variable is marked as used even if it doesn't go
6906	 through a parser.  If it hasn't been used yet, write out an
6907	 external definition.  */
6908      if (! TREE_USED (exp))
6909	{
6910	  assemble_external (exp);
6911	  TREE_USED (exp) = 1;
6912	}
6913
6914      /* Show we haven't gotten RTL for this yet.  */
6915      temp = 0;
6916
6917      /* Variables inherited from containing functions should have
6918	 been lowered by this point.  */
6919      context = decl_function_context (exp);
6920      gcc_assert (!context
6921		  || context == current_function_decl
6922		  || TREE_STATIC (exp)
6923		  /* ??? C++ creates functions that are not TREE_STATIC.  */
6924		  || TREE_CODE (exp) == FUNCTION_DECL);
6925
6926      /* This is the case of an array whose size is to be determined
6927	 from its initializer, while the initializer is still being parsed.
6928	 See expand_decl.  */
6929
6930      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6931	temp = validize_mem (decl_rtl);
6932
6933      /* If DECL_RTL is memory, we are in the normal case: if the address
6934	 is not valid, or it is not a register and -fforce-addr is
6935	 specified, get the address into a register.  */
6936
6937      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6938	{
6939	  if (alt_rtl)
6940	    *alt_rtl = decl_rtl;
6941	  decl_rtl = use_anchored_address (decl_rtl);
6942	  if (modifier != EXPAND_CONST_ADDRESS
6943	      && modifier != EXPAND_SUM
6944	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6945		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6946	    temp = replace_equiv_address (decl_rtl,
6947					  copy_rtx (XEXP (decl_rtl, 0)));
6948	}
6949
6950      /* If we got something, return it.  But first, set the alignment
6951	 if the address is a register.  */
6952      if (temp != 0)
6953	{
6954	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6955	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6956
6957	  return temp;
6958	}
6959
6960      /* If the mode of DECL_RTL does not match that of the decl, it
6961	 must be a promoted value.  We return a SUBREG of the wanted mode,
6962	 but mark it so that we know that it was already extended.  */
6963
6964      if (REG_P (decl_rtl)
6965	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
6966	{
6967	  enum machine_mode pmode;
6968
6969	  /* Get the signedness used for this variable.  Ensure we get the
6970	     same mode we got when the variable was declared.  */
6971	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6972				(TREE_CODE (exp) == RESULT_DECL
6973				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6974	  gcc_assert (GET_MODE (decl_rtl) == pmode);
6975
6976	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
6977	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6978	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6979	  return temp;
6980	}
6981
6982      return decl_rtl;
6983
6984    case INTEGER_CST:
6985      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6986				 TREE_INT_CST_HIGH (exp), mode);
6987
6988      /* ??? If overflow is set, fold will have done an incomplete job,
6989	 which can result in (plus xx (const_int 0)), which can get
6990	 simplified by validate_replace_rtx during virtual register
6991	 instantiation, which can result in unrecognizable insns.
6992	 Avoid this by forcing all overflows into registers.  */
6993      if (TREE_CONSTANT_OVERFLOW (exp)
6994	  && modifier != EXPAND_INITIALIZER)
6995	temp = force_reg (mode, temp);
6996
6997      return temp;
6998
6999    case VECTOR_CST:
7000      {
7001	tree tmp = NULL_TREE;
7002	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7003	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7004	  return const_vector_from_tree (exp);
7005	if (GET_MODE_CLASS (mode) == MODE_INT)
7006	  {
7007	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7008	    if (type_for_mode)
7009	      tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7010	  }
7011	if (!tmp)
7012	  tmp = build_constructor_from_list (type,
7013					     TREE_VECTOR_CST_ELTS (exp));
7014	return expand_expr (tmp, ignore ? const0_rtx : target,
7015			    tmode, modifier);
7016      }
7017
7018    case CONST_DECL:
7019      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7020
7021    case REAL_CST:
7022      /* If optimized, generate immediate CONST_DOUBLE
7023	 which will be turned into memory by reload if necessary.
7024
7025	 We used to force a register so that loop.c could see it.  But
7026	 this does not allow gen_* patterns to perform optimizations with
7027	 the constants.  It also produces two insns in cases like "x = 1.0;".
7028	 On most machines, floating-point constants are not permitted in
7029	 many insns, so we'd end up copying it to a register in any case.
7030
7031	 Now, we do the copying in expand_binop, if appropriate.  */
7032      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7033					   TYPE_MODE (TREE_TYPE (exp)));
7034
7035    case COMPLEX_CST:
7036      /* Handle evaluating a complex constant in a CONCAT target.  */
7037      if (original_target && GET_CODE (original_target) == CONCAT)
7038	{
7039	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7040	  rtx rtarg, itarg;
7041
7042	  rtarg = XEXP (original_target, 0);
7043	  itarg = XEXP (original_target, 1);
7044
7045	  /* Move the real and imaginary parts separately.  */
7046	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7047	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7048
7049	  if (op0 != rtarg)
7050	    emit_move_insn (rtarg, op0);
7051	  if (op1 != itarg)
7052	    emit_move_insn (itarg, op1);
7053
7054	  return original_target;
7055	}
7056
7057      /* ... fall through ...  */
7058
7059    case STRING_CST:
7060      temp = expand_expr_constant (exp, 1, modifier);
7061
7062      /* temp contains a constant address.
7063	 On RISC machines where a constant address isn't valid,
7064	 make some insns to get that address into a register.  */
7065      if (modifier != EXPAND_CONST_ADDRESS
7066	  && modifier != EXPAND_INITIALIZER
7067	  && modifier != EXPAND_SUM
7068	  && (! memory_address_p (mode, XEXP (temp, 0))
7069	      || flag_force_addr))
7070	return replace_equiv_address (temp,
7071				      copy_rtx (XEXP (temp, 0)));
7072      return temp;
7073
7074    case SAVE_EXPR:
7075      {
7076	tree val = TREE_OPERAND (exp, 0);
7077	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7078
7079	if (!SAVE_EXPR_RESOLVED_P (exp))
7080	  {
7081	    /* We can indeed still hit this case, typically via builtin
7082	       expanders calling save_expr immediately before expanding
7083	       something.  Assume this means that we only have to deal
7084	       with non-BLKmode values.  */
7085	    gcc_assert (GET_MODE (ret) != BLKmode);
7086
7087	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7088	    DECL_ARTIFICIAL (val) = 1;
7089	    DECL_IGNORED_P (val) = 1;
7090	    TREE_OPERAND (exp, 0) = val;
7091	    SAVE_EXPR_RESOLVED_P (exp) = 1;
7092
7093	    if (!CONSTANT_P (ret))
7094	      ret = copy_to_reg (ret);
7095	    SET_DECL_RTL (val, ret);
7096	  }
7097
7098        return ret;
7099      }
7100
7101    case GOTO_EXPR:
7102      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7103	expand_goto (TREE_OPERAND (exp, 0));
7104      else
7105	expand_computed_goto (TREE_OPERAND (exp, 0));
7106      return const0_rtx;
7107
7108    case CONSTRUCTOR:
7109      /* If we don't need the result, just ensure we evaluate any
7110	 subexpressions.  */
7111      if (ignore)
7112	{
7113	  unsigned HOST_WIDE_INT idx;
7114	  tree value;
7115
7116	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7117	    expand_expr (value, const0_rtx, VOIDmode, 0);
7118
7119	  return const0_rtx;
7120	}
7121
7122      /* Try to avoid creating a temporary at all.  This is possible
7123	 if all of the initializer is zero.
7124	 FIXME: try to handle, via memset, all initializers consisting of
7125	 a single repeated byte value in [0..255].  */
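      /* E.g. (illustrative): assigning from a constructor whose
	 elements are all zero into a BLKmode target can be expanded as
	 the single clear_storage call below instead of piecewise
	 stores.  */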
7126      else if (TREE_STATIC (exp)
7127	       && !TREE_ADDRESSABLE (exp)
7128	       && target != 0 && mode == BLKmode
7129	       && all_zeros_p (exp))
7130	{
7131	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7132	  return target;
7133	}
7134
7135      /* All elts simple constants => refer to a constant in memory.  But
7136	 if this is a non-BLKmode mode, let it store a field at a time
7137	 since that should make a CONST_INT or CONST_DOUBLE when we
7138	 fold.  Likewise, if we have a target we can use, it is best to
7139	 store directly into the target unless the type is large enough
7140	 that memcpy will be used.  If we are making an initializer and
7141	 all operands are constant, put it in memory as well.
7142
7143	FIXME: Avoid trying to fill vector constructors piecemeal.
7144	Output them with output_constant_def below unless we're sure
7145	they're zeros.  This should go away when vector initializers
7146	are treated like VECTOR_CST instead of arrays.  */
7148      else if ((TREE_STATIC (exp)
7149		&& ((mode == BLKmode
7150		     && ! (target != 0 && safe_from_p (target, exp, 1)))
7151		    || TREE_ADDRESSABLE (exp)
7152		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7153			&& (! MOVE_BY_PIECES_P
7154			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7155			     TYPE_ALIGN (type)))
7156			&& ! mostly_zeros_p (exp))))
7157	       || ((modifier == EXPAND_INITIALIZER
7158		    || modifier == EXPAND_CONST_ADDRESS)
7159		   && TREE_CONSTANT (exp)))
7160	{
7161	  rtx constructor = expand_expr_constant (exp, 1, modifier);
7162
7163	  if (modifier != EXPAND_CONST_ADDRESS
7164	      && modifier != EXPAND_INITIALIZER
7165	      && modifier != EXPAND_SUM)
7166	    constructor = validize_mem (constructor);
7167
7168	  return constructor;
7169	}
7170      else
7171	{
7172	  /* Handle calls that pass values in multiple non-contiguous
7173	     locations.  The Irix 6 ABI has examples of this.  */
7174	  if (target == 0 || ! safe_from_p (target, exp, 1)
7175	      || GET_CODE (target) == PARALLEL
7176	      || modifier == EXPAND_STACK_PARM)
7177	    target
7178	      = assign_temp (build_qualified_type (type,
7179						   (TYPE_QUALS (type)
7180						    | (TREE_READONLY (exp)
7181						       * TYPE_QUAL_CONST))),
7182			     0, TREE_ADDRESSABLE (exp), 1);
7183
7184	  store_constructor (exp, target, 0, int_expr_size (exp));
7185	  return target;
7186	}
7187
7188    case MISALIGNED_INDIRECT_REF:
7189    case ALIGN_INDIRECT_REF:
7190    case INDIRECT_REF:
7191      {
7192	tree exp1 = TREE_OPERAND (exp, 0);
7193
7194	if (modifier != EXPAND_WRITE)
7195	  {
7196	    tree t;
7197
7198	    t = fold_read_from_constant_string (exp);
7199	    if (t)
7200	      return expand_expr (t, target, tmode, modifier);
7201	  }
7202
7203	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7204	op0 = memory_address (mode, op0);
7205
7206	if (code == ALIGN_INDIRECT_REF)
7207	  {
7208	    int align = TYPE_ALIGN_UNIT (type);
7209	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7210	    op0 = memory_address (mode, op0);
7211	  }
7212
7213	temp = gen_rtx_MEM (mode, op0);
7214
7215	set_mem_attributes (temp, exp, 0);
7216
7217	/* Resolve the misalignment now, so that we don't have to remember
7218	   to resolve it later.  Of course, this only works for reads.  */
7219	/* ??? When we get around to supporting writes, we'll have to handle
7220	   this in store_expr directly.  The vectorizer isn't generating
7221	   those yet, however.  */
7222	if (code == MISALIGNED_INDIRECT_REF)
7223	  {
7224	    int icode;
7225	    rtx reg, insn;
7226
7227	    gcc_assert (modifier == EXPAND_NORMAL
7228			|| modifier == EXPAND_STACK_PARM);
7229
7230	    /* The vectorizer should have already checked the mode.  */
7231	    icode = movmisalign_optab->handlers[mode].insn_code;
7232	    gcc_assert (icode != CODE_FOR_nothing);
7233
7234	    /* We've already validated the memory, and we're creating a
7235	       new pseudo destination.  The predicates really can't fail.  */
7236	    reg = gen_reg_rtx (mode);
7237
7238	    /* Nor can the insn generator.  */
7239	    insn = GEN_FCN (icode) (reg, temp);
7240	    emit_insn (insn);
7241
7242	    return reg;
7243	  }
7244
7245	return temp;
7246      }
7247
7248    case TARGET_MEM_REF:
7249      {
7250	struct mem_address addr;
7251
7252	get_address_description (exp, &addr);
7253	op0 = addr_for_mem_ref (&addr, true);
7254	op0 = memory_address (mode, op0);
7255	temp = gen_rtx_MEM (mode, op0);
7256	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7257      }
7258      return temp;
7259
7260    case ARRAY_REF:
7262      {
7263	tree array = TREE_OPERAND (exp, 0);
7264	tree index = TREE_OPERAND (exp, 1);
7265
7266	/* Fold an expression like: "foo"[2].
7267	   This is not done in fold so it won't happen inside &.
7268	   Don't fold if this is for wide characters since it's too
7269	   difficult to do correctly and this is a very rare case.  */
7270
7271	if (modifier != EXPAND_CONST_ADDRESS
7272	    && modifier != EXPAND_INITIALIZER
7273	    && modifier != EXPAND_MEMORY)
7274	  {
7275	    tree t = fold_read_from_constant_string (exp);
7276
7277	    if (t)
7278	      return expand_expr (t, target, tmode, modifier);
7279	  }
7280
7281	/* If this is a constant index into a constant array,
7282	   just get the value from the array.  Handle both the cases when
7283	   we have an explicit constructor and when our operand is a variable
7284	   that was declared const.  */
7285
7286	if (modifier != EXPAND_CONST_ADDRESS
7287	    && modifier != EXPAND_INITIALIZER
7288	    && modifier != EXPAND_MEMORY
7289	    && TREE_CODE (array) == CONSTRUCTOR
7290	    && ! TREE_SIDE_EFFECTS (array)
7291	    && TREE_CODE (index) == INTEGER_CST)
7292	  {
7293	    unsigned HOST_WIDE_INT ix;
7294	    tree field, value;
7295
7296	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7297				      field, value)
7298	      if (tree_int_cst_equal (field, index))
7299		{
7300		  if (!TREE_SIDE_EFFECTS (value))
7301		    return expand_expr (fold (value), target, tmode, modifier);
7302		  break;
7303		}
7304	  }
7305
7306	else if (optimize >= 1
7307		 && modifier != EXPAND_CONST_ADDRESS
7308		 && modifier != EXPAND_INITIALIZER
7309		 && modifier != EXPAND_MEMORY
7310		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7311		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7312		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7313		 && targetm.binds_local_p (array))
7314	  {
7315	    if (TREE_CODE (index) == INTEGER_CST)
7316	      {
7317		tree init = DECL_INITIAL (array);
7318
7319		if (TREE_CODE (init) == CONSTRUCTOR)
7320		  {
7321		    unsigned HOST_WIDE_INT ix;
7322		    tree field, value;
7323
7324		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7325					      field, value)
7326		      if (tree_int_cst_equal (field, index))
7327			{
7328			  if (!TREE_SIDE_EFFECTS (value))
7329			    return expand_expr (fold (value), target, tmode,
7330						modifier);
7331			  break;
7332			}
7333		  }
7334		else if (TREE_CODE (init) == STRING_CST)
7335		  {
7336		    tree low_bound = array_ref_low_bound (exp);
7337		    tree index1 = fold_convert (sizetype, index);
7339
7340		    /* Optimize the special case of a zero lower bound.
7341
7342		       We convert the low_bound to sizetype to avoid some problems
7343		       with constant folding.  (E.g. suppose the lower bound is 1,
7344		       and its mode is QI.  Without the conversion, (ARRAY
7345		       + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
7346		       + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
7347
7348		    if (! integer_zerop (low_bound))
7349		      index1 = size_diffop (index1, fold_convert (sizetype,
7350								  low_bound));
7351
7352		    if (0 > compare_tree_int (index1,
7353					      TREE_STRING_LENGTH (init)))
7354		      {
7355			tree type = TREE_TYPE (TREE_TYPE (init));
7356			enum machine_mode mode = TYPE_MODE (type);
7357
7358			if (GET_MODE_CLASS (mode) == MODE_INT
7359			    && GET_MODE_SIZE (mode) == 1)
7360			  return gen_int_mode (TREE_STRING_POINTER (init)
7361					       [TREE_INT_CST_LOW (index1)],
7362					       mode);
7363		      }
7364		  }
7365	      }
7366	  }
7367      }
7368      goto normal_inner_ref;
7369
7370    case COMPONENT_REF:
7371      /* If the operand is a CONSTRUCTOR, we can just extract the
7372	 appropriate field if it is present.  */
7373      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7374	{
7375	  unsigned HOST_WIDE_INT idx;
7376	  tree field, value;
7377
7378	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7379				    idx, field, value)
7380	    if (field == TREE_OPERAND (exp, 1)
7381		/* We can normally use the value of the field in the
7382		   CONSTRUCTOR.  However, if this is a bitfield in
7383		   an integral mode that we can fit in a HOST_WIDE_INT,
7384		   we must mask only the number of bits in the bitfield,
7385		   since this is done implicitly by the constructor.  If
7386		   the bitfield does not meet either of those conditions,
7387		   we can't do this optimization.  */
7388		&& (! DECL_BIT_FIELD (field)
7389		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7390			&& (GET_MODE_BITSIZE (DECL_MODE (field))
7391			    <= HOST_BITS_PER_WIDE_INT))))
7392	      {
7393		if (DECL_BIT_FIELD (field)
7394		    && modifier == EXPAND_STACK_PARM)
7395		  target = 0;
7396		op0 = expand_expr (value, target, tmode, modifier);
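		/* Illustrative example: for an unsigned 3-bit field the
		   value is masked below with (1 << 3) - 1 == 7; for a
		   signed field it is shifted left and then
		   arithmetic-shifted right to sign-extend the stored
		   bits.  */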
7397		if (DECL_BIT_FIELD (field))
7398		  {
7399		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7400		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7401
7402		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
7403		      {
7404			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7405			op0 = expand_and (imode, op0, op1, target);
7406		      }
7407		    else
7408		      {
7409			tree count
7410			  = build_int_cst (NULL_TREE,
7411					   GET_MODE_BITSIZE (imode) - bitsize);
7412
7413			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7414					    target, 0);
7415			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7416					    target, 0);
7417		      }
7418		  }
7419
7420		return op0;
7421	      }
7422	}
7423      goto normal_inner_ref;
7424
7425    case BIT_FIELD_REF:
7426    case ARRAY_RANGE_REF:
7427    normal_inner_ref:
7428      {
7429	enum machine_mode mode1;
7430	HOST_WIDE_INT bitsize, bitpos;
7431	tree offset;
7432	int volatilep = 0;
7433	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7434					&mode1, &unsignedp, &volatilep, true);
7435	rtx orig_op0;
7436
7437	/* If we got back the original object, something is wrong.  Perhaps
7438	   we are evaluating an expression too early.  In any event, don't
7439	   infinitely recurse.  */
7440	gcc_assert (tem != exp);
7441
7442	/* If TEM's type is a union of variable size, pass TARGET to the inner
7443	   computation, since it will need a temporary and TARGET will
7444	   have to do.  This occurs in unchecked conversion in Ada.  */
7445
7446	orig_op0 = op0
7447	  = expand_expr (tem,
7448			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7449			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7450			      != INTEGER_CST)
7451			  && modifier != EXPAND_STACK_PARM
7452			  ? target : NULL_RTX),
7453			 VOIDmode,
7454			 (modifier == EXPAND_INITIALIZER
7455			  || modifier == EXPAND_CONST_ADDRESS
7456			  || modifier == EXPAND_STACK_PARM)
7457			 ? modifier : EXPAND_NORMAL);
7458
7459	/* If this is a constant, put it into a register if it is a legitimate
7460	   constant, OFFSET is 0, and we won't try to extract outside the
7461	   register (in case we were passed a partially uninitialized object
7462	   or a view_conversion to a larger size).  Force the constant to
7463	   memory otherwise.  */
7464	if (CONSTANT_P (op0))
7465	  {
7466	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7467	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7468		&& offset == 0
7469		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7470	      op0 = force_reg (mode, op0);
7471	    else
7472	      op0 = validize_mem (force_const_mem (mode, op0));
7473	  }
7474
7475	/* Otherwise, if this object is not in memory and we either have an
7476	   offset, a BLKmode result, or a reference outside the object, put it
7477	   there.  Such cases can occur in Ada if we have unchecked conversion
7478	   of an expression from a scalar type to an array or record type or
7479	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
7480	else if (!MEM_P (op0)
7481		 && (offset != 0
7482		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7483		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7484	  {
7485	    tree nt = build_qualified_type (TREE_TYPE (tem),
7486					    (TYPE_QUALS (TREE_TYPE (tem))
7487					     | TYPE_QUAL_CONST));
7488	    rtx memloc = assign_temp (nt, 1, 1, 1);
7489
7490	    emit_move_insn (memloc, op0);
7491	    op0 = memloc;
7492	  }
7493
7494	if (offset != 0)
7495	  {
7496	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7497					  EXPAND_SUM);
7498
7499	    gcc_assert (MEM_P (op0));
7500
7501#ifdef POINTERS_EXTEND_UNSIGNED
7502	    if (GET_MODE (offset_rtx) != Pmode)
7503	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7504#else
7505	    if (GET_MODE (offset_rtx) != ptr_mode)
7506	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7507#endif
7508
7509	    if (GET_MODE (op0) == BLKmode
7510		/* A constant address in OP0 can have VOIDmode; we must
7511		   not try to call force_reg in that case.  */
7512		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7513		&& bitsize != 0
7514		&& (bitpos % bitsize) == 0
7515		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7516		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7517	      {
7518		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7519		bitpos = 0;
7520	      }
7521
7522	    op0 = offset_address (op0, offset_rtx,
7523				  highest_pow2_factor (offset));
7524	  }
7525
7526	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7527	   record its alignment as BIGGEST_ALIGNMENT.  */
7528	if (MEM_P (op0) && bitpos == 0 && offset != 0
7529	    && is_aligning_offset (offset, tem))
7530	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7531
7532	/* Don't forget about volatility even if this is a bitfield.  */
7533	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7534	  {
7535	    if (op0 == orig_op0)
7536	      op0 = copy_rtx (op0);
7537
7538	    MEM_VOLATILE_P (op0) = 1;
7539	  }
7540
7541	/* The following code doesn't handle CONCAT.
7542	   Assume only bitpos == 0 can be used for CONCAT, due to
7543	   one-element arrays having the same mode as their element.  */
7544	if (GET_CODE (op0) == CONCAT)
7545	  {
7546	    gcc_assert (bitpos == 0
7547			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7548	    return op0;
7549	  }
7550
7551	/* In cases where an aligned union has an unaligned object
7552	   as a field, we might be extracting a BLKmode value from
7553	   an integer-mode (e.g., SImode) object.  Handle this case
7554	   by doing the extract into an object as wide as the field
7555	   (which we know to be the width of a basic mode), then
7556	   storing into memory, and changing the mode to BLKmode.  */
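	/* E.g. (illustrative): a BLKmode record occupying 16 bits
	   inside an SImode union member: the bits are fetched in an
	   integer mode below, stored to a stack temporary, and the
	   temporary's MEM is then relabeled as BLKmode.  */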
7557	if (mode1 == VOIDmode
7558	    || REG_P (op0) || GET_CODE (op0) == SUBREG
7559	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7560		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7561		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7562		&& modifier != EXPAND_CONST_ADDRESS
7563		&& modifier != EXPAND_INITIALIZER)
7564	    /* If the field isn't aligned enough to fetch as a memref,
7565	       fetch it as a bit field.  */
7566	    || (mode1 != BLKmode
7567		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7568		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7569		      || (MEM_P (op0)
7570			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7571			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7572		     && ((modifier == EXPAND_CONST_ADDRESS
7573			  || modifier == EXPAND_INITIALIZER)
7574			 ? STRICT_ALIGNMENT
7575			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7576		    || (bitpos % BITS_PER_UNIT != 0)))
7577	    /* If the type and the field are a constant size and the
7578	       size of the type isn't the same size as the bitfield,
7579	       we must use bitfield operations.  */
7580	    || (bitsize >= 0
7581		&& TYPE_SIZE (TREE_TYPE (exp))
7582		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7583		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7584					  bitsize)))
7585	  {
7586	    enum machine_mode ext_mode = mode;
7587
7588	    if (ext_mode == BLKmode
7589		&& ! (target != 0 && MEM_P (op0)
7590		      && MEM_P (target)
7591		      && bitpos % BITS_PER_UNIT == 0))
7592	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7593
7594	    if (ext_mode == BLKmode)
7595	      {
7596		if (target == 0)
7597		  target = assign_temp (type, 0, 1, 1);
7598
7599		if (bitsize == 0)
7600		  return target;
7601
7602		/* In this case, BITPOS must start at a byte boundary and
7603		   TARGET, if specified, must be a MEM.  */
7604		gcc_assert (MEM_P (op0)
7605			    && (!target || MEM_P (target))
7606			    && !(bitpos % BITS_PER_UNIT));
7607
7608		emit_block_move (target,
7609				 adjust_address (op0, VOIDmode,
7610						 bitpos / BITS_PER_UNIT),
7611				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7612					  / BITS_PER_UNIT),
7613				 (modifier == EXPAND_STACK_PARM
7614				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7615
7616		return target;
7617	      }
7618
7619	    op0 = validize_mem (op0);
7620
7621	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7622	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7623
7624	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7625				     (modifier == EXPAND_STACK_PARM
7626				      ? NULL_RTX : target),
7627				     ext_mode, ext_mode);
7628
7629	    /* If the result is a record type and BITSIZE is narrower than
7630	       the mode of OP0, an integral mode, and this is a big endian
7631	       machine, we must put the field into the high-order bits.  */
7632	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7633		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7634		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7635	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7636				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7637					    - bitsize),
7638				  op0, 1);
7639
7640	    /* If the result type is BLKmode, store the data into a temporary
7641	       of the appropriate type, but with the mode corresponding to the
7642	       mode for the data we have (op0's mode).  It's tempting to make
7643	       this a constant type, since we know it's only being stored once,
7644	       but that can cause problems if we are taking the address of this
7645	       COMPONENT_REF because the MEM of any reference via that address
7646	       will have flags corresponding to the type, which will not
7647	       necessarily be constant.  */
7648	    if (mode == BLKmode)
7649	      {
7650		rtx new
7651		  = assign_stack_temp_for_type
7652		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7653
7654		emit_move_insn (new, op0);
7655		op0 = copy_rtx (new);
7656		PUT_MODE (op0, BLKmode);
7657		set_mem_attributes (op0, exp, 1);
7658	      }
7659
7660	    return op0;
7661	  }
7662
7663	/* If the result is BLKmode, use that to access the object
7664	   now as well.  */
7665	if (mode == BLKmode)
7666	  mode1 = BLKmode;
7667
7668	/* Get a reference to just this component.  */
7669	if (modifier == EXPAND_CONST_ADDRESS
7670	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7671	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7672	else
7673	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7674
7675	if (op0 == orig_op0)
7676	  op0 = copy_rtx (op0);
7677
7678	set_mem_attributes (op0, exp, 0);
7679	if (REG_P (XEXP (op0, 0)))
7680	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7681
7682	MEM_VOLATILE_P (op0) |= volatilep;
7683	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7684	    || modifier == EXPAND_CONST_ADDRESS
7685	    || modifier == EXPAND_INITIALIZER)
7686	  return op0;
7687	else if (target == 0)
7688	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7689
7690	convert_move (target, op0, unsignedp);
7691	return target;
7692      }
7693
7694    case OBJ_TYPE_REF:
7695      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7696
7697    case CALL_EXPR:
7698      /* Check for a built-in function.  */
7699      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7700	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7701	      == FUNCTION_DECL)
7702	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7703	{
7704	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7705	      == BUILT_IN_FRONTEND)
7706	    return lang_hooks.expand_expr (exp, original_target,
7707					   tmode, modifier,
7708					   alt_rtl);
7709	  else
7710	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7711	}
7712
7713      return expand_call (exp, target, ignore);
7714
7715    case NON_LVALUE_EXPR:
7716    case NOP_EXPR:
7717    case CONVERT_EXPR:
7718      if (TREE_OPERAND (exp, 0) == error_mark_node)
7719	return const0_rtx;
7720
7721      if (TREE_CODE (type) == UNION_TYPE)
7722	{
7723	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7724
7725	  /* If both input and output are BLKmode, this conversion isn't doing
7726	     anything except possibly changing memory attributes.  */
7727	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7728	    {
7729	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7730					modifier);
7731
7732	      result = copy_rtx (result);
7733	      set_mem_attributes (result, exp, 0);
7734	      return result;
7735	    }
7736
7737	  if (target == 0)
7738	    {
7739	      if (TYPE_MODE (type) != BLKmode)
7740		target = gen_reg_rtx (TYPE_MODE (type));
7741	      else
7742		target = assign_temp (type, 0, 1, 1);
7743	    }
7744
7745	  if (MEM_P (target))
7746	    /* Store data into beginning of memory target.  */
7747	    store_expr (TREE_OPERAND (exp, 0),
7748			adjust_address (target, TYPE_MODE (valtype), 0),
7749			modifier == EXPAND_STACK_PARM);
7750
7751	  else
7752	    {
7753	      gcc_assert (REG_P (target));
7754
7755	      /* Store this field into a union of the proper type.  */
7756	      store_field (target,
7757			   MIN ((int_size_in_bytes (TREE_TYPE
7758						    (TREE_OPERAND (exp, 0)))
7759				 * BITS_PER_UNIT),
7760				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7761			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7762			   type, 0);
7763	    }
7764
7765	  /* Return the entire union.  */
7766	  return target;
7767	}
7768
7769      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7770	{
7771	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7772			     modifier);
7773
7774	  /* If the signedness of the conversion differs and OP0 is
7775	     a promoted SUBREG, clear that indication since we now
7776	     have to do the proper extension.  */
7777	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7778	      && GET_CODE (op0) == SUBREG)
7779	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7780
7781	  return REDUCE_BIT_FIELD (op0);
7782	}
7783
7784      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7785			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7786      if (GET_MODE (op0) == mode)
7787	;
7788
7789      /* If OP0 is a constant, just convert it into the proper mode.  */
7790      else if (CONSTANT_P (op0))
7791	{
7792	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7793	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7794
7795	  if (modifier == EXPAND_INITIALIZER)
7796	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
7797				       subreg_lowpart_offset (mode,
7798							      inner_mode));
7799	  else
7800	    op0 = convert_modes (mode, inner_mode, op0,
7801				 TYPE_UNSIGNED (inner_type));
7802	}
7803
7804      else if (modifier == EXPAND_INITIALIZER)
7805	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7806
7807      else if (target == 0)
7808	op0 = convert_to_mode (mode, op0,
7809			       TYPE_UNSIGNED (TREE_TYPE
7810					      (TREE_OPERAND (exp, 0))));
7811      else
7812	{
7813	  convert_move (target, op0,
7814			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7815	  op0 = target;
7816	}
7817
7818      return REDUCE_BIT_FIELD (op0);
7819
7820    case VIEW_CONVERT_EXPR:
7821      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7822
7823      /* If the input and output modes are both the same, we are done.  */
7824      if (TYPE_MODE (type) == GET_MODE (op0))
7825	;
7826      /* If neither mode is BLKmode, and both modes are the same size
7827	 then we can use gen_lowpart.  */
7828      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7829	       && GET_MODE_SIZE (TYPE_MODE (type))
7830		   == GET_MODE_SIZE (GET_MODE (op0)))
7831	{
7832	  if (GET_CODE (op0) == SUBREG)
7833	    op0 = force_reg (GET_MODE (op0), op0);
7834	  op0 = gen_lowpart (TYPE_MODE (type), op0);
7835	}
7836      /* If both modes are integral, then we can convert from one to the
7837	 other.  */
7838      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7839	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7840	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7841			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7842      /* As a last resort, spill op0 to memory, and reload it in a
7843	 different mode.  */
7844      else if (!MEM_P (op0))
7845	{
7846	  /* If the operand is not a MEM, force it into memory.  Since we
7847	     are going to be changing the mode of the MEM, don't call
7848	     force_const_mem for constants because we don't allow pool
7849	     constants to change mode.  */
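	  /* An illustrative case: a VIEW_CONVERT_EXPR from a DFmode
	     scalar held in a register to a BLKmode record type; the
	     scalar is spilled to a stack temporary here so the bits can
	     be re-read in the outer type's mode below.  */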
7850	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7851
7852	  gcc_assert (!TREE_ADDRESSABLE (exp));
7853
7854	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7855	    target
7856	      = assign_stack_temp_for_type
7857		(TYPE_MODE (inner_type),
7858		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7859
7860	  emit_move_insn (target, op0);
7861	  op0 = target;
7862	}
7863
7864      /* At this point, OP0 is in the correct mode.  If the output type is such
7865	 that the operand is known to be aligned, indicate that it is.
7866	 Otherwise, we need only be concerned about alignment for non-BLKmode
7867	 results.  */
7868      if (MEM_P (op0))
7869	{
7870	  op0 = copy_rtx (op0);
7871
7872	  if (TYPE_ALIGN_OK (type))
7873	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7874	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7875		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7876	    {
7877	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7878	      HOST_WIDE_INT temp_size
7879		= MAX (int_size_in_bytes (inner_type),
7880		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7881	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7882						    temp_size, 0, type);
7883	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7884
7885	      gcc_assert (!TREE_ADDRESSABLE (exp));
7886
7887	      if (GET_MODE (op0) == BLKmode)
7888		emit_block_move (new_with_op0_mode, op0,
7889				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7890				 (modifier == EXPAND_STACK_PARM
7891				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7892	      else
7893		emit_move_insn (new_with_op0_mode, op0);
7894
7895	      op0 = new;
7896	    }
7897
7898	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7899	}
7900
7901      return op0;
7902
7903    case PLUS_EXPR:
7904      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7905	 something else, make sure we add the register to the constant and
7906	 then to the other thing.  This case can occur during strength
7907	 reduction and doing it this way will produce better code if the
7908	 frame pointer or argument pointer is eliminated.
7909
7910	 fold-const.c will ensure that the constant is always in the inner
7911	 PLUS_EXPR, so the only case we need to do anything about is if
7912	 sp, ap, or fp is our second argument, in which case we must swap
7913	 the innermost first argument and our second argument.  */
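      /* Illustrative example (hypothetical trees): "(x + 4) + fp" keeps
	 the constant in the inner PLUS_EXPR, so it is rewritten below as
	 "(fp + 4) + x"; after frame pointer elimination, plus_constant
	 can fold the elimination offset into the 4.  */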
7914
7915      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7916	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7917	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7918	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7919	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7920	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7921	{
7922	  tree t = TREE_OPERAND (exp, 1);
7923
7924	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7925	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7926	}
7927
7928      /* If the result is to be ptr_mode and we are adding an integer to
7929	 something, we might be forming a constant.  So try to use
7930	 plus_constant.  If it produces a sum and we can't accept it,
7931	 use force_operand.  This allows P = &ARR[const] to generate
7932	 efficient code on machines where a SYMBOL_REF is not a valid
7933	 address.
7934
7935	 If this is an EXPAND_SUM call, always return the sum.  */
7936      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7937	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7938	{
7939	  if (modifier == EXPAND_STACK_PARM)
7940	    target = 0;
7941	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7942	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7943	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7944	    {
7945	      rtx constant_part;
7946
7947	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7948				 EXPAND_SUM);
7949	      /* Use immed_double_const to ensure that the constant is
7950		 truncated according to the mode of OP1, then sign extended
7951		 to a HOST_WIDE_INT.  Using the constant directly can result
7952		 in non-canonical RTL in a 64x32 cross compile.  */
7953	      constant_part
7954		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7955				      (HOST_WIDE_INT) 0,
7956				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7957	      op1 = plus_constant (op1, INTVAL (constant_part));
7958	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7959		op1 = force_operand (op1, target);
7960	      return REDUCE_BIT_FIELD (op1);
7961	    }
7962
7963	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7964		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7965		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7966	    {
7967	      rtx constant_part;
7968
7969	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7970				 (modifier == EXPAND_INITIALIZER
7971				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7972	      if (! CONSTANT_P (op0))
7973		{
7974		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7975				     VOIDmode, modifier);
7976		  /* Return a PLUS if modifier says it's OK.  */
7977		  if (modifier == EXPAND_SUM
7978		      || modifier == EXPAND_INITIALIZER)
7979		    return simplify_gen_binary (PLUS, mode, op0, op1);
7980		  goto binop2;
7981		}
7982	      /* Use immed_double_const to ensure that the constant is
7983		 truncated according to the mode of OP0, then sign extended
7984		 to a HOST_WIDE_INT.  Using the constant directly can result
7985		 in non-canonical RTL in a 64x32 cross compile.  */
7986	      constant_part
7987		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7988				      (HOST_WIDE_INT) 0,
7989				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7990	      op0 = plus_constant (op0, INTVAL (constant_part));
7991	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7992		op0 = force_operand (op0, target);
7993	      return REDUCE_BIT_FIELD (op0);
7994	    }
7995	}
7996
7997      /* No sense saving up arithmetic to be done
7998	 if it's all in the wrong mode to form part of an address.
7999	 And force_operand won't know whether to sign-extend or
8000	 zero-extend.  */
8001      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8002	  || mode != ptr_mode)
8003	{
8004	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8005			   subtarget, &op0, &op1, 0);
8006	  if (op0 == const0_rtx)
8007	    return op1;
8008	  if (op1 == const0_rtx)
8009	    return op0;
8010	  goto binop2;
8011	}
8012
8013      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8014		       subtarget, &op0, &op1, modifier);
8015      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8016
8017    case MINUS_EXPR:
8018      /* For initializers, we are allowed to return a MINUS of two
8019	 symbolic constants.  Here we handle all cases when both operands
8020	 are constant, for the sake of such an initializer.  */
8023      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8024	  && really_constant_p (TREE_OPERAND (exp, 0))
8025	  && really_constant_p (TREE_OPERAND (exp, 1)))
8026	{
8027	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8028			   NULL_RTX, &op0, &op1, modifier);
8029
8030	  /* If the last operand is a CONST_INT, use plus_constant of
8031	     the negated constant.  Else make the MINUS.  */
8032	  if (GET_CODE (op1) == CONST_INT)
8033	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8034	  else
8035	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8036	}
8037
8038      /* No sense saving up arithmetic to be done
8039	 if it's all in the wrong mode to form part of an address.
8040	 And force_operand won't know whether to sign-extend or
8041	 zero-extend.  */
8042      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8043	  || mode != ptr_mode)
8044	goto binop;
8045
8046      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8047		       subtarget, &op0, &op1, modifier);
8048
8049      /* Convert A - const to A + (-const).  */
8050      if (GET_CODE (op1) == CONST_INT)
8051	{
8052	  op1 = negate_rtx (mode, op1);
8053	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8054	}
8055
8056      goto binop2;
8057
8058    case MULT_EXPR:
8059      /* If first operand is constant, swap them.
8060	 Thus the following special case checks need only
8061	 check the second operand.  */
8062      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8063	{
8064	  tree t1 = TREE_OPERAND (exp, 0);
8065	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8066	  TREE_OPERAND (exp, 1) = t1;
8067	}
8068
8069      /* Attempt to return something suitable for generating an
8070	 indexed address, for machines that support that.  */
8071
8072      if (modifier == EXPAND_SUM && mode == ptr_mode
8073	  && host_integerp (TREE_OPERAND (exp, 1), 0))
8074	{
8075	  tree exp1 = TREE_OPERAND (exp, 1);
8076
8077	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8078			     EXPAND_SUM);
8079
8080	  if (!REG_P (op0))
8081	    op0 = force_operand (op0, NULL_RTX);
8082	  if (!REG_P (op0))
8083	    op0 = copy_to_mode_reg (mode, op0);
8084
8085	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8086			       gen_int_mode (tree_low_cst (exp1, 0),
8087					     TYPE_MODE (TREE_TYPE (exp1)))));
8088	}
8089
8090      if (modifier == EXPAND_STACK_PARM)
8091	target = 0;
8092
8093      /* Check for multiplying things that have been extended
8094	 from a narrower type.  If this machine supports multiplying
8095	 in that narrower type with a result in the desired type,
8096	 do it that way, and avoid the explicit type-conversion.  */
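      /* E.g. (illustrative): "(long long) i * (long long) j" with int
	 operands can use a 32x32->64 widening multiply insn where the
	 target provides one, avoiding the explicit extensions to
	 DImode.  */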

      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);
      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (subexp0) == NOP_EXPR
	  && TREE_CODE (subexp1) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
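		  /* Expand so that the unsigned source always ends up
		     in op0; this is the operand order the usmul_widen
		     pattern is assumed to expect.  */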
		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op1, &op0, 0);

		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
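		  /* Only the opposite-signedness widening multiply is
		     available here, so use it and then have
		     expand_mult_highpart_adjust correct the high half
		     of the product for the signedness we actually
		     want.  */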
		  rtx htem, hipart;
		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (TREE_OPERAND (exp, 1)),
					 unsignedp);
		  else
		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible
	 by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1).  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1).  */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc),
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
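	/* E.g., with one-bit fields, "s.a |= s.b" becomes
	   "if (s.b) s.a = 1;": a test plus a conditional store of a
	   constant, instead of extracting, OR-ing, and re-inserting
	   the destination field.  */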
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);

	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as initialization
	 constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (GET_CODE (exp) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
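      /* Zero-extension: mask EXP down to its low PREC bits.  The mask
	 2**PREC - 1 is built from two HOST_WIDE_INT halves when PREC
	 exceeds a single host word.  */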
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
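      /* Sign-extension: shift left so the field's sign bit becomes the
	 mode's sign bit, then shift right arithmetically.  E.g., a
	 5-bit signed field in a 32-bit mode is reduced by shifting
	 left 27 and then right 27.  */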
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
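/* That is, OFFSET must have the form (- (T) &EXP) & MASK, possibly with
   conversions around each operand, where MASK is a constant of the form
   2**K - 1 larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */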

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}

/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* The array must be read-only, non-volatile, and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must
	 be constant and within the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
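  /* For example, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
     becomes ((x >> 3) & 1) ^ 1.  */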

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

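  /* Emit "TARGET = 1; if (comparison) goto L; TARGET = 0; L:", with
     the two constants swapped when INVERT is set.  */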
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}


/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
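  /* E.g., for a switch whose cases span 5 .. 10, INDEX here is the
     original value minus 5 and RANGE is 5; original values below 5
     wrap around to large unsigned numbers, so the single unsigned
     INDEX > RANGE test also rejects them.  */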

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Only vector modes make sense here.  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* If the target supports the mode directly in hardware, we are done.  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI; however, that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"