1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4   Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "machmode.h"
28#include "real.h"
29#include "rtl.h"
30#include "tree.h"
31#include "flags.h"
32#include "regs.h"
33#include "hard-reg-set.h"
34#include "except.h"
35#include "function.h"
36#include "insn-config.h"
37#include "insn-attr.h"
38/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
39#include "expr.h"
40#include "optabs.h"
41#include "libfuncs.h"
42#include "recog.h"
43#include "reload.h"
44#include "output.h"
45#include "typeclass.h"
46#include "toplev.h"
47#include "ggc.h"
48#include "langhooks.h"
49#include "intl.h"
50#include "tm_p.h"
51#include "tree-iterator.h"
52#include "tree-pass.h"
53#include "tree-flow.h"
54#include "target.h"
55#include "timevar.h"
56
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should be processed from last to first if the stack and args grow
   in opposite directions, but only if we have push insns.  */
62
63#ifdef PUSH_ROUNDING
64
65#ifndef PUSH_ARGS_REVERSED
66#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
68#endif
69#endif
70
71#endif
72
73#ifndef STACK_PUSH_CODE
74#ifdef STACK_GROWS_DOWNWARD
75#define STACK_PUSH_CODE PRE_DEC
76#else
77#define STACK_PUSH_CODE PRE_INC
78#endif
79#endif
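
/* For illustration only (an RTL sketch, not code that is compiled here):
   with STACK_GROWS_DOWNWARD defined, STACK_PUSH_CODE is PRE_DEC, so a
   one-word push ends up looking roughly like

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 100))

   while an upward-growing stack would use PRE_INC.  The SImode and the
   pseudo number 100 are made up for the example.  */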
80
81
82/* If this is nonzero, we do not bother generating VOLATILE
83   around volatile memory references, and we are willing to
84   output indirect addresses.  If cse is to follow, we reject
85   indirect addresses so a useful potential cse is generated;
86   if it is used only once, instruction combination will produce
87   the same indirect address eventually.  */
88int cse_not_expected;
89
90/* This structure is used by move_by_pieces to describe the move to
91   be performed.  */
92struct move_by_pieces
93{
94  rtx to;
95  rtx to_addr;
96  int autinc_to;
97  int explicit_inc_to;
98  rtx from;
99  rtx from_addr;
100  int autinc_from;
101  int explicit_inc_from;
102  unsigned HOST_WIDE_INT len;
103  HOST_WIDE_INT offset;
104  int reverse;
105};
106
/* This structure is used by store_by_pieces and clear_by_pieces to
   describe the store or clear to be performed.  */
109
110struct store_by_pieces
111{
112  rtx to;
113  rtx to_addr;
114  int autinc_to;
115  int explicit_inc_to;
116  unsigned HOST_WIDE_INT len;
117  HOST_WIDE_INT offset;
118  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119  void *constfundata;
120  int reverse;
121};
122
123static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124						     unsigned int,
125						     unsigned int);
126static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127			      struct move_by_pieces *);
128static bool block_move_libcall_safe_for_call_parm (void);
129static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131static tree emit_block_move_libcall_fn (int);
132static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137			       struct store_by_pieces *);
138static rtx clear_storage_via_libcall (rtx, rtx, bool);
139static tree clear_storage_libcall_fn (int);
140static rtx compress_float_constant (rtx, rtx);
141static rtx get_subtarget (rtx);
142static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143				     HOST_WIDE_INT, enum machine_mode,
144				     tree, tree, int, int);
145static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147			tree, tree, int);
148
149static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150
151static int is_aligning_offset (tree, tree);
152static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153			     enum expand_modifier);
154static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155static rtx do_store_flag (tree, rtx, enum machine_mode, int);
156#ifdef PUSH_ROUNDING
157static void emit_single_push_insn (enum machine_mode, rtx, tree);
158#endif
159static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160static rtx const_vector_from_tree (tree);
161static void write_complex_part (rtx, rtx, bool);
162
163/* Record for each mode whether we can move a register directly to or
164   from an object of that mode in memory.  If we can't, we won't try
165   to use that mode directly when accessing a field of that mode.  */
166
167static char direct_load[NUM_MACHINE_MODES];
168static char direct_store[NUM_MACHINE_MODES];
169
170/* Record for each mode whether we can float-extend from memory.  */
171
172static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174/* This macro is used to determine whether move_by_pieces should be called
175   to perform a structure copy.  */
176#ifndef MOVE_BY_PIECES_P
177#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179   < (unsigned int) MOVE_RATIO)
180#endif
181
182/* This macro is used to determine whether clear_by_pieces should be
183   called to clear storage.  */
184#ifndef CLEAR_BY_PIECES_P
185#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187   < (unsigned int) CLEAR_RATIO)
188#endif
189
190/* This macro is used to determine whether store_by_pieces should be
191   called to "memset" storage with byte values other than zero, or
192   to "memcpy" storage when the source is a constant string.  */
193#ifndef STORE_BY_PIECES_P
194#define STORE_BY_PIECES_P(SIZE, ALIGN) \
195  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196   < (unsigned int) MOVE_RATIO)
197#endif
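
/* For illustration: emit_block_move below consults MOVE_BY_PIECES_P to
   decide whether to expand a copy inline, roughly

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   so a port can steer this choice through MOVE_RATIO or by overriding
   the macro in its target headers.  */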
198
199/* This array records the insn_code of insns to perform block moves.  */
200enum insn_code movmem_optab[NUM_MACHINE_MODES];
201
202/* This array records the insn_code of insns to perform block sets.  */
203enum insn_code setmem_optab[NUM_MACHINE_MODES];
204
205/* These arrays record the insn_code of three different kinds of insns
206   to perform block compares.  */
207enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
209enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210
211/* Synchronization primitives.  */
212enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234
235/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
236
237#ifndef SLOW_UNALIGNED_ACCESS
238#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
239#endif
240
241/* This is run once per compilation to set up which modes can be used
242   directly in memory and to initialize the block move optab.  */
243
244void
245init_expr_once (void)
246{
247  rtx insn, pat;
248  enum machine_mode mode;
249  int num_clobbers;
250  rtx mem, mem1;
251  rtx reg;
252
253  /* Try indexing by frame ptr and try by stack ptr.
254     It is known that on the Convex the stack ptr isn't a valid index.
255     With luck, one or the other is valid on any machine.  */
256  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258
259  /* A scratch register we can modify in-place below to avoid
260     useless RTL allocations.  */
261  reg = gen_rtx_REG (VOIDmode, -1);
262
263  insn = rtx_alloc (INSN);
264  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265  PATTERN (insn) = pat;
266
267  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268       mode = (enum machine_mode) ((int) mode + 1))
269    {
270      int regno;
271
272      direct_load[(int) mode] = direct_store[(int) mode] = 0;
273      PUT_MODE (mem, mode);
274      PUT_MODE (mem1, mode);
275      PUT_MODE (reg, mode);
276
277      /* See if there is some register that can be used in this mode and
278	 directly loaded or stored from memory.  */
279
280      if (mode != VOIDmode && mode != BLKmode)
281	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283	     regno++)
284	  {
285	    if (! HARD_REGNO_MODE_OK (regno, mode))
286	      continue;
287
288	    REGNO (reg) = regno;
289
290	    SET_SRC (pat) = mem;
291	    SET_DEST (pat) = reg;
292	    if (recog (pat, insn, &num_clobbers) >= 0)
293	      direct_load[(int) mode] = 1;
294
295	    SET_SRC (pat) = mem1;
296	    SET_DEST (pat) = reg;
297	    if (recog (pat, insn, &num_clobbers) >= 0)
298	      direct_load[(int) mode] = 1;
299
300	    SET_SRC (pat) = reg;
301	    SET_DEST (pat) = mem;
302	    if (recog (pat, insn, &num_clobbers) >= 0)
303	      direct_store[(int) mode] = 1;
304
305	    SET_SRC (pat) = reg;
306	    SET_DEST (pat) = mem1;
307	    if (recog (pat, insn, &num_clobbers) >= 0)
308	      direct_store[(int) mode] = 1;
309	  }
310    }
311
312  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313
314  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315       mode = GET_MODE_WIDER_MODE (mode))
316    {
317      enum machine_mode srcmode;
318      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319	   srcmode = GET_MODE_WIDER_MODE (srcmode))
320	{
321	  enum insn_code ic;
322
323	  ic = can_extend_p (mode, srcmode, 0);
324	  if (ic == CODE_FOR_nothing)
325	    continue;
326
327	  PUT_MODE (mem, srcmode);
328
329	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330	    float_extend_from_mem[mode][srcmode] = true;
331	}
332    }
333}
334
335/* This is run at the start of compiling a function.  */
336
337void
338init_expr (void)
339{
340  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341}
342
343/* Copy data from FROM to TO, where the machine modes are not the same.
344   Both modes may be integer, or both may be floating.
345   UNSIGNEDP should be nonzero if FROM is an unsigned type.
346   This causes zero-extension instead of sign-extension.  */
347
348void
349convert_move (rtx to, rtx from, int unsignedp)
350{
351  enum machine_mode to_mode = GET_MODE (to);
352  enum machine_mode from_mode = GET_MODE (from);
353  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
355  enum insn_code code;
356  rtx libcall;
357
358  /* rtx code for making an equivalent value.  */
359  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361
362
363  gcc_assert (to_real == from_real);
364  gcc_assert (to_mode != BLKmode);
365  gcc_assert (from_mode != BLKmode);
366
367  /* If the source and destination are already the same, then there's
368     nothing to do.  */
369  if (to == from)
370    return;
371
372  /* If FROM is a SUBREG that indicates that we have already done at least
373     the required extension, strip it.  We don't handle such SUBREGs as
374     TO here.  */
375
376  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
377      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
378	  >= GET_MODE_SIZE (to_mode))
379      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
380    from = gen_lowpart (to_mode, from), from_mode = to_mode;
381
382  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
383
384  if (to_mode == from_mode
385      || (from_mode == VOIDmode && CONSTANT_P (from)))
386    {
387      emit_move_insn (to, from);
388      return;
389    }
390
391  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
392    {
393      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
394
395      if (VECTOR_MODE_P (to_mode))
396	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
397      else
398	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
399
400      emit_move_insn (to, from);
401      return;
402    }
403
404  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
405    {
406      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
407      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
408      return;
409    }
410
411  if (to_real)
412    {
413      rtx value, insns;
414      convert_optab tab;
415
416      gcc_assert ((GET_MODE_PRECISION (from_mode)
417		   != GET_MODE_PRECISION (to_mode))
418		  || (DECIMAL_FLOAT_MODE_P (from_mode)
419		      != DECIMAL_FLOAT_MODE_P (to_mode)));
420
421      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
422	/* Conversion between decimal float and binary float, same size.  */
423	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
424      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
425	tab = sext_optab;
426      else
427	tab = trunc_optab;
428
429      /* Try converting directly if the insn is supported.  */
430
431      code = tab->handlers[to_mode][from_mode].insn_code;
432      if (code != CODE_FOR_nothing)
433	{
434	  emit_unop_insn (code, to, from,
435			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
436	  return;
437	}
438
439      /* Otherwise use a libcall.  */
440      libcall = tab->handlers[to_mode][from_mode].libfunc;
441
442      /* Is this conversion implemented yet?  */
443      gcc_assert (libcall);
444
445      start_sequence ();
446      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
447				       1, from, from_mode);
448      insns = get_insns ();
449      end_sequence ();
450      emit_libcall_block (insns, to, value,
451			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
452								       from)
453			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
454      return;
455    }
456
  /* Handle pointer conversion.  */
458  /* Targets are expected to provide conversion insns between PxImode and
459     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
460  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
461    {
462      enum machine_mode full_mode
463	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
464
465      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
466		  != CODE_FOR_nothing);
467
468      if (full_mode != from_mode)
469	from = convert_to_mode (full_mode, from, unsignedp);
470      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
471		      to, from, UNKNOWN);
472      return;
473    }
474  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
475    {
476      rtx new_from;
477      enum machine_mode full_mode
478	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
479
480      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
481		  != CODE_FOR_nothing);
482
483      if (to_mode == full_mode)
484	{
485	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
486			  to, from, UNKNOWN);
487	  return;
488	}
489
490      new_from = gen_reg_rtx (full_mode);
491      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
492		      new_from, from, UNKNOWN);
493
494      /* else proceed to integer conversions below.  */
495      from_mode = full_mode;
496      from = new_from;
497    }
498
499  /* Now both modes are integers.  */
500
501  /* Handle expanding beyond a word.  */
502  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
503      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
504    {
505      rtx insns;
506      rtx lowpart;
507      rtx fill_value;
508      rtx lowfrom;
509      int i;
510      enum machine_mode lowpart_mode;
511      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
512
513      /* Try converting directly if the insn is supported.  */
514      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
515	  != CODE_FOR_nothing)
516	{
517	  /* If FROM is a SUBREG, put it into a register.  Do this
518	     so that we always generate the same set of insns for
519	     better cse'ing; if an intermediate assignment occurred,
520	     we won't be doing the operation directly on the SUBREG.  */
521	  if (optimize > 0 && GET_CODE (from) == SUBREG)
522	    from = force_reg (from_mode, from);
523	  emit_unop_insn (code, to, from, equiv_code);
524	  return;
525	}
526      /* Next, try converting via full word.  */
527      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
528	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
529		   != CODE_FOR_nothing))
530	{
531	  if (REG_P (to))
532	    {
533	      if (reg_overlap_mentioned_p (to, from))
534		from = force_reg (from_mode, from);
535	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
536	    }
537	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
538	  emit_unop_insn (code, to,
539			  gen_lowpart (word_mode, to), equiv_code);
540	  return;
541	}
542
543      /* No special multiword conversion insn; do it by hand.  */
544      start_sequence ();
545
546      /* Since we will turn this into a no conflict block, we must ensure
547	 that the source does not overlap the target.  */
548
549      if (reg_overlap_mentioned_p (to, from))
550	from = force_reg (from_mode, from);
551
552      /* Get a copy of FROM widened to a word, if necessary.  */
553      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
554	lowpart_mode = word_mode;
555      else
556	lowpart_mode = from_mode;
557
558      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
559
560      lowpart = gen_lowpart (lowpart_mode, to);
561      emit_move_insn (lowpart, lowfrom);
562
563      /* Compute the value to put in each remaining word.  */
564      if (unsignedp)
565	fill_value = const0_rtx;
566      else
567	{
568#ifdef HAVE_slt
569	  if (HAVE_slt
570	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
571	      && STORE_FLAG_VALUE == -1)
572	    {
573	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
574			     lowpart_mode, 0);
575	      fill_value = gen_reg_rtx (word_mode);
576	      emit_insn (gen_slt (fill_value));
577	    }
578	  else
579#endif
580	    {
581	      fill_value
582		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
583				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
584				NULL_RTX, 0);
585	      fill_value = convert_to_mode (word_mode, fill_value, 1);
586	    }
587	}
588
589      /* Fill the remaining words.  */
590      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
591	{
592	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
593	  rtx subword = operand_subword (to, index, 1, to_mode);
594
595	  gcc_assert (subword);
596
597	  if (fill_value != subword)
598	    emit_move_insn (subword, fill_value);
599	}
600
601      insns = get_insns ();
602      end_sequence ();
603
604      emit_no_conflict_block (insns, to, from, NULL_RTX,
605			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
606      return;
607    }
608
609  /* Truncating multi-word to a word or less.  */
610  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
611      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
612    {
613      if (!((MEM_P (from)
614	     && ! MEM_VOLATILE_P (from)
615	     && direct_load[(int) to_mode]
616	     && ! mode_dependent_address_p (XEXP (from, 0)))
617	    || REG_P (from)
618	    || GET_CODE (from) == SUBREG))
619	from = force_reg (from_mode, from);
620      convert_move (to, gen_lowpart (word_mode, from), 0);
621      return;
622    }
623
  /* Now handle all the conversions between integer modes
     no more than a word wide.  */
626
627  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
628  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
629      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
630				GET_MODE_BITSIZE (from_mode)))
631    {
632      if (!((MEM_P (from)
633	     && ! MEM_VOLATILE_P (from)
634	     && direct_load[(int) to_mode]
635	     && ! mode_dependent_address_p (XEXP (from, 0)))
636	    || REG_P (from)
637	    || GET_CODE (from) == SUBREG))
638	from = force_reg (from_mode, from);
639      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
640	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
641	from = copy_to_reg (from);
642      emit_move_insn (to, gen_lowpart (to_mode, from));
643      return;
644    }
645
646  /* Handle extension.  */
647  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
648    {
649      /* Convert directly if that works.  */
650      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
651	  != CODE_FOR_nothing)
652	{
653	  emit_unop_insn (code, to, from, equiv_code);
654	  return;
655	}
656      else
657	{
658	  enum machine_mode intermediate;
659	  rtx tmp;
660	  tree shift_amount;
661
662	  /* Search for a mode to convert via.  */
663	  for (intermediate = from_mode; intermediate != VOIDmode;
664	       intermediate = GET_MODE_WIDER_MODE (intermediate))
665	    if (((can_extend_p (to_mode, intermediate, unsignedp)
666		  != CODE_FOR_nothing)
667		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
668		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
669					       GET_MODE_BITSIZE (intermediate))))
670		&& (can_extend_p (intermediate, from_mode, unsignedp)
671		    != CODE_FOR_nothing))
672	      {
673		convert_move (to, convert_to_mode (intermediate, from,
674						   unsignedp), unsignedp);
675		return;
676	      }
677
678	  /* No suitable intermediate mode.
679	     Generate what we need with	shifts.  */
680	  shift_amount = build_int_cst (NULL_TREE,
681					GET_MODE_BITSIZE (to_mode)
682					- GET_MODE_BITSIZE (from_mode));
683	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
684	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
685			      to, unsignedp);
686	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
687			      to, unsignedp);
688	  if (tmp != to)
689	    emit_move_insn (to, tmp);
690	  return;
691	}
692    }
693
694  /* Support special truncate insns for certain modes.  */
695  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
696    {
697      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
698		      to, from, UNKNOWN);
699      return;
700    }
701
702  /* Handle truncation of volatile memrefs, and so on;
703     the things that couldn't be truncated directly,
704     and for which there was no special instruction.
705
706     ??? Code above formerly short-circuited this, for most integer
707     mode pairs, with a force_reg in from_mode followed by a recursive
708     call to this routine.  Appears always to have been wrong.  */
709  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
710    {
711      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
712      emit_move_insn (to, temp);
713      return;
714    }
715
716  /* Mode combination is not recognized.  */
717  gcc_unreachable ();
718}
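
/* Usage sketch (for illustration; the modes and pseudos are hypothetical):

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   widens SRC into DST with zero-extension because UNSIGNEDP is nonzero;
   passing 0 would request sign-extension instead.  */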
719
720/* Return an rtx for a value that would result
721   from converting X to mode MODE.
722   Both X and MODE may be floating, or both integer.
723   UNSIGNEDP is nonzero if X is an unsigned value.
724   This can be done by referring to a part of X in place
725   or by copying to a new temporary with conversion.  */
726
727rtx
728convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
729{
730  return convert_modes (mode, VOIDmode, x, unsignedp);
731}
732
733/* Return an rtx for a value that would result
734   from converting X from mode OLDMODE to mode MODE.
735   Both modes may be floating, or both integer.
736   UNSIGNEDP is nonzero if X is an unsigned value.
737
738   This can be done by referring to a part of X in place
739   or by copying to a new temporary with conversion.
740
741   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
742
743rtx
744convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
745{
746  rtx temp;
747
  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */
750
751  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
752      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
753      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
754    x = gen_lowpart (mode, x);
755
756  if (GET_MODE (x) != VOIDmode)
757    oldmode = GET_MODE (x);
758
759  if (mode == oldmode)
760    return x;
761
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
767
768  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
769      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
770      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
771    {
772      HOST_WIDE_INT val = INTVAL (x);
773
774      if (oldmode != VOIDmode
775	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
776	{
777	  int width = GET_MODE_BITSIZE (oldmode);
778
779	  /* We need to zero extend VAL.  */
780	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
781	}
782
783      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
784    }
785
786  /* We can do this with a gen_lowpart if both desired and current modes
787     are integer, and this is either a constant integer, a register, or a
788     non-volatile MEM.  Except for the constant case where MODE is no
789     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
790
791  if ((GET_CODE (x) == CONST_INT
792       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
793      || (GET_MODE_CLASS (mode) == MODE_INT
794	  && GET_MODE_CLASS (oldmode) == MODE_INT
795	  && (GET_CODE (x) == CONST_DOUBLE
796	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
797		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
798		       && direct_load[(int) mode])
799		      || (REG_P (x)
800			  && (! HARD_REGISTER_P (x)
801			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
802			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
803						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
804    {
805      /* ?? If we don't know OLDMODE, we have to assume here that
806	 X does not need sign- or zero-extension.   This may not be
807	 the case, but it's the best we can do.  */
808      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
809	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
810	{
811	  HOST_WIDE_INT val = INTVAL (x);
812	  int width = GET_MODE_BITSIZE (oldmode);
813
814	  /* We must sign or zero-extend in this case.  Start by
815	     zero-extending, then sign extend if we need to.  */
816	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
817	  if (! unsignedp
818	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
819	    val |= (HOST_WIDE_INT) (-1) << width;
820
821	  return gen_int_mode (val, mode);
822	}
823
824      return gen_lowpart (mode, x);
825    }
826
  /* Converting an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
829  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
830    {
831      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
832      return simplify_gen_subreg (mode, x, oldmode, 0);
833    }
834
835  temp = gen_reg_rtx (mode);
836  convert_move (temp, x, unsignedp);
837  return temp;
838}
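
/* Worked example (illustrative values): the call

     convert_modes (SImode, QImode, GEN_INT (-1), 1)

   treats (const_int -1) as an unsigned QImode value and therefore yields
   (const_int 255), whereas with UNSIGNEDP equal to 0 the sign-extended
   result (const_int -1) is returned unchanged.  */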
839
840/* STORE_MAX_PIECES is the number of bytes at a time that we can
841   store efficiently.  Due to internal GCC limitations, this is
842   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
843   for an immediate constant.  */
844
845#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
846
847/* Determine whether the LEN bytes can be moved by using several move
848   instructions.  Return nonzero if a call to move_by_pieces should
849   succeed.  */
850
851int
852can_move_by_pieces (unsigned HOST_WIDE_INT len,
853		    unsigned int align ATTRIBUTE_UNUSED)
854{
855  return MOVE_BY_PIECES_P (len, align);
856}
857
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum alignment we can assume.

   If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
   mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
   stpcpy.  */
869
870rtx
871move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
872		unsigned int align, int endp)
873{
874  struct move_by_pieces data;
875  rtx to_addr, from_addr = XEXP (from, 0);
876  unsigned int max_size = MOVE_MAX_PIECES + 1;
877  enum machine_mode mode = VOIDmode, tmode;
878  enum insn_code icode;
879
880  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
881
882  data.offset = 0;
883  data.from_addr = from_addr;
884  if (to)
885    {
886      to_addr = XEXP (to, 0);
887      data.to = to;
888      data.autinc_to
889	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
890	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
891      data.reverse
892	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
893    }
894  else
895    {
896      to_addr = NULL_RTX;
897      data.to = NULL_RTX;
898      data.autinc_to = 1;
899#ifdef STACK_GROWS_DOWNWARD
900      data.reverse = 1;
901#else
902      data.reverse = 0;
903#endif
904    }
905  data.to_addr = to_addr;
906  data.from = from;
907  data.autinc_from
908    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
909       || GET_CODE (from_addr) == POST_INC
910       || GET_CODE (from_addr) == POST_DEC);
911
912  data.explicit_inc_from = 0;
913  data.explicit_inc_to = 0;
914  if (data.reverse) data.offset = len;
915  data.len = len;
916
917  /* If copying requires more than two move insns,
918     copy addresses to registers (to make displacements shorter)
919     and use post-increment if available.  */
920  if (!(data.autinc_from && data.autinc_to)
921      && move_by_pieces_ninsns (len, align, max_size) > 2)
922    {
923      /* Find the mode of the largest move...  */
924      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
925	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
926	if (GET_MODE_SIZE (tmode) < max_size)
927	  mode = tmode;
928
929      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
930	{
931	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
932	  data.autinc_from = 1;
933	  data.explicit_inc_from = -1;
934	}
935      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
936	{
937	  data.from_addr = copy_addr_to_reg (from_addr);
938	  data.autinc_from = 1;
939	  data.explicit_inc_from = 1;
940	}
941      if (!data.autinc_from && CONSTANT_P (from_addr))
942	data.from_addr = copy_addr_to_reg (from_addr);
943      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
944	{
945	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
946	  data.autinc_to = 1;
947	  data.explicit_inc_to = -1;
948	}
949      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
950	{
951	  data.to_addr = copy_addr_to_reg (to_addr);
952	  data.autinc_to = 1;
953	  data.explicit_inc_to = 1;
954	}
955      if (!data.autinc_to && CONSTANT_P (to_addr))
956	data.to_addr = copy_addr_to_reg (to_addr);
957    }
958
959  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
960  if (align >= GET_MODE_ALIGNMENT (tmode))
961    align = GET_MODE_ALIGNMENT (tmode);
962  else
963    {
964      enum machine_mode xmode;
965
966      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
967	   tmode != VOIDmode;
968	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
969	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
970	    || SLOW_UNALIGNED_ACCESS (tmode, align))
971	  break;
972
973      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
974    }
975
976  /* First move what we can in the largest integer mode, then go to
977     successively smaller modes.  */
978
979  while (max_size > 1)
980    {
981      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
982	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
983	if (GET_MODE_SIZE (tmode) < max_size)
984	  mode = tmode;
985
986      if (mode == VOIDmode)
987	break;
988
989      icode = mov_optab->handlers[(int) mode].insn_code;
990      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
991	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
992
993      max_size = GET_MODE_SIZE (mode);
994    }
995
996  /* The code above should have handled everything.  */
997  gcc_assert (!data.len);
998
999  if (endp)
1000    {
1001      rtx to1;
1002
1003      gcc_assert (!data.reverse);
1004      if (data.autinc_to)
1005	{
1006	  if (endp == 2)
1007	    {
1008	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1009		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1010	      else
1011		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1012								-1));
1013	    }
1014	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1015					   data.offset);
1016	}
1017      else
1018	{
1019	  if (endp == 2)
1020	    --data.offset;
1021	  to1 = adjust_address (data.to, QImode, data.offset);
1022	}
1023      return to1;
1024    }
1025  else
1026    return data.to;
1027}
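
/* Usage sketch (illustrative; TO, FROM and ALIGN are prepared by the
   caller):

     rtx end = move_by_pieces (to, from, 16, align, 1);

   copies 16 bytes and, because ENDP is 1, returns a QImode MEM for the
   first byte past the copy (the mempcpy-style result); ENDP 2 would name
   the last byte written and ENDP 0 simply returns TO.  */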
1028
1029/* Return number of insns required to move L bytes by pieces.
1030   ALIGN (in bits) is maximum alignment we can assume.  */
1031
1032static unsigned HOST_WIDE_INT
1033move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1034		       unsigned int max_size)
1035{
1036  unsigned HOST_WIDE_INT n_insns = 0;
1037  enum machine_mode tmode;
1038
1039  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1040  if (align >= GET_MODE_ALIGNMENT (tmode))
1041    align = GET_MODE_ALIGNMENT (tmode);
1042  else
1043    {
1044      enum machine_mode tmode, xmode;
1045
1046      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1047	   tmode != VOIDmode;
1048	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1049	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1050	    || SLOW_UNALIGNED_ACCESS (tmode, align))
1051	  break;
1052
1053      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1054    }
1055
1056  while (max_size > 1)
1057    {
1058      enum machine_mode mode = VOIDmode;
1059      enum insn_code icode;
1060
1061      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1062	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1063	if (GET_MODE_SIZE (tmode) < max_size)
1064	  mode = tmode;
1065
1066      if (mode == VOIDmode)
1067	break;
1068
1069      icode = mov_optab->handlers[(int) mode].insn_code;
1070      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1071	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1072
1073      max_size = GET_MODE_SIZE (mode);
1074    }
1075
1076  gcc_assert (!l);
1077  return n_insns;
1078}
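
/* Worked example (assuming MOVE_MAX_PIECES is 4, SImode is four bytes
   wide and the alignment allows full SImode accesses): for L = 13 the
   loop above counts 13 / 4 = 3 SImode moves leaving one byte, no HImode
   move, and one QImode move, i.e. 4 insns, which MOVE_BY_PIECES_P then
   compares against MOVE_RATIO.  */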
1079
1080/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1081   with move instructions for mode MODE.  GENFUN is the gen_... function
1082   to make a move insn for that mode.  DATA has all the other info.  */
1083
1084static void
1085move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1086		  struct move_by_pieces *data)
1087{
1088  unsigned int size = GET_MODE_SIZE (mode);
1089  rtx to1 = NULL_RTX, from1;
1090
1091  while (data->len >= size)
1092    {
1093      if (data->reverse)
1094	data->offset -= size;
1095
1096      if (data->to)
1097	{
1098	  if (data->autinc_to)
1099	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1100					     data->offset);
1101	  else
1102	    to1 = adjust_address (data->to, mode, data->offset);
1103	}
1104
1105      if (data->autinc_from)
1106	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1107					   data->offset);
1108      else
1109	from1 = adjust_address (data->from, mode, data->offset);
1110
1111      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1112	emit_insn (gen_add2_insn (data->to_addr,
1113				  GEN_INT (-(HOST_WIDE_INT)size)));
1114      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1115	emit_insn (gen_add2_insn (data->from_addr,
1116				  GEN_INT (-(HOST_WIDE_INT)size)));
1117
1118      if (data->to)
1119	emit_insn ((*genfun) (to1, from1));
1120      else
1121	{
1122#ifdef PUSH_ROUNDING
1123	  emit_single_push_insn (mode, from1, NULL);
1124#else
1125	  gcc_unreachable ();
1126#endif
1127	}
1128
1129      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1130	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1131      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1132	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1133
1134      if (! data->reverse)
1135	data->offset += size;
1136
1137      data->len -= size;
1138    }
1139}
1140
1141/* Emit code to move a block Y to a block X.  This may be done with
1142   string-move instructions, with multiple scalar move instructions,
1143   or with a library call.
1144
1145   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1146   SIZE is an rtx that says how long they are.
1147   ALIGN is the maximum alignment we can assume they have.
1148   METHOD describes what kind of copy this is, and what mechanisms may be used.
1149
1150   Return the address of the new block, if memcpy is called and returns it,
1151   0 otherwise.  */
1152
1153rtx
1154emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1155{
1156  bool may_use_call;
1157  rtx retval = 0;
1158  unsigned int align;
1159
1160  switch (method)
1161    {
1162    case BLOCK_OP_NORMAL:
1163    case BLOCK_OP_TAILCALL:
1164      may_use_call = true;
1165      break;
1166
1167    case BLOCK_OP_CALL_PARM:
1168      may_use_call = block_move_libcall_safe_for_call_parm ();
1169
1170      /* Make inhibit_defer_pop nonzero around the library call
1171	 to force it to pop the arguments right away.  */
1172      NO_DEFER_POP;
1173      break;
1174
1175    case BLOCK_OP_NO_LIBCALL:
1176      may_use_call = false;
1177      break;
1178
1179    default:
1180      gcc_unreachable ();
1181    }
1182
1183  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1184
1185  gcc_assert (MEM_P (x));
1186  gcc_assert (MEM_P (y));
1187  gcc_assert (size);
1188
1189  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1190     block copy is more efficient for other large modes, e.g. DCmode.  */
1191  x = adjust_address (x, BLKmode, 0);
1192  y = adjust_address (y, BLKmode, 0);
1193
1194  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1195     can be incorrect is coming from __builtin_memcpy.  */
1196  if (GET_CODE (size) == CONST_INT)
1197    {
1198      if (INTVAL (size) == 0)
1199	return 0;
1200
1201      x = shallow_copy_rtx (x);
1202      y = shallow_copy_rtx (y);
1203      set_mem_size (x, size);
1204      set_mem_size (y, size);
1205    }
1206
1207  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1208    move_by_pieces (x, y, INTVAL (size), align, 0);
1209  else if (emit_block_move_via_movmem (x, y, size, align))
1210    ;
1211  else if (may_use_call)
1212    retval = emit_block_move_via_libcall (x, y, size,
1213					  method == BLOCK_OP_TAILCALL);
1214  else
1215    emit_block_move_via_loop (x, y, size, align);
1216
1217  if (method == BLOCK_OP_CALL_PARM)
1218    OK_DEFER_POP;
1219
1220  return retval;
1221}
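
/* Usage sketch (illustrative; DST_MEM and SRC_MEM are MEM rtx's for the
   two blocks):

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   copies 32 bytes using whichever of the by-pieces, movmem or memcpy
   strategies is chosen above; BLOCK_OP_NO_LIBCALL forbids the memcpy
   fallback, and BLOCK_OP_CALL_PARM is for copies emitted while outgoing
   arguments are being set up.  */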
1222
1223/* A subroutine of emit_block_move.  Returns true if calling the
1224   block move libcall will not clobber any parameters which may have
1225   already been placed on the stack.  */
1226
1227static bool
1228block_move_libcall_safe_for_call_parm (void)
1229{
1230  /* If arguments are pushed on the stack, then they're safe.  */
1231  if (PUSH_ARGS)
1232    return true;
1233
1234  /* If registers go on the stack anyway, any argument is sure to clobber
1235     an outgoing argument.  */
1236#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1237  {
1238    tree fn = emit_block_move_libcall_fn (false);
1239    (void) fn;
1240    if (REG_PARM_STACK_SPACE (fn) != 0)
1241      return false;
1242  }
1243#endif
1244
1245  /* If any argument goes in memory, then it might clobber an outgoing
1246     argument.  */
1247  {
1248    CUMULATIVE_ARGS args_so_far;
1249    tree fn, arg;
1250
1251    fn = emit_block_move_libcall_fn (false);
1252    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1253
1254    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1255    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1256      {
1257	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1258	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1259	if (!tmp || !REG_P (tmp))
1260	  return false;
1261	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1262	  return false;
1263	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1264      }
1265  }
1266  return true;
1267}
1268
1269/* A subroutine of emit_block_move.  Expand a movmem pattern;
1270   return true if successful.  */
1271
1272static bool
1273emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1274{
1275  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1276  int save_volatile_ok = volatile_ok;
1277  enum machine_mode mode;
1278
1279  /* Since this is a move insn, we don't care about volatility.  */
1280  volatile_ok = 1;
1281
1282  /* Try the most limited insn first, because there's no point
1283     including more than one in the machine description unless
1284     the more limited one has some advantage.  */
1285
1286  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1287       mode = GET_MODE_WIDER_MODE (mode))
1288    {
1289      enum insn_code code = movmem_optab[(int) mode];
1290      insn_operand_predicate_fn pred;
1291
1292      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
1297	  && ((GET_CODE (size) == CONST_INT
1298	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1299		   <= (GET_MODE_MASK (mode) >> 1)))
1300	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1301	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1302	      || (*pred) (x, BLKmode))
1303	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1304	      || (*pred) (y, BLKmode))
1305	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1306	      || (*pred) (opalign, VOIDmode)))
1307	{
1308	  rtx op2;
1309	  rtx last = get_last_insn ();
1310	  rtx pat;
1311
1312	  op2 = convert_to_mode (mode, size, 1);
1313	  pred = insn_data[(int) code].operand[2].predicate;
1314	  if (pred != 0 && ! (*pred) (op2, mode))
1315	    op2 = copy_to_mode_reg (mode, op2);
1316
1317	  /* ??? When called via emit_block_move_for_call, it'd be
1318	     nice if there were some way to inform the backend, so
1319	     that it doesn't fail the expansion because it thinks
1320	     emitting the libcall would be more efficient.  */
1321
1322	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1323	  if (pat)
1324	    {
1325	      emit_insn (pat);
1326	      volatile_ok = save_volatile_ok;
1327	      return true;
1328	    }
1329	  else
1330	    delete_insns_since (last);
1331	}
1332    }
1333
1334  volatile_ok = save_volatile_ok;
1335  return false;
1336}
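
/* For illustration: the movmem expander selected above is handed its
   operands in the order of the GEN_FCN call,

     GEN_FCN ((int) code) (x, y, op2, opalign);

   where X is the destination MEM, Y the source MEM, OP2 the byte count
   converted to MODE, and OPALIGN the shared alignment in bytes, so a
   back end's movmem<mode> pattern must accept them in that order.  */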
1337
1338/* A subroutine of emit_block_move.  Expand a call to memcpy.
1339   Return the return value from memcpy, 0 otherwise.  */
1340
1341static rtx
1342emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1343{
1344  rtx dst_addr, src_addr;
1345  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1346  enum machine_mode size_mode;
1347  rtx retval;
1348
1349  /* Emit code to copy the addresses of DST and SRC and SIZE into new
1350     pseudos.  We can then place those new pseudos into a VAR_DECL and
1351     use them later.  */
1352
1353  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1354  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1355
1356  dst_addr = convert_memory_address (ptr_mode, dst_addr);
1357  src_addr = convert_memory_address (ptr_mode, src_addr);
1358
1359  dst_tree = make_tree (ptr_type_node, dst_addr);
1360  src_tree = make_tree (ptr_type_node, src_addr);
1361
1362  size_mode = TYPE_MODE (sizetype);
1363
1364  size = convert_to_mode (size_mode, size, 1);
1365  size = copy_to_mode_reg (size_mode, size);
1366
1367  /* It is incorrect to use the libcall calling conventions to call
1368     memcpy in this context.  This could be a user call to memcpy and
1369     the user may wish to examine the return value from memcpy.  For
1370     targets where libcalls and normal calls have different conventions
1371     for returning pointers, we could end up generating incorrect code.  */
1372
1373  size_tree = make_tree (sizetype, size);
1374
1375  fn = emit_block_move_libcall_fn (true);
1376  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1377  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1378  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1379
1380  /* Now we have to build up the CALL_EXPR itself.  */
1381  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1382  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1383		      call_expr, arg_list, NULL_TREE);
1384  CALL_EXPR_TAILCALL (call_expr) = tailcall;
1385
1386  retval = expand_normal (call_expr);
1387
1388  return retval;
1389}
1390
1391/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1392   for the function we use for block copies.  The first time FOR_CALL
1393   is true, we call assemble_external.  */
1394
1395static GTY(()) tree block_move_fn;
1396
1397void
1398init_block_move_fn (const char *asmspec)
1399{
1400  if (!block_move_fn)
1401    {
1402      tree args, fn;
1403
1404      fn = get_identifier ("memcpy");
1405      args = build_function_type_list (ptr_type_node, ptr_type_node,
1406				       const_ptr_type_node, sizetype,
1407				       NULL_TREE);
1408
1409      fn = build_decl (FUNCTION_DECL, fn, args);
1410      DECL_EXTERNAL (fn) = 1;
1411      TREE_PUBLIC (fn) = 1;
1412      DECL_ARTIFICIAL (fn) = 1;
1413      TREE_NOTHROW (fn) = 1;
1414      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1415      DECL_VISIBILITY_SPECIFIED (fn) = 1;
1416
1417      block_move_fn = fn;
1418    }
1419
1420  if (asmspec)
1421    set_user_assembler_name (block_move_fn, asmspec);
1422}
1423
1424static tree
1425emit_block_move_libcall_fn (int for_call)
1426{
1427  static bool emitted_extern;
1428
1429  if (!block_move_fn)
1430    init_block_move_fn (NULL);
1431
1432  if (for_call && !emitted_extern)
1433    {
1434      emitted_extern = true;
1435      make_decl_rtl (block_move_fn);
1436      assemble_external (block_move_fn);
1437    }
1438
1439  return block_move_fn;
1440}
1441
1442/* A subroutine of emit_block_move.  Copy the data via an explicit
1443   loop.  This is used only when libcalls are forbidden.  */
1444/* ??? It'd be nice to copy in hunks larger than QImode.  */
1445
1446static void
1447emit_block_move_via_loop (rtx x, rtx y, rtx size,
1448			  unsigned int align ATTRIBUTE_UNUSED)
1449{
1450  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1451  enum machine_mode iter_mode;
1452
1453  iter_mode = GET_MODE (size);
1454  if (iter_mode == VOIDmode)
1455    iter_mode = word_mode;
1456
1457  top_label = gen_label_rtx ();
1458  cmp_label = gen_label_rtx ();
1459  iter = gen_reg_rtx (iter_mode);
1460
1461  emit_move_insn (iter, const0_rtx);
1462
1463  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1464  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1465  do_pending_stack_adjust ();
1466
1467  emit_jump (cmp_label);
1468  emit_label (top_label);
1469
1470  tmp = convert_modes (Pmode, iter_mode, iter, true);
1471  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1472  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1473  x = change_address (x, QImode, x_addr);
1474  y = change_address (y, QImode, y_addr);
1475
1476  emit_move_insn (x, y);
1477
1478  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1479			     true, OPTAB_LIB_WIDEN);
1480  if (tmp != iter)
1481    emit_move_insn (iter, tmp);
1482
1483  emit_label (cmp_label);
1484
1485  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1486			   true, top_label);
1487}
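
/* For illustration, the RTL emitted above amounts to this byte loop
   (pseudo-C over hypothetical byte pointers XP and YP):

     for (iter = 0; iter < size; iter++)
       xp[iter] = yp[iter];

   a forward QImode copy whose exit test is the unsigned comparison
   emitted at CMP_LABEL.  */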
1488
1489/* Copy all or part of a value X into registers starting at REGNO.
1490   The number of registers to be filled is NREGS.  */
1491
1492void
1493move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1494{
1495  int i;
1496#ifdef HAVE_load_multiple
1497  rtx pat;
1498  rtx last;
1499#endif
1500
1501  if (nregs == 0)
1502    return;
1503
1504  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1505    x = validize_mem (force_const_mem (mode, x));
1506
1507  /* See if the machine can do this with a load multiple insn.  */
1508#ifdef HAVE_load_multiple
1509  if (HAVE_load_multiple)
1510    {
1511      last = get_last_insn ();
1512      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1513			       GEN_INT (nregs));
1514      if (pat)
1515	{
1516	  emit_insn (pat);
1517	  return;
1518	}
1519      else
1520	delete_insns_since (last);
1521    }
1522#endif
1523
1524  for (i = 0; i < nregs; i++)
1525    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1526		    operand_subword_force (x, i, mode));
1527}
1528
1529/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1530   The number of registers to be filled is NREGS.  */
1531
1532void
1533move_block_from_reg (int regno, rtx x, int nregs)
1534{
1535  int i;
1536
1537  if (nregs == 0)
1538    return;
1539
1540  /* See if the machine can do this with a store multiple insn.  */
1541#ifdef HAVE_store_multiple
1542  if (HAVE_store_multiple)
1543    {
1544      rtx last = get_last_insn ();
1545      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1546				    GEN_INT (nregs));
1547      if (pat)
1548	{
1549	  emit_insn (pat);
1550	  return;
1551	}
1552      else
1553	delete_insns_since (last);
1554    }
1555#endif
1556
1557  for (i = 0; i < nregs; i++)
1558    {
1559      rtx tem = operand_subword (x, i, 1, BLKmode);
1560
1561      gcc_assert (tem);
1562
1563      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1564    }
1565}
1566
1567/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1568   ORIG, where ORIG is a non-consecutive group of registers represented by
1569   a PARALLEL.  The clone is identical to the original except in that the
1570   original set of registers is replaced by a new set of pseudo registers.
1571   The new set has the same modes as the original set.  */
1572
1573rtx
1574gen_group_rtx (rtx orig)
1575{
1576  int i, length;
1577  rtx *tmps;
1578
1579  gcc_assert (GET_CODE (orig) == PARALLEL);
1580
1581  length = XVECLEN (orig, 0);
1582  tmps = alloca (sizeof (rtx) * length);
1583
1584  /* Skip a NULL entry in first slot.  */
1585  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1586
1587  if (i)
1588    tmps[0] = 0;
1589
1590  for (; i < length; i++)
1591    {
1592      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1593      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1594
1595      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1596    }
1597
1598  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1599}
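
/* For illustration (a hypothetical two-register group): a PARALLEL such
   as

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   says that bytes 0-7 of the value live in pseudo 100 and bytes 8-15 in
   pseudo 101; gen_group_rtx returns the same shape with fresh pseudos of
   the same modes.  */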
1600
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1604
1605static void
1606emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1607{
1608  rtx src;
1609  int start, i;
1610  enum machine_mode m = GET_MODE (orig_src);
1611
1612  gcc_assert (GET_CODE (dst) == PARALLEL);
1613
1614  if (m != VOIDmode
1615      && !SCALAR_INT_MODE_P (m)
1616      && !MEM_P (orig_src)
1617      && GET_CODE (orig_src) != CONCAT)
1618    {
1619      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1620      if (imode == BLKmode)
1621	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1622      else
1623	src = gen_reg_rtx (imode);
1624      if (imode != BLKmode)
1625	src = gen_lowpart (GET_MODE (orig_src), src);
1626      emit_move_insn (src, orig_src);
1627      /* ...and back again.  */
1628      if (imode != BLKmode)
1629	src = gen_lowpart (imode, src);
1630      emit_group_load_1 (tmps, dst, src, type, ssize);
1631      return;
1632    }
1633
1634  /* Check for a NULL entry, used to indicate that the parameter goes
1635     both on the stack and in registers.  */
1636  if (XEXP (XVECEXP (dst, 0, 0), 0))
1637    start = 0;
1638  else
1639    start = 1;
1640
1641  /* Process the pieces.  */
1642  for (i = start; i < XVECLEN (dst, 0); i++)
1643    {
1644      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1645      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1646      unsigned int bytelen = GET_MODE_SIZE (mode);
1647      int shift = 0;
1648
1649      /* Handle trailing fragments that run over the size of the struct.  */
1650      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1651	{
1652	  /* Arrange to shift the fragment to where it belongs.
1653	     extract_bit_field loads to the lsb of the reg.  */
1654	  if (
1655#ifdef BLOCK_REG_PADDING
1656	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1657	      == (BYTES_BIG_ENDIAN ? upward : downward)
1658#else
1659	      BYTES_BIG_ENDIAN
1660#endif
1661	      )
1662	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1663	  bytelen = ssize - bytepos;
1664	  gcc_assert (bytelen > 0);
1665	}
1666
1667      /* If we won't be loading directly from memory, protect the real source
1668	 from strange tricks we might play; but make sure that the source can
1669	 be loaded directly into the destination.  */
1670      src = orig_src;
1671      if (!MEM_P (orig_src)
1672	  && (!CONSTANT_P (orig_src)
1673	      || (GET_MODE (orig_src) != mode
1674		  && GET_MODE (orig_src) != VOIDmode)))
1675	{
1676	  if (GET_MODE (orig_src) == VOIDmode)
1677	    src = gen_reg_rtx (mode);
1678	  else
1679	    src = gen_reg_rtx (GET_MODE (orig_src));
1680
1681	  emit_move_insn (src, orig_src);
1682	}
1683
1684      /* Optimize the access just a bit.  */
1685      if (MEM_P (src)
1686	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1687	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1688	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1689	  && bytelen == GET_MODE_SIZE (mode))
1690	{
1691	  tmps[i] = gen_reg_rtx (mode);
1692	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1693	}
1694      else if (COMPLEX_MODE_P (mode)
1695	       && GET_MODE (src) == mode
1696	       && bytelen == GET_MODE_SIZE (mode))
1697	/* Let emit_move_complex do the bulk of the work.  */
1698	tmps[i] = src;
1699      else if (GET_CODE (src) == CONCAT)
1700	{
1701	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1702	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1703
1704	  if ((bytepos == 0 && bytelen == slen0)
1705	      || (bytepos != 0 && bytepos + bytelen <= slen))
1706	    {
1707	      /* The following assumes that the concatenated objects all
1708		 have the same size.  In this case, a simple calculation
1709		 can be used to determine the object and the bit field
1710		 to be extracted.  */
1711	      tmps[i] = XEXP (src, bytepos / slen0);
1712	      if (! CONSTANT_P (tmps[i])
1713		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1714		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1715					     (bytepos % slen0) * BITS_PER_UNIT,
1716					     1, NULL_RTX, mode, mode);
1717	    }
1718	  else
1719	    {
1720	      rtx mem;
1721
1722	      gcc_assert (!bytepos);
1723	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
1724	      emit_move_insn (mem, src);
1725	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1726					   0, 1, NULL_RTX, mode, mode);
1727	    }
1728	}
1729      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1730	 SIMD register, which is currently broken.  Until we get GCC
1731	 to emit proper RTL for these cases, let's dump to memory.  */
1732      else if (VECTOR_MODE_P (GET_MODE (dst))
1733	       && REG_P (src))
1734	{
1735	  int slen = GET_MODE_SIZE (GET_MODE (src));
1736	  rtx mem;
1737
1738	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
1739	  emit_move_insn (mem, src);
1740	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1741	}
1742      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1743               && XVECLEN (dst, 0) > 1)
1744        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1745      else if (CONSTANT_P (src)
1746	       || (REG_P (src) && GET_MODE (src) == mode))
1747	tmps[i] = src;
1748      else
1749	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1750				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1751				     mode, mode);
1752
1753      if (shift)
1754	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1755				build_int_cst (NULL_TREE, shift), tmps[i], 0);
1756    }
1757}
1758
1759/* Emit code to move a block SRC of type TYPE to a block DST,
1760   where DST is non-consecutive registers represented by a PARALLEL.
1761   SSIZE represents the total size of block SRC in bytes, or -1
1762   if not known.  */
1763
1764void
1765emit_group_load (rtx dst, rtx src, tree type, int ssize)
1766{
1767  rtx *tmps;
1768  int i;
1769
1770  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1771  emit_group_load_1 (tmps, dst, src, type, ssize);
1772
1773  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1774  for (i = 0; i < XVECLEN (dst, 0); i++)
1775    {
1776      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1777      if (d == NULL)
1778	continue;
1779      emit_move_insn (d, tmps[i]);
1780    }
1781}
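
/* An illustrative sketch, not part of the original file and guarded out
   of the build: loading a 16-byte structure that a made-up ABI returns
   in two DImode registers into the register group described by a
   PARALLEL.  The register numbers, the 16-byte size, and the TYPE
   argument are assumptions for the example.  */
#if 0
static void
example_group_load (rtx blk_mem, tree type)
{
  /* Destination: byte 0 goes to (reg:DI 0), byte 8 to (reg:DI 1).  */
  rtx dst = gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
				   const0_rtx),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
				   GEN_INT (8))));

  /* Load the 16-byte memory block into the two registers.  */
  emit_group_load (dst, blk_mem, type, 16);
}
#endif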
1782
1783/* Similar, but load SRC into new pseudos in a format that looks like a
1784   PARALLEL.  This can later be fed to emit_group_move to get things
1785   in the right place.  */
1786
1787rtx
1788emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1789{
1790  rtvec vec;
1791  int i;
1792
1793  vec = rtvec_alloc (XVECLEN (parallel, 0));
1794  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1795
1796  /* Convert the vector to look just like the original PARALLEL, except
1797     with the computed values.  */
1798  for (i = 0; i < XVECLEN (parallel, 0); i++)
1799    {
1800      rtx e = XVECEXP (parallel, 0, i);
1801      rtx d = XEXP (e, 0);
1802
1803      if (d)
1804	{
1805	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1806	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1807	}
1808      RTVEC_ELT (vec, i) = e;
1809    }
1810
1811  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1812}
1813
1814/* Emit code to move a block SRC to block DST, where SRC and DST are
1815   non-consecutive groups of registers, each represented by a PARALLEL.  */
1816
1817void
1818emit_group_move (rtx dst, rtx src)
1819{
1820  int i;
1821
1822  gcc_assert (GET_CODE (src) == PARALLEL
1823	      && GET_CODE (dst) == PARALLEL
1824	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1825
1826  /* Skip first entry if NULL.  */
1827  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1828    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1829		    XEXP (XVECEXP (src, 0, i), 0));
1830}
1831
1832/* Move a group of registers represented by a PARALLEL into pseudos.  */
1833
1834rtx
1835emit_group_move_into_temps (rtx src)
1836{
1837  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1838  int i;
1839
1840  for (i = 0; i < XVECLEN (src, 0); i++)
1841    {
1842      rtx e = XVECEXP (src, 0, i);
1843      rtx d = XEXP (e, 0);
1844
1845      if (d)
1846	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1847      RTVEC_ELT (vec, i) = e;
1848    }
1849
1850  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1851}
1852
1853/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1854   where SRC is non-consecutive registers represented by a PARALLEL.
1855   SSIZE represents the total size of block ORIG_DST, or -1 if not
1856   known.  */
1857
1858void
1859emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1860{
1861  rtx *tmps, dst;
1862  int start, finish, i;
1863  enum machine_mode m = GET_MODE (orig_dst);
1864
1865  gcc_assert (GET_CODE (src) == PARALLEL);
1866
1867  if (!SCALAR_INT_MODE_P (m)
1868      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1869    {
1870      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1871      if (imode == BLKmode)
1872        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1873      else
1874        dst = gen_reg_rtx (imode);
1875      emit_group_store (dst, src, type, ssize);
1876      if (imode != BLKmode)
1877        dst = gen_lowpart (GET_MODE (orig_dst), dst);
1878      emit_move_insn (orig_dst, dst);
1879      return;
1880    }
1881
1882  /* Check for a NULL entry, used to indicate that the parameter goes
1883     both on the stack and in registers.  */
1884  if (XEXP (XVECEXP (src, 0, 0), 0))
1885    start = 0;
1886  else
1887    start = 1;
1888  finish = XVECLEN (src, 0);
1889
1890  tmps = alloca (sizeof (rtx) * finish);
1891
1892  /* Copy the (probable) hard regs into pseudos.  */
1893  for (i = start; i < finish; i++)
1894    {
1895      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1896      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1897	{
1898	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1899	  emit_move_insn (tmps[i], reg);
1900	}
1901      else
1902	tmps[i] = reg;
1903    }
1904
1905  /* If we won't be storing directly into memory, protect the real destination
1906     from strange tricks we might play.  */
1907  dst = orig_dst;
1908  if (GET_CODE (dst) == PARALLEL)
1909    {
1910      rtx temp;
1911
1912      /* We can get a PARALLEL dst if there is a conditional expression in
1913	 a return statement.  In that case, the dst and src are the same,
1914	 so no action is necessary.  */
1915      if (rtx_equal_p (dst, src))
1916	return;
1917
1918      /* It is unclear if we can ever reach here, but we may as well handle
1919	 it.  Allocate a temporary, and split this into a store/load to/from
1920	 the temporary.  */
1921
1922      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1923      emit_group_store (temp, src, type, ssize);
1924      emit_group_load (dst, temp, type, ssize);
1925      return;
1926    }
1927  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1928    {
1929      enum machine_mode outer = GET_MODE (dst);
1930      enum machine_mode inner;
1931      HOST_WIDE_INT bytepos;
1932      bool done = false;
1933      rtx temp;
1934
1935      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1936	dst = gen_reg_rtx (outer);
1937
1938      /* Make life a bit easier for combine.  */
1939      /* If the first element of the vector is the low part
1940	 of the destination mode, use a paradoxical subreg to
1941	 initialize the destination.  */
1942      if (start < finish)
1943	{
1944	  inner = GET_MODE (tmps[start]);
1945	  bytepos = subreg_lowpart_offset (inner, outer);
1946	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1947	    {
1948	      temp = simplify_gen_subreg (outer, tmps[start],
1949					  inner, 0);
1950	      if (temp)
1951		{
1952		  emit_move_insn (dst, temp);
1953		  done = true;
1954		  start++;
1955		}
1956	    }
1957	}
1958
1959      /* If the first element wasn't the low part, try the last.  */
1960      if (!done
1961	  && start < finish - 1)
1962	{
1963	  inner = GET_MODE (tmps[finish - 1]);
1964	  bytepos = subreg_lowpart_offset (inner, outer);
1965	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1966	    {
1967	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
1968					  inner, 0);
1969	      if (temp)
1970		{
1971		  emit_move_insn (dst, temp);
1972		  done = true;
1973		  finish--;
1974		}
1975	    }
1976	}
1977
1978      /* Otherwise, simply initialize the result to zero.  */
1979      if (!done)
1980        emit_move_insn (dst, CONST0_RTX (outer));
1981    }
1982
1983  /* Process the pieces.  */
1984  for (i = start; i < finish; i++)
1985    {
1986      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1987      enum machine_mode mode = GET_MODE (tmps[i]);
1988      unsigned int bytelen = GET_MODE_SIZE (mode);
1989      rtx dest = dst;
1990
1991      /* Handle trailing fragments that run over the size of the struct.  */
1992      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1993	{
1994	  /* store_bit_field always takes its value from the lsb.
1995	     Move the fragment to the lsb if it's not already there.  */
1996	  if (
1997#ifdef BLOCK_REG_PADDING
1998	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1999	      == (BYTES_BIG_ENDIAN ? upward : downward)
2000#else
2001	      BYTES_BIG_ENDIAN
2002#endif
2003	      )
2004	    {
2005	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2006	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2007				      build_int_cst (NULL_TREE, shift),
2008				      tmps[i], 0);
2009	    }
2010	  bytelen = ssize - bytepos;
2011	}
2012
2013      if (GET_CODE (dst) == CONCAT)
2014	{
2015	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016	    dest = XEXP (dst, 0);
2017	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2018	    {
2019	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2020	      dest = XEXP (dst, 1);
2021	    }
2022	  else
2023	    {
2024	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2025	      dest = assign_stack_temp (GET_MODE (dest),
2026				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2027	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2028			      tmps[i]);
2029	      dst = dest;
2030	      break;
2031	    }
2032	}
2033
2034      /* Optimize the access just a bit.  */
2035      if (MEM_P (dest)
2036	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2037	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2038	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2039	  && bytelen == GET_MODE_SIZE (mode))
2040	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2041      else
2042	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2043			 mode, tmps[i]);
2044    }
2045
2046  /* Copy from the pseudo into the (probable) hard reg.  */
2047  if (orig_dst != dst)
2048    emit_move_insn (orig_dst, dst);
2049}
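
/* An illustrative sketch, not part of the original file and guarded out
   of the build: the reverse of the load example above, spilling a value
   held in non-consecutive registers back into the BLKmode memory block
   it describes.  The 16-byte size is an assumption.  */
#if 0
static void
example_group_store (rtx blk_mem, rtx reg_parallel, tree type)
{
  /* REG_PARALLEL pairs each register with its byte offset, exactly as
     in the load case; emit_group_store writes each piece in place.  */
  emit_group_store (blk_mem, reg_parallel, type, 16);
}
#endif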
2050
2051/* Generate code to copy a BLKmode object of TYPE out of a
2052   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2053   is null, a stack temporary is created.  TGTBLK is returned.
2054
2055   The purpose of this routine is to handle functions that return
2056   BLKmode structures in registers.  Some machines (the PA for example)
2057   want to return all small structures in registers regardless of the
2058   structure's alignment.  */
2059
2060rtx
2061copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2062{
2063  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2064  rtx src = NULL, dst = NULL;
2065  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2066  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2067
2068  if (tgtblk == 0)
2069    {
2070      tgtblk = assign_temp (build_qualified_type (type,
2071						  (TYPE_QUALS (type)
2072						   | TYPE_QUAL_CONST)),
2073			    0, 1, 1);
2074      preserve_temp_slots (tgtblk);
2075    }
2076
2077  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2078     into a new pseudo which is a full word.  */
2079
2080  if (GET_MODE (srcreg) != BLKmode
2081      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2082    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2083
2084  /* If the structure doesn't take up a whole number of words, see whether
2085     SRCREG is padded on the left or on the right.  If it's on the left,
2086     set PADDING_CORRECTION to the number of bits to skip.
2087
2088     In most ABIs, the structure will be returned at the least significant
2089     end of the register, which translates to right padding on little-endian
2090     targets and left padding on big-endian targets.  The opposite
2091     holds if the structure is returned at the most significant
2092     end of the register.  */
2093  if (bytes % UNITS_PER_WORD != 0
2094      && (targetm.calls.return_in_msb (type)
2095	  ? !BYTES_BIG_ENDIAN
2096	  : BYTES_BIG_ENDIAN))
2097    padding_correction
2098      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2099
2100  /* Copy the structure BITSIZE bits at a time.
2101
2102     We could probably emit more efficient code for machines which do not use
2103     strict alignment, but it doesn't seem worth the effort at the current
2104     time.  */
2105  for (bitpos = 0, xbitpos = padding_correction;
2106       bitpos < bytes * BITS_PER_UNIT;
2107       bitpos += bitsize, xbitpos += bitsize)
2108    {
2109      /* We need a new source operand each time xbitpos is on a
2110	 word boundary and when xbitpos == padding_correction
2111	 (the first time through).  */
2112      if (xbitpos % BITS_PER_WORD == 0
2113	  || xbitpos == padding_correction)
2114	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2115				     GET_MODE (srcreg));
2116
2117      /* We need a new destination operand each time bitpos is on
2118	 a word boundary.  */
2119      if (bitpos % BITS_PER_WORD == 0)
2120	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2121
2122      /* Use xbitpos for the source extraction (right justified) and
2123	 bitpos for the destination store (left justified).  */
2124      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2125		       extract_bit_field (src, bitsize,
2126					  xbitpos % BITS_PER_WORD, 1,
2127					  NULL_RTX, word_mode, word_mode));
2128    }
2129
2130  return tgtblk;
2131}
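
/* An illustrative sketch, not part of the original file and guarded out
   of the build: how a caller expanding a call to a function that
   returns a small BLKmode struct in a register might use
   copy_blkmode_from_reg.  The register number and the RESULT_TYPE
   argument are assumptions.  */
#if 0
static rtx
example_copy_blkmode_return (tree result_type)
{
  /* Pretend the ABI returns the aggregate in (reg:DI 0).  Passing a
     null target asks the routine to allocate a stack temporary.  */
  rtx retreg = gen_rtx_REG (DImode, 0);
  return copy_blkmode_from_reg (NULL_RTX, retreg, result_type);
}
#endif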
2132
2133/* Add a USE expression for REG to the (possibly empty) list pointed
2134   to by CALL_FUSAGE.  REG must denote a hard register.  */
2135
2136void
2137use_reg (rtx *call_fusage, rtx reg)
2138{
2139  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2140
2141  *call_fusage
2142    = gen_rtx_EXPR_LIST (VOIDmode,
2143			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2144}
2145
2146/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2147   starting at REGNO.  All of these registers must be hard registers.  */
2148
2149void
2150use_regs (rtx *call_fusage, int regno, int nregs)
2151{
2152  int i;
2153
2154  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2155
2156  for (i = 0; i < nregs; i++)
2157    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2158}
2159
2160/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2161   PARALLEL REGS.  This is for calls that pass values in multiple
2162   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2163
2164void
2165use_group_regs (rtx *call_fusage, rtx regs)
2166{
2167  int i;
2168
2169  for (i = 0; i < XVECLEN (regs, 0); i++)
2170    {
2171      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2172
2173      /* A NULL entry means the parameter goes both on the stack and in
2174	 registers.  This can also be a MEM for targets that pass values
2175	 partially on the stack and partially in registers.  */
2176      if (reg != 0 && REG_P (reg))
2177	use_reg (call_fusage, reg);
2178    }
2179}
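
/* An illustrative sketch, not part of the original file and guarded out
   of the build: building up a CALL_INSN_FUNCTION_USAGE list for a call
   whose first argument lives in two consecutive hard registers and
   whose second argument is described by a PARALLEL.  The register
   numbers are made up.  */
#if 0
static void
example_build_fusage (rtx arg_parallel)
{
  rtx call_fusage = NULL_RTX;

  /* Record hard registers 4 and 5 as used by the call.  */
  use_regs (&call_fusage, 4, 2);

  /* Record every register mentioned in the PARALLEL as well; NULL and
     MEM entries are skipped by use_group_regs.  */
  use_group_regs (&call_fusage, arg_parallel);
}
#endif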
2180
2181
2182/* Determine whether the LEN bytes generated by CONSTFUN can be
2183   stored to memory using several move instructions.  CONSTFUNDATA is
2184   a pointer which will be passed as argument in every CONSTFUN call.
2185   ALIGN is maximum alignment we can assume.  Return nonzero if a
2186   call to store_by_pieces should succeed.  */
2187
2188int
2189can_store_by_pieces (unsigned HOST_WIDE_INT len,
2190		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2191		     void *constfundata, unsigned int align)
2192{
2193  unsigned HOST_WIDE_INT l;
2194  unsigned int max_size;
2195  HOST_WIDE_INT offset = 0;
2196  enum machine_mode mode, tmode;
2197  enum insn_code icode;
2198  int reverse;
2199  rtx cst;
2200
2201  if (len == 0)
2202    return 1;
2203
2204  if (! STORE_BY_PIECES_P (len, align))
2205    return 0;
2206
2207  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2208  if (align >= GET_MODE_ALIGNMENT (tmode))
2209    align = GET_MODE_ALIGNMENT (tmode);
2210  else
2211    {
2212      enum machine_mode xmode;
2213
2214      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2215	   tmode != VOIDmode;
2216	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2217	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2218	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2219	  break;
2220
2221      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2222    }
2223
2224  /* We would first store what we can in the largest integer mode, then go to
2225     successively smaller modes.  */
2226
2227  for (reverse = 0;
2228       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2229       reverse++)
2230    {
2231      l = len;
2232      mode = VOIDmode;
2233      max_size = STORE_MAX_PIECES + 1;
2234      while (max_size > 1)
2235	{
2236	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2237	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2238	    if (GET_MODE_SIZE (tmode) < max_size)
2239	      mode = tmode;
2240
2241	  if (mode == VOIDmode)
2242	    break;
2243
2244	  icode = mov_optab->handlers[(int) mode].insn_code;
2245	  if (icode != CODE_FOR_nothing
2246	      && align >= GET_MODE_ALIGNMENT (mode))
2247	    {
2248	      unsigned int size = GET_MODE_SIZE (mode);
2249
2250	      while (l >= size)
2251		{
2252		  if (reverse)
2253		    offset -= size;
2254
2255		  cst = (*constfun) (constfundata, offset, mode);
2256		  if (!LEGITIMATE_CONSTANT_P (cst))
2257		    return 0;
2258
2259		  if (!reverse)
2260		    offset += size;
2261
2262		  l -= size;
2263		}
2264	    }
2265
2266	  max_size = GET_MODE_SIZE (mode);
2267	}
2268
2269      /* The code above should have handled everything.  */
2270      gcc_assert (!l);
2271    }
2272
2273  return 1;
2274}
2275
2276/* Generate several move instructions to store LEN bytes generated by
2277   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2278   pointer which will be passed as argument in every CONSTFUN call.
2279   ALIGN is maximum alignment we can assume.
2280   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2281   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2282   stpcpy.  */
2283
2284rtx
2285store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2286		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2287		 void *constfundata, unsigned int align, int endp)
2288{
2289  struct store_by_pieces data;
2290
2291  if (len == 0)
2292    {
2293      gcc_assert (endp != 2);
2294      return to;
2295    }
2296
2297  gcc_assert (STORE_BY_PIECES_P (len, align));
2298  data.constfun = constfun;
2299  data.constfundata = constfundata;
2300  data.len = len;
2301  data.to = to;
2302  store_by_pieces_1 (&data, align);
2303  if (endp)
2304    {
2305      rtx to1;
2306
2307      gcc_assert (!data.reverse);
2308      if (data.autinc_to)
2309	{
2310	  if (endp == 2)
2311	    {
2312	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2313		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2314	      else
2315		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2316								-1));
2317	    }
2318	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2319					   data.offset);
2320	}
2321      else
2322	{
2323	  if (endp == 2)
2324	    --data.offset;
2325	  to1 = adjust_address (data.to, QImode, data.offset);
2326	}
2327      return to1;
2328    }
2329  else
2330    return data.to;
2331}
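
/* An illustrative sketch, not part of the original file and guarded out
   of the build: a CONSTFUN callback matching the prototype documented
   above, plus a caller that checks can_store_by_pieces before calling
   store_by_pieces.  Both helper names are hypothetical, and the
   callback is deliberately simplistic (it replicates one byte value
   rather than reading real data at OFFSET).  */
#if 0
static rtx
example_byte_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       enum machine_mode mode)
{
  /* A real callback would assemble GET_MODE_SIZE (mode) bytes from
     DATA starting at OFFSET; here we just widen a single byte.  */
  unsigned char c = *(unsigned char *) data;
  return gen_int_mode (c, mode);
}

static void
example_store_fill (rtx dest_mem, unsigned char value,
		    unsigned HOST_WIDE_INT len, unsigned int align)
{
  /* Only go by pieces when every generated constant is legitimate and
     the target thinks the piecewise expansion is a win.  */
  if (can_store_by_pieces (len, example_byte_constfun, &value, align))
    store_by_pieces (dest_mem, len, example_byte_constfun, &value,
		     align, 0);
}
#endif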
2332
2333/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2334   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2335
2336static void
2337clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2338{
2339  struct store_by_pieces data;
2340
2341  if (len == 0)
2342    return;
2343
2344  data.constfun = clear_by_pieces_1;
2345  data.constfundata = NULL;
2346  data.len = len;
2347  data.to = to;
2348  store_by_pieces_1 (&data, align);
2349}
2350
2351/* Callback routine for clear_by_pieces.
2352   Return const0_rtx unconditionally.  */
2353
2354static rtx
2355clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2356		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2357		   enum machine_mode mode ATTRIBUTE_UNUSED)
2358{
2359  return const0_rtx;
2360}
2361
2362/* Subroutine of clear_by_pieces and store_by_pieces.
2363   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2364   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2365
2366static void
2367store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2368		   unsigned int align ATTRIBUTE_UNUSED)
2369{
2370  rtx to_addr = XEXP (data->to, 0);
2371  unsigned int max_size = STORE_MAX_PIECES + 1;
2372  enum machine_mode mode = VOIDmode, tmode;
2373  enum insn_code icode;
2374
2375  data->offset = 0;
2376  data->to_addr = to_addr;
2377  data->autinc_to
2378    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2379       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2380
2381  data->explicit_inc_to = 0;
2382  data->reverse
2383    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2384  if (data->reverse)
2385    data->offset = data->len;
2386
2387  /* If storing requires more than two move insns,
2388     copy addresses to registers (to make displacements shorter)
2389     and use post-increment if available.  */
2390  if (!data->autinc_to
2391      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2392    {
2393      /* Determine the main mode we'll be using.  */
2394      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2395	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2396	if (GET_MODE_SIZE (tmode) < max_size)
2397	  mode = tmode;
2398
2399      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2400	{
2401	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2402	  data->autinc_to = 1;
2403	  data->explicit_inc_to = -1;
2404	}
2405
2406      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2407	  && ! data->autinc_to)
2408	{
2409	  data->to_addr = copy_addr_to_reg (to_addr);
2410	  data->autinc_to = 1;
2411	  data->explicit_inc_to = 1;
2412	}
2413
2414      if (!data->autinc_to && CONSTANT_P (to_addr))
2415	data->to_addr = copy_addr_to_reg (to_addr);
2416    }
2417
2418  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2419  if (align >= GET_MODE_ALIGNMENT (tmode))
2420    align = GET_MODE_ALIGNMENT (tmode);
2421  else
2422    {
2423      enum machine_mode xmode;
2424
2425      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2426	   tmode != VOIDmode;
2427	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2428	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2429	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2430	  break;
2431
2432      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2433    }
2434
2435  /* First store what we can in the largest integer mode, then go to
2436     successively smaller modes.  */
2437
2438  while (max_size > 1)
2439    {
2440      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2441	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2442	if (GET_MODE_SIZE (tmode) < max_size)
2443	  mode = tmode;
2444
2445      if (mode == VOIDmode)
2446	break;
2447
2448      icode = mov_optab->handlers[(int) mode].insn_code;
2449      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2450	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2451
2452      max_size = GET_MODE_SIZE (mode);
2453    }
2454
2455  /* The code above should have handled everything.  */
2456  gcc_assert (!data->len);
2457}
2458
2459/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2460   with move instructions for mode MODE.  GENFUN is the gen_... function
2461   to make a move insn for that mode.  DATA has all the other info.  */
2462
2463static void
2464store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2465		   struct store_by_pieces *data)
2466{
2467  unsigned int size = GET_MODE_SIZE (mode);
2468  rtx to1, cst;
2469
2470  while (data->len >= size)
2471    {
2472      if (data->reverse)
2473	data->offset -= size;
2474
2475      if (data->autinc_to)
2476	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2477					 data->offset);
2478      else
2479	to1 = adjust_address (data->to, mode, data->offset);
2480
2481      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2482	emit_insn (gen_add2_insn (data->to_addr,
2483				  GEN_INT (-(HOST_WIDE_INT) size)));
2484
2485      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2486      emit_insn ((*genfun) (to1, cst));
2487
2488      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2489	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2490
2491      if (! data->reverse)
2492	data->offset += size;
2493
2494      data->len -= size;
2495    }
2496}
2497
2498/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2499   its length in bytes.  */
2500
2501rtx
2502clear_storage (rtx object, rtx size, enum block_op_methods method)
2503{
2504  enum machine_mode mode = GET_MODE (object);
2505  unsigned int align;
2506
2507  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2508
2509  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2510     just move a zero.  Otherwise, do this a piece at a time.  */
2511  if (mode != BLKmode
2512      && GET_CODE (size) == CONST_INT
2513      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2514    {
2515      rtx zero = CONST0_RTX (mode);
2516      if (zero != NULL)
2517	{
2518	  emit_move_insn (object, zero);
2519	  return NULL;
2520	}
2521
2522      if (COMPLEX_MODE_P (mode))
2523	{
2524	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2525	  if (zero != NULL)
2526	    {
2527	      write_complex_part (object, zero, 0);
2528	      write_complex_part (object, zero, 1);
2529	      return NULL;
2530	    }
2531	}
2532    }
2533
2534  if (size == const0_rtx)
2535    return NULL;
2536
2537  align = MEM_ALIGN (object);
2538
2539  if (GET_CODE (size) == CONST_INT
2540      && CLEAR_BY_PIECES_P (INTVAL (size), align))
2541    clear_by_pieces (object, INTVAL (size), align);
2542  else if (set_storage_via_setmem (object, size, const0_rtx, align))
2543    ;
2544  else
2545    return clear_storage_via_libcall (object, size,
2546				      method == BLOCK_OP_TAILCALL);
2547
2548  return NULL;
2549}
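
/* An illustrative sketch, not part of the original file and guarded out
   of the build: zeroing a 40-byte BLKmode object.  The size is an
   arbitrary example value.  */
#if 0
static void
example_clear (rtx blk_mem)
{
  /* clear_storage picks clear_by_pieces, a setmem pattern, or a memset
     libcall, in that order of preference.  */
  clear_storage (blk_mem, GEN_INT (40), BLOCK_OP_NORMAL);
}
#endif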
2550
2551/* A subroutine of clear_storage.  Expand a call to memset.
2552   Return the return value of memset, 0 otherwise.  */
2553
2554static rtx
2555clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2556{
2557  tree call_expr, arg_list, fn, object_tree, size_tree;
2558  enum machine_mode size_mode;
2559  rtx retval;
2560
2561  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2562     place those new pseudos into a VAR_DECL and use them later.  */
2563
2564  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2565
2566  size_mode = TYPE_MODE (sizetype);
2567  size = convert_to_mode (size_mode, size, 1);
2568  size = copy_to_mode_reg (size_mode, size);
2569
2570  /* It is incorrect to use the libcall calling conventions to call
2571     memset in this context.  This could be a user call to memset and
2572     the user may wish to examine the return value from memset.  For
2573     targets where libcalls and normal calls have different conventions
2574     for returning pointers, we could end up generating incorrect code.  */
2575
2576  object_tree = make_tree (ptr_type_node, object);
2577  size_tree = make_tree (sizetype, size);
2578
2579  fn = clear_storage_libcall_fn (true);
2580  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2581  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2582  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2583
2584  /* Now we have to build up the CALL_EXPR itself.  */
2585  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2586  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2587		      call_expr, arg_list, NULL_TREE);
2588  CALL_EXPR_TAILCALL (call_expr) = tailcall;
2589
2590  retval = expand_normal (call_expr);
2591
2592  return retval;
2593}
2594
2595/* A subroutine of clear_storage_via_libcall.  Create the tree node
2596   for the function we use for block clears.  The first time FOR_CALL
2597   is true, we call assemble_external.  */
2598
2599static GTY(()) tree block_clear_fn;
2600
2601void
2602init_block_clear_fn (const char *asmspec)
2603{
2604  if (!block_clear_fn)
2605    {
2606      tree fn, args;
2607
2608      fn = get_identifier ("memset");
2609      args = build_function_type_list (ptr_type_node, ptr_type_node,
2610				       integer_type_node, sizetype,
2611				       NULL_TREE);
2612
2613      fn = build_decl (FUNCTION_DECL, fn, args);
2614      DECL_EXTERNAL (fn) = 1;
2615      TREE_PUBLIC (fn) = 1;
2616      DECL_ARTIFICIAL (fn) = 1;
2617      TREE_NOTHROW (fn) = 1;
2618      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2619      DECL_VISIBILITY_SPECIFIED (fn) = 1;
2620
2621      block_clear_fn = fn;
2622    }
2623
2624  if (asmspec)
2625    set_user_assembler_name (block_clear_fn, asmspec);
2626}
2627
2628static tree
2629clear_storage_libcall_fn (int for_call)
2630{
2631  static bool emitted_extern;
2632
2633  if (!block_clear_fn)
2634    init_block_clear_fn (NULL);
2635
2636  if (for_call && !emitted_extern)
2637    {
2638      emitted_extern = true;
2639      make_decl_rtl (block_clear_fn);
2640      assemble_external (block_clear_fn);
2641    }
2642
2643  return block_clear_fn;
2644}
2645
2646/* Expand a setmem pattern; return true if successful.  */
2647
2648bool
2649set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2650{
2651  /* Try the most limited insn first, because there's no point
2652     including more than one in the machine description unless
2653     the more limited one has some advantage.  */
2654
2655  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2656  enum machine_mode mode;
2657
2658  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2659       mode = GET_MODE_WIDER_MODE (mode))
2660    {
2661      enum insn_code code = setmem_optab[(int) mode];
2662      insn_operand_predicate_fn pred;
2663
2664      if (code != CODE_FOR_nothing
2665	  /* We don't need MODE to be narrower than
2666	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2667	     the mode mask, as it is returned by the macro, it will
2668	     definitely be less than the actual mode mask.  */
2669	  && ((GET_CODE (size) == CONST_INT
2670	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2671		   <= (GET_MODE_MASK (mode) >> 1)))
2672	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2673	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2674	      || (*pred) (object, BLKmode))
2675	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2676	      || (*pred) (opalign, VOIDmode)))
2677	{
2678	  rtx opsize, opchar;
2679	  enum machine_mode char_mode;
2680	  rtx last = get_last_insn ();
2681	  rtx pat;
2682
2683	  opsize = convert_to_mode (mode, size, 1);
2684	  pred = insn_data[(int) code].operand[1].predicate;
2685	  if (pred != 0 && ! (*pred) (opsize, mode))
2686	    opsize = copy_to_mode_reg (mode, opsize);
2687
2688	  opchar = val;
2689	  char_mode = insn_data[(int) code].operand[2].mode;
2690	  if (char_mode != VOIDmode)
2691	    {
2692	      opchar = convert_to_mode (char_mode, opchar, 1);
2693	      pred = insn_data[(int) code].operand[2].predicate;
2694	      if (pred != 0 && ! (*pred) (opchar, char_mode))
2695		opchar = copy_to_mode_reg (char_mode, opchar);
2696	    }
2697
2698	  pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2699	  if (pat)
2700	    {
2701	      emit_insn (pat);
2702	      return true;
2703	    }
2704	  else
2705	    delete_insns_since (last);
2706	}
2707    }
2708
2709  return false;
2710}
2711
2712
2713/* Write to one of the components of the complex value CPLX.  Write VAL to
2714   the real part if IMAG_P is false, and the imaginary part if it's true.  */
2715
2716static void
2717write_complex_part (rtx cplx, rtx val, bool imag_p)
2718{
2719  enum machine_mode cmode;
2720  enum machine_mode imode;
2721  unsigned ibitsize;
2722
2723  if (GET_CODE (cplx) == CONCAT)
2724    {
2725      emit_move_insn (XEXP (cplx, imag_p), val);
2726      return;
2727    }
2728
2729  cmode = GET_MODE (cplx);
2730  imode = GET_MODE_INNER (cmode);
2731  ibitsize = GET_MODE_BITSIZE (imode);
2732
2733  /* For MEMs simplify_gen_subreg may generate an invalid new address
2734     because, e.g., the original address is considered mode-dependent
2735     by the target, which restricts simplify_subreg from invoking
2736     adjust_address_nv.  Instead of preparing fallback support for an
2737     invalid address, we call adjust_address_nv directly.  */
2738  if (MEM_P (cplx))
2739    {
2740      emit_move_insn (adjust_address_nv (cplx, imode,
2741					 imag_p ? GET_MODE_SIZE (imode) : 0),
2742		      val);
2743      return;
2744    }
2745
2746  /* If the sub-object is at least word sized, then we know that subregging
2747     will work.  This special case is important, since store_bit_field
2748     wants to operate on integer modes, and there's rarely an OImode to
2749     correspond to TCmode.  */
2750  if (ibitsize >= BITS_PER_WORD
2751      /* For hard regs we have exact predicates.  Assume we can split
2752	 the original object if it spans an even number of hard regs.
2753	 This special case is important for SCmode on 64-bit platforms
2754	 where the natural size of floating-point regs is 32-bit.  */
2755      || (REG_P (cplx)
2756	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2757	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2758    {
2759      rtx part = simplify_gen_subreg (imode, cplx, cmode,
2760				      imag_p ? GET_MODE_SIZE (imode) : 0);
2761      if (part)
2762        {
2763	  emit_move_insn (part, val);
2764	  return;
2765	}
2766      else
2767	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2768	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2769    }
2770
2771  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2772}
2773
2774/* Extract one of the components of the complex value CPLX.  Extract the
2775   real part if IMAG_P is false, and the imaginary part if it's true.  */
2776
2777static rtx
2778read_complex_part (rtx cplx, bool imag_p)
2779{
2780  enum machine_mode cmode, imode;
2781  unsigned ibitsize;
2782
2783  if (GET_CODE (cplx) == CONCAT)
2784    return XEXP (cplx, imag_p);
2785
2786  cmode = GET_MODE (cplx);
2787  imode = GET_MODE_INNER (cmode);
2788  ibitsize = GET_MODE_BITSIZE (imode);
2789
2790  /* Special case reads from complex constants that got spilled to memory.  */
2791  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2792    {
2793      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2794      if (decl && TREE_CODE (decl) == COMPLEX_CST)
2795	{
2796	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2797	  if (CONSTANT_CLASS_P (part))
2798	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2799	}
2800    }
2801
2802  /* For MEMs simplify_gen_subreg may generate an invalid new address
2803     because, e.g., the original address is considered mode-dependent
2804     by the target, which restricts simplify_subreg from invoking
2805     adjust_address_nv.  Instead of preparing fallback support for an
2806     invalid address, we call adjust_address_nv directly.  */
2807  if (MEM_P (cplx))
2808    return adjust_address_nv (cplx, imode,
2809			      imag_p ? GET_MODE_SIZE (imode) : 0);
2810
2811  /* If the sub-object is at least word sized, then we know that subregging
2812     will work.  This special case is important, since extract_bit_field
2813     wants to operate on integer modes, and there's rarely an OImode to
2814     correspond to TCmode.  */
2815  if (ibitsize >= BITS_PER_WORD
2816      /* For hard regs we have exact predicates.  Assume we can split
2817	 the original object if it spans an even number of hard regs.
2818	 This special case is important for SCmode on 64-bit platforms
2819	 where the natural size of floating-point regs is 32-bit.  */
2820      || (REG_P (cplx)
2821	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2822	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2823    {
2824      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2825				     imag_p ? GET_MODE_SIZE (imode) : 0);
2826      if (ret)
2827        return ret;
2828      else
2829	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2830	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2831    }
2832
2833  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2834			    true, NULL_RTX, imode, imode);
2835}
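
/* An illustrative sketch, not part of the original file and guarded out
   of the build: swapping the two parts of a complex value using the
   helpers above.  Both helpers are static to this file, so the sketch
   assumes it lives here as well; the function name is made up.  */
#if 0
static void
example_swap_complex_parts (rtx cplx)
{
  rtx re = read_complex_part (cplx, false);
  rtx im = read_complex_part (cplx, true);

  /* Copy the parts into pseudos first so the writes below cannot
     clobber a part we still need to read.  */
  re = force_reg (GET_MODE (re), re);
  im = force_reg (GET_MODE (im), im);

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
}
#endif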
2836
2837/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2838   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2839   represented in NEW_MODE.  If FORCE is true, this will never happen, as
2840   we'll force-create a SUBREG if needed.  */
2841
2842static rtx
2843emit_move_change_mode (enum machine_mode new_mode,
2844		       enum machine_mode old_mode, rtx x, bool force)
2845{
2846  rtx ret;
2847
2848  if (MEM_P (x))
2849    {
2850      /* We don't have to worry about changing the address since the
2851	 size in bytes is supposed to be the same.  */
2852      if (reload_in_progress)
2853	{
2854	  /* Copy the MEM to change the mode and move any
2855	     substitutions from the old MEM to the new one.  */
2856	  ret = adjust_address_nv (x, new_mode, 0);
2857	  copy_replacements (x, ret);
2858	}
2859      else
2860	ret = adjust_address (x, new_mode, 0);
2861    }
2862  else
2863    {
2864      /* Note that we do want simplify_subreg's behavior of validating
2865	 that the new mode is ok for a hard register.  If we were to use
2866	 simplify_gen_subreg, we would create the subreg, but would
2867	 probably run into the target not being able to implement it.  */
2868      /* Except, of course, when FORCE is true, when this is exactly what
2869	 we want.  Which is needed for CCmodes on some targets.  */
2870      if (force)
2871	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2872      else
2873	ret = simplify_subreg (new_mode, x, old_mode, 0);
2874    }
2875
2876  return ret;
2877}
2878
2879/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
2880   an integer mode of the same size as MODE.  Returns the instruction
2881   emitted, or NULL if such a move could not be generated.  */
2882
2883static rtx
2884emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2885{
2886  enum machine_mode imode;
2887  enum insn_code code;
2888
2889  /* There must exist a mode of the exact size we require.  */
2890  imode = int_mode_for_mode (mode);
2891  if (imode == BLKmode)
2892    return NULL_RTX;
2893
2894  /* The target must support moves in this mode.  */
2895  code = mov_optab->handlers[imode].insn_code;
2896  if (code == CODE_FOR_nothing)
2897    return NULL_RTX;
2898
2899  x = emit_move_change_mode (imode, mode, x, force);
2900  if (x == NULL_RTX)
2901    return NULL_RTX;
2902  y = emit_move_change_mode (imode, mode, y, force);
2903  if (y == NULL_RTX)
2904    return NULL_RTX;
2905  return emit_insn (GEN_FCN (code) (x, y));
2906}
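
/* A rough illustration, not from the original sources, of the kind of
   transformation emit_move_via_integer performs.  For an SFmode copy
   such as
       (set (reg:SF 100) (mem:SF addr))
   on a target with no SFmode move pattern but an SImode one,
   int_mode_for_mode yields SImode, the operands are re-viewed in
   SImode (a subreg of the pseudo and an SImode view of the MEM), and
   an SImode move is emitted instead.  */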
2907
2908/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
2909   Return an equivalent MEM that does not use an auto-increment.  */
2910
2911static rtx
2912emit_move_resolve_push (enum machine_mode mode, rtx x)
2913{
2914  enum rtx_code code = GET_CODE (XEXP (x, 0));
2915  HOST_WIDE_INT adjust;
2916  rtx temp;
2917
2918  adjust = GET_MODE_SIZE (mode);
2919#ifdef PUSH_ROUNDING
2920  adjust = PUSH_ROUNDING (adjust);
2921#endif
2922  if (code == PRE_DEC || code == POST_DEC)
2923    adjust = -adjust;
2924  else if (code == PRE_MODIFY || code == POST_MODIFY)
2925    {
2926      rtx expr = XEXP (XEXP (x, 0), 1);
2927      HOST_WIDE_INT val;
2928
2929      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2930      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2931      val = INTVAL (XEXP (expr, 1));
2932      if (GET_CODE (expr) == MINUS)
2933	val = -val;
2934      gcc_assert (adjust == val || adjust == -val);
2935      adjust = val;
2936    }
2937
2938  /* Do not use anti_adjust_stack, since we don't want to update
2939     stack_pointer_delta.  */
2940  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2941			      GEN_INT (adjust), stack_pointer_rtx,
2942			      0, OPTAB_LIB_WIDEN);
2943  if (temp != stack_pointer_rtx)
2944    emit_move_insn (stack_pointer_rtx, temp);
2945
2946  switch (code)
2947    {
2948    case PRE_INC:
2949    case PRE_DEC:
2950    case PRE_MODIFY:
2951      temp = stack_pointer_rtx;
2952      break;
2953    case POST_INC:
2954    case POST_DEC:
2955    case POST_MODIFY:
2956      temp = plus_constant (stack_pointer_rtx, -adjust);
2957      break;
2958    default:
2959      gcc_unreachable ();
2960    }
2961
2962  return replace_equiv_address (x, temp);
2963}
2964
2965/* A subroutine of emit_move_complex.  Generate a move from Y into X.
2966   X is known to satisfy push_operand, and MODE is known to be complex.
2967   Returns the last instruction emitted.  */
2968
2969static rtx
2970emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2971{
2972  enum machine_mode submode = GET_MODE_INNER (mode);
2973  bool imag_first;
2974
2975#ifdef PUSH_ROUNDING
2976  unsigned int submodesize = GET_MODE_SIZE (submode);
2977
2978  /* If we are pushing to the stack but the size is smaller than what
2979     the machine can push exactly, we need to use move instructions.  */
2980  if (PUSH_ROUNDING (submodesize) != submodesize)
2981    {
2982      x = emit_move_resolve_push (mode, x);
2983      return emit_move_insn (x, y);
2984    }
2985#endif
2986
2987  /* Note that the real part always precedes the imag part in memory
2988     regardless of machine's endianness.  */
2989  switch (GET_CODE (XEXP (x, 0)))
2990    {
2991    case PRE_DEC:
2992    case POST_DEC:
2993      imag_first = true;
2994      break;
2995    case PRE_INC:
2996    case POST_INC:
2997      imag_first = false;
2998      break;
2999    default:
3000      gcc_unreachable ();
3001    }
3002
3003  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004		  read_complex_part (y, imag_first));
3005  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3006			 read_complex_part (y, !imag_first));
3007}
3008
3009/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3010   MODE is known to be complex.  Returns the last instruction emitted.  */
3011
3012static rtx
3013emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3014{
3015  bool try_int;
3016
3017  /* Need to take special care for pushes, to maintain proper ordering
3018     of the data, and possibly extra padding.  */
3019  if (push_operand (x, mode))
3020    return emit_move_complex_push (mode, x, y);
3021
3022  /* See if we can coerce the target into moving both values at once.  */
3023
3024  /* Move floating point as parts.  */
3025  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3026      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3027    try_int = false;
3028  /* Not possible if the values are inherently not adjacent.  */
3029  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3030    try_int = false;
3031  /* Is possible if both are registers (or subregs of registers).  */
3032  else if (register_operand (x, mode) && register_operand (y, mode))
3033    try_int = true;
3034  /* If one of the operands is a memory, and alignment constraints
3035     are friendly enough, we may be able to do combined memory operations.
3036     We do not attempt this if Y is a constant because that combination is
3037     usually better with the by-parts thing below.  */
3038  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3039	   && (!STRICT_ALIGNMENT
3040	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3041    try_int = true;
3042  else
3043    try_int = false;
3044
3045  if (try_int)
3046    {
3047      rtx ret;
3048
3049      /* For memory to memory moves, optimal behavior can be had with the
3050	 existing block move logic.  */
3051      if (MEM_P (x) && MEM_P (y))
3052	{
3053	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3054			   BLOCK_OP_NO_LIBCALL);
3055	  return get_last_insn ();
3056	}
3057
3058      ret = emit_move_via_integer (mode, x, y, true);
3059      if (ret)
3060	return ret;
3061    }
3062
3063  /* Show the output dies here.  This is necessary for SUBREGs
3064     of pseudos since we cannot track their lifetimes correctly;
3065     hard regs shouldn't appear here except as return values.  */
3066  if (!reload_completed && !reload_in_progress
3067      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3068    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3069
3070  write_complex_part (x, read_complex_part (y, false), false);
3071  write_complex_part (x, read_complex_part (y, true), true);
3072  return get_last_insn ();
3073}
3074
3075/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3076   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3077
3078static rtx
3079emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3080{
3081  rtx ret;
3082
3083  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3084  if (mode != CCmode)
3085    {
3086      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3087      if (code != CODE_FOR_nothing)
3088	{
3089	  x = emit_move_change_mode (CCmode, mode, x, true);
3090	  y = emit_move_change_mode (CCmode, mode, y, true);
3091	  return emit_insn (GEN_FCN (code) (x, y));
3092	}
3093    }
3094
3095  /* Otherwise, find the MODE_INT mode of the same width.  */
3096  ret = emit_move_via_integer (mode, x, y, false);
3097  gcc_assert (ret != NULL);
3098  return ret;
3099}
3100
3101/* Return true if word I of OP lies entirely in the
3102   undefined bits of a paradoxical subreg.  */
3103
3104static bool
3105undefined_operand_subword_p (rtx op, int i)
3106{
3107  enum machine_mode innermode, innermostmode;
3108  int offset;
3109  if (GET_CODE (op) != SUBREG)
3110    return false;
3111  innermode = GET_MODE (op);
3112  innermostmode = GET_MODE (SUBREG_REG (op));
3113  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3114  /* The SUBREG_BYTE represents the offset, as if the value were stored in
3115     memory, except for a paradoxical subreg where we define
3116     SUBREG_BYTE to be 0; undo this exception as in
3117     simplify_subreg.  */
3118  if (SUBREG_BYTE (op) == 0
3119      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3120    {
3121      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3122      if (WORDS_BIG_ENDIAN)
3123	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3124      if (BYTES_BIG_ENDIAN)
3125	offset += difference % UNITS_PER_WORD;
3126    }
3127  if (offset >= GET_MODE_SIZE (innermostmode)
3128      || offset <= -GET_MODE_SIZE (word_mode))
3129    return true;
3130  return false;
3131}
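
/* An illustrative example, not from the original sources: on a
   hypothetical 32-bit little-endian target, word 1 of the paradoxical
   subreg
       (subreg:DI (reg:SI 100) 0)
   lies entirely outside the SImode source value, so
   undefined_operand_subword_p returns true for it and
   emit_move_multi_word below simply skips that word.  */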
3132
3133/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3134   MODE is any multi-word or full-word mode that lacks a move_insn
3135   pattern.  Note that you will get better code if you define such
3136   patterns, even if they must turn into multiple assembler instructions.  */
3137
3138static rtx
3139emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3140{
3141  rtx last_insn = 0;
3142  rtx seq, inner;
3143  bool need_clobber;
3144  int i;
3145
3146  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3147
3148  /* If X is a push on the stack, do the push now and replace
3149     X with a reference to the stack pointer.  */
3150  if (push_operand (x, mode))
3151    x = emit_move_resolve_push (mode, x);
3152
3153  /* If we are in reload, see if either operand is a MEM whose address
3154     is scheduled for replacement.  */
3155  if (reload_in_progress && MEM_P (x)
3156      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3157    x = replace_equiv_address_nv (x, inner);
3158  if (reload_in_progress && MEM_P (y)
3159      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3160    y = replace_equiv_address_nv (y, inner);
3161
3162  start_sequence ();
3163
3164  need_clobber = false;
3165  for (i = 0;
3166       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3167       i++)
3168    {
3169      rtx xpart = operand_subword (x, i, 1, mode);
3170      rtx ypart;
3171
3172      /* Do not generate code for a move if it would come entirely
3173	 from the undefined bits of a paradoxical subreg.  */
3174      if (undefined_operand_subword_p (y, i))
3175	continue;
3176
3177      ypart = operand_subword (y, i, 1, mode);
3178
3179      /* If we can't get a part of Y, put Y into memory if it is a
3180	 constant.  Otherwise, force it into a register.  Then we must
3181	 be able to get a part of Y.  */
3182      if (ypart == 0 && CONSTANT_P (y))
3183	{
3184	  y = use_anchored_address (force_const_mem (mode, y));
3185	  ypart = operand_subword (y, i, 1, mode);
3186	}
3187      else if (ypart == 0)
3188	ypart = operand_subword_force (y, i, mode);
3189
3190      gcc_assert (xpart && ypart);
3191
3192      need_clobber |= (GET_CODE (xpart) == SUBREG);
3193
3194      last_insn = emit_move_insn (xpart, ypart);
3195    }
3196
3197  seq = get_insns ();
3198  end_sequence ();
3199
3200  /* Show the output dies here.  This is necessary for SUBREGs
3201     of pseudos since we cannot track their lifetimes correctly;
3202     hard regs shouldn't appear here except as return values.
3203     We never want to emit such a clobber after reload.  */
3204  if (x != y
3205      && ! (reload_in_progress || reload_completed)
3206      && need_clobber != 0)
3207    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3208
3209  emit_insn (seq);
3210
3211  return last_insn;
3212}
3213
3214/* Low level part of emit_move_insn.
3215   Called just like emit_move_insn, but assumes X and Y
3216   are basically valid.  */
3217
3218rtx
3219emit_move_insn_1 (rtx x, rtx y)
3220{
3221  enum machine_mode mode = GET_MODE (x);
3222  enum insn_code code;
3223
3224  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3225
3226  code = mov_optab->handlers[mode].insn_code;
3227  if (code != CODE_FOR_nothing)
3228    return emit_insn (GEN_FCN (code) (x, y));
3229
3230  /* Expand complex moves by moving real part and imag part.  */
3231  if (COMPLEX_MODE_P (mode))
3232    return emit_move_complex (mode, x, y);
3233
3234  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3235    {
3236      rtx result = emit_move_via_integer (mode, x, y, true);
3237
3238      /* If we can't find an integer mode, use multi words.  */
3239      if (result)
3240	return result;
3241      else
3242	return emit_move_multi_word (mode, x, y);
3243    }
3244
3245  if (GET_MODE_CLASS (mode) == MODE_CC)
3246    return emit_move_ccmode (mode, x, y);
3247
3248  /* Try using a move pattern for the corresponding integer mode.  This is
3249     only safe when simplify_subreg can convert MODE constants into integer
3250     constants.  At present, it can only do this reliably if the value
3251     fits within a HOST_WIDE_INT.  */
3252  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3253    {
3254      rtx ret = emit_move_via_integer (mode, x, y, false);
3255      if (ret)
3256	return ret;
3257    }
3258
3259  return emit_move_multi_word (mode, x, y);
3260}
3261
3262/* Generate code to copy Y into X.
3263   Both Y and X must have the same mode, except that
3264   Y can be a constant with VOIDmode.
3265   This mode cannot be BLKmode; use emit_block_move for that.
3266
3267   Return the last instruction emitted.  */
3268
3269rtx
3270emit_move_insn (rtx x, rtx y)
3271{
3272  enum machine_mode mode = GET_MODE (x);
3273  rtx y_cst = NULL_RTX;
3274  rtx last_insn, set;
3275
3276  gcc_assert (mode != BLKmode
3277	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3278
3279  if (CONSTANT_P (y))
3280    {
3281      if (optimize
3282	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3283	  && (last_insn = compress_float_constant (x, y)))
3284	return last_insn;
3285
3286      y_cst = y;
3287
3288      if (!LEGITIMATE_CONSTANT_P (y))
3289	{
3290	  y = force_const_mem (mode, y);
3291
3292	  /* If the target's cannot_force_const_mem prevented the spill,
3293	     assume that the target's move expanders will also take care
3294	     of the non-legitimate constant.  */
3295	  if (!y)
3296	    y = y_cst;
3297	  else
3298	    y = use_anchored_address (y);
3299	}
3300    }
3301
3302  /* If X or Y are memory references, verify that their addresses are valid
3303     for the machine.  */
3304  if (MEM_P (x)
3305      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3306	   && ! push_operand (x, GET_MODE (x)))
3307	  || (flag_force_addr
3308	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3309    x = validize_mem (x);
3310
3311  if (MEM_P (y)
3312      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3313	  || (flag_force_addr
3314	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3315    y = validize_mem (y);
3316
3317  gcc_assert (mode != BLKmode);
3318
3319  last_insn = emit_move_insn_1 (x, y);
3320
3321  if (y_cst && REG_P (x)
3322      && (set = single_set (last_insn)) != NULL_RTX
3323      && SET_DEST (set) == x
3324      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3325    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3326
3327  return last_insn;
3328}
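
/* A minimal usage sketch, not part of the original file and guarded out
   of the build; the constant and the function name are arbitrary.  */
#if 0
static void
example_emit_moves (void)
{
  rtx tmp = gen_reg_rtx (SImode);

  /* A constant source: emit_move_insn legitimizes it if needed and,
     because TMP is a REG, attaches a REG_EQUAL note for CSE.  */
  emit_move_insn (tmp, GEN_INT (42));

  /* A plain register-to-register copy into a second pseudo.  */
  emit_move_insn (gen_reg_rtx (SImode), tmp);
}
#endif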
3329
3330/* If Y is representable exactly in a narrower mode, and the target can
3331   perform the extension directly from constant or memory, then emit the
3332   move as an extension.  */
3333
3334static rtx
3335compress_float_constant (rtx x, rtx y)
3336{
3337  enum machine_mode dstmode = GET_MODE (x);
3338  enum machine_mode orig_srcmode = GET_MODE (y);
3339  enum machine_mode srcmode;
3340  REAL_VALUE_TYPE r;
3341  int oldcost, newcost;
3342
3343  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3344
3345  if (LEGITIMATE_CONSTANT_P (y))
3346    oldcost = rtx_cost (y, SET);
3347  else
3348    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3349
3350  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3351       srcmode != orig_srcmode;
3352       srcmode = GET_MODE_WIDER_MODE (srcmode))
3353    {
3354      enum insn_code ic;
3355      rtx trunc_y, last_insn;
3356
3357      /* Skip if the target can't extend this way.  */
3358      ic = can_extend_p (dstmode, srcmode, 0);
3359      if (ic == CODE_FOR_nothing)
3360	continue;
3361
3362      /* Skip if the narrowed value isn't exact.  */
3363      if (! exact_real_truncate (srcmode, &r))
3364	continue;
3365
3366      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3367
3368      if (LEGITIMATE_CONSTANT_P (trunc_y))
3369	{
3370	  /* Skip if the target needs extra instructions to perform
3371	     the extension.  */
3372	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3373	    continue;
3374	  /* This is valid, but may not be cheaper than the original. */
3375	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3376	  if (oldcost < newcost)
3377	    continue;
3378	}
3379      else if (float_extend_from_mem[dstmode][srcmode])
3380	{
3381	  trunc_y = force_const_mem (srcmode, trunc_y);
3382	  /* This is valid, but may not be cheaper than the original. */
3383	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3384	  if (oldcost < newcost)
3385	    continue;
3386	  trunc_y = validize_mem (trunc_y);
3387	}
3388      else
3389	continue;
3390
3391      /* For CSE's benefit, force the compressed constant pool entry
3392	 into a new pseudo.  This constant may be used in different modes,
3393	 and if not, combine will put things back together for us.  */
3394      trunc_y = force_reg (srcmode, trunc_y);
3395      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3396      last_insn = get_last_insn ();
3397
3398      if (REG_P (x))
3399	set_unique_reg_note (last_insn, REG_EQUAL, y);
3400
3401      return last_insn;
3402    }
3403
3404  return NULL_RTX;
3405}
3406
3407/* Pushing data onto the stack.  */
3408
3409/* Push a block of length SIZE (perhaps variable)
3410   and return an rtx to address the beginning of the block.
3411   The value may be virtual_outgoing_args_rtx.
3412
3413   EXTRA is the number of bytes of padding to push in addition to SIZE.
3414   BELOW nonzero means this padding comes at low addresses;
3415   otherwise, the padding comes at high addresses.  */
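/* A hypothetical use, allocating a 64-byte block with 8 extra bytes of
   padding at low addresses (sketch only, not a call made in this file):

     rtx addr = push_block (GEN_INT (64), 8, 1);
     rtx blk = gen_rtx_MEM (BLKmode, addr);

   ADDR then addresses the beginning of the block as described above.  */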
3416
3417rtx
3418push_block (rtx size, int extra, int below)
3419{
3420  rtx temp;
3421
3422  size = convert_modes (Pmode, ptr_mode, size, 1);
3423  if (CONSTANT_P (size))
3424    anti_adjust_stack (plus_constant (size, extra));
3425  else if (REG_P (size) && extra == 0)
3426    anti_adjust_stack (size);
3427  else
3428    {
3429      temp = copy_to_mode_reg (Pmode, size);
3430      if (extra != 0)
3431	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3432			     temp, 0, OPTAB_LIB_WIDEN);
3433      anti_adjust_stack (temp);
3434    }
3435
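  /* With STACK_GROWS_DOWNWARD defined this selects the first arm below,
     otherwise the second; writing it as if (0) / if (1) rather than
     wrapping whole blocks in #ifdef keeps both arms syntax-checked.  */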
3436#ifndef STACK_GROWS_DOWNWARD
3437  if (0)
3438#else
3439  if (1)
3440#endif
3441    {
3442      temp = virtual_outgoing_args_rtx;
3443      if (extra != 0 && below)
3444	temp = plus_constant (temp, extra);
3445    }
3446  else
3447    {
3448      if (GET_CODE (size) == CONST_INT)
3449	temp = plus_constant (virtual_outgoing_args_rtx,
3450			      -INTVAL (size) - (below ? 0 : extra));
3451      else if (extra != 0 && !below)
3452	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3453			     negate_rtx (Pmode, plus_constant (size, extra)));
3454      else
3455	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3456			     negate_rtx (Pmode, size));
3457    }
3458
3459  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3460}
3461
3462#ifdef PUSH_ROUNDING
3463
3464/* Emit a single push insn.  */
3465
3466static void
3467emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3468{
3469  rtx dest_addr;
3470  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3471  rtx dest;
3472  enum insn_code icode;
3473  insn_operand_predicate_fn pred;
3474
3475  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3476  /* If there is a push pattern, use it.  Otherwise fall back to the old
3477     way of handing a MEM that represents the push to the move expander.  */
3478  icode = push_optab->handlers[(int) mode].insn_code;
3479  if (icode != CODE_FOR_nothing)
3480    {
3481      if (((pred = insn_data[(int) icode].operand[0].predicate)
3482	   && !((*pred) (x, mode))))
3483	x = force_reg (mode, x);
3484      emit_insn (GEN_FCN (icode) (x));
3485      return;
3486    }
3487  if (GET_MODE_SIZE (mode) == rounded_size)
3488    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3489  /* If we are to pad downward, adjust the stack pointer first and
3490     then store X into the stack location using an offset.  This is
3491     because emit_move_insn does not know how to pad; it does not have
3492     access to the type.  */
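  /* For example, with a 1-byte argument padded downward, PUSH_ROUNDING
     returning 4, and a downward-growing pre-decrement stack, ROUNDED_SIZE
     is 4 and the padding is 3 bytes: the stack pointer is dropped by 4
     and the byte is stored at sp + 3, with the padding below it.  */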
3493  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3494    {
3495      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3496      HOST_WIDE_INT offset;
3497
3498      emit_move_insn (stack_pointer_rtx,
3499		      expand_binop (Pmode,
3500#ifdef STACK_GROWS_DOWNWARD
3501				    sub_optab,
3502#else
3503				    add_optab,
3504#endif
3505				    stack_pointer_rtx,
3506				    GEN_INT (rounded_size),
3507				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3508
3509      offset = (HOST_WIDE_INT) padding_size;
3510#ifdef STACK_GROWS_DOWNWARD
3511      if (STACK_PUSH_CODE == POST_DEC)
3512	/* We have already decremented the stack pointer, so get the
3513	   previous value.  */
3514	offset += (HOST_WIDE_INT) rounded_size;
3515#else
3516      if (STACK_PUSH_CODE == POST_INC)
3517	/* We have already incremented the stack pointer, so get the
3518	   previous value.  */
3519	offset -= (HOST_WIDE_INT) rounded_size;
3520#endif
3521      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3522    }
3523  else
3524    {
3525#ifdef STACK_GROWS_DOWNWARD
3526      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3527      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3528				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3529#else
3530      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3531      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3532				GEN_INT (rounded_size));
3533#endif
3534      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3535    }
3536
3537  dest = gen_rtx_MEM (mode, dest_addr);
3538
3539  if (type != 0)
3540    {
3541      set_mem_attributes (dest, type, 1);
3542
3543      if (flag_optimize_sibling_calls)
3544	/* Function incoming arguments may overlap with sibling call
3545	   outgoing arguments and we cannot allow reordering of reads
3546	   from function arguments with stores to outgoing arguments
3547	   of sibling calls.  */
3548	set_mem_alias_set (dest, 0);
3549    }
3550  emit_move_insn (dest, x);
3551}
3552#endif
3553
3554/* Generate code to push X onto the stack, assuming it has mode MODE and
3555   type TYPE.
3556   MODE is redundant except when X is a CONST_INT (since they don't
3557   carry mode info).
3558   SIZE is an rtx for the size of data to be copied (in bytes),
3559   needed only if X is BLKmode.
3560
3561   ALIGN (in bits) is maximum alignment we can assume.
3562
3563   If PARTIAL and REG are both nonzero, then copy that many of the first
3564   bytes of X into registers starting with REG, and push the rest of X.
3565   The amount of space pushed is decreased by PARTIAL bytes.
3566   REG must be a hard register in this case.
3567   If REG is zero but PARTIAL is not, take all other actions for an
3568   argument partially in registers, but do not actually load any
3569   registers.
3570
3571   EXTRA is the amount in bytes of extra space to leave next to this arg.
3572   This is ignored if an argument block has already been allocated.
3573
3574   On a machine that lacks real push insns, ARGS_ADDR is the address of
3575   the bottom of the argument block for this call.  We use indexing off there
3576   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3577   argument block has not been preallocated.
3578
3579   ARGS_SO_FAR is the size of args previously pushed for this call.
3580
3581   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3582   for arguments passed in registers.  If nonzero, it will be the number
3583   of bytes required.  */
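/* As an illustration only (no such call appears in this file), pushing a
   word-sized value X on a target with push insns and no preallocated
   argument block might look like:

     emit_push_insn (x, word_mode, integer_type_node, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                     const0_rtx, 0, NULL_RTX);

   which then reduces to a single emit_single_push_insn call below.  */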
3584
3585void
3586emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3587		unsigned int align, int partial, rtx reg, int extra,
3588		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3589		rtx alignment_pad)
3590{
3591  rtx xinner;
3592  enum direction stack_direction
3593#ifdef STACK_GROWS_DOWNWARD
3594    = downward;
3595#else
3596    = upward;
3597#endif
3598
3599  /* Decide where to pad the argument: `downward' for below,
3600     `upward' for above, or `none' for don't pad it.
3601     Default is below for small data on big-endian machines; else above.  */
3602  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3603
3604  /* Invert direction if stack is post-decrement.
3605     FIXME: why?  */
3606  if (STACK_PUSH_CODE == POST_DEC)
3607    if (where_pad != none)
3608      where_pad = (where_pad == downward ? upward : downward);
3609
3610  xinner = x;
3611
3612  if (mode == BLKmode)
3613    {
3614      /* Copy a block into the stack, entirely or partially.  */
3615
3616      rtx temp;
3617      int used;
3618      int offset;
3619      int skip;
3620
3621      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3622      used = partial - offset;
3623
3624      gcc_assert (size);
3625
3626      /* USED is now the # of bytes we need not copy to the stack
3627	 because registers will take care of them.  */
3628
3629      if (partial != 0)
3630	xinner = adjust_address (xinner, BLKmode, used);
3631
3632      /* If the partial register-part of the arg counts in its stack size,
3633	 skip the part of stack space corresponding to the registers.
3634	 Otherwise, start copying to the beginning of the stack space,
3635	 by setting SKIP to 0.  */
3636      skip = (reg_parm_stack_space == 0) ? 0 : used;
3637
3638#ifdef PUSH_ROUNDING
3639      /* Do it with several push insns if that doesn't take lots of insns
3640	 and if there is no difficulty with push insns that skip bytes
3641	 on the stack for alignment purposes.  */
3642      if (args_addr == 0
3643	  && PUSH_ARGS
3644	  && GET_CODE (size) == CONST_INT
3645	  && skip == 0
3646	  && MEM_ALIGN (xinner) >= align
3647	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3648	  /* Here we avoid the case of a structure whose weak alignment
3649	     forces many pushes of a small amount of data,
3650	     and such small pushes do rounding that causes trouble.  */
3651	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3652	      || align >= BIGGEST_ALIGNMENT
3653	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3654		  == (align / BITS_PER_UNIT)))
3655	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3656	{
3657	  /* Push padding now if padding above and stack grows down,
3658	     or if padding below and stack grows up.
3659	     But if space already allocated, this has already been done.  */
3660	  if (extra && args_addr == 0
3661	      && where_pad != none && where_pad != stack_direction)
3662	    anti_adjust_stack (GEN_INT (extra));
3663
3664	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3665	}
3666      else
3667#endif /* PUSH_ROUNDING  */
3668	{
3669	  rtx target;
3670
3671	  /* Otherwise make space on the stack and copy the data
3672	     to the address of that space.  */
3673
3674	  /* Deduct words put into registers from the size we must copy.  */
3675	  if (partial != 0)
3676	    {
3677	      if (GET_CODE (size) == CONST_INT)
3678		size = GEN_INT (INTVAL (size) - used);
3679	      else
3680		size = expand_binop (GET_MODE (size), sub_optab, size,
3681				     GEN_INT (used), NULL_RTX, 0,
3682				     OPTAB_LIB_WIDEN);
3683	    }
3684
3685	  /* Get the address of the stack space.
3686	     In this case, we do not deal with EXTRA separately.
3687	     A single stack adjust will do.  */
3688	  if (! args_addr)
3689	    {
3690	      temp = push_block (size, extra, where_pad == downward);
3691	      extra = 0;
3692	    }
3693	  else if (GET_CODE (args_so_far) == CONST_INT)
3694	    temp = memory_address (BLKmode,
3695				   plus_constant (args_addr,
3696						  skip + INTVAL (args_so_far)));
3697	  else
3698	    temp = memory_address (BLKmode,
3699				   plus_constant (gen_rtx_PLUS (Pmode,
3700								args_addr,
3701								args_so_far),
3702						  skip));
3703
3704	  if (!ACCUMULATE_OUTGOING_ARGS)
3705	    {
3706	      /* If the source is referenced relative to the stack pointer,
3707		 copy it to another register to stabilize it.  We do not need
3708		 to do this if we know that we won't be changing sp.  */
3709
3710	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3711		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3712		temp = copy_to_reg (temp);
3713	    }
3714
3715	  target = gen_rtx_MEM (BLKmode, temp);
3716
3717	  /* We do *not* set_mem_attributes here, because incoming arguments
3718	     may overlap with sibling call outgoing arguments and we cannot
3719	     allow reordering of reads from function arguments with stores
3720	     to outgoing arguments of sibling calls.  We do, however, want
3721	     to record the alignment of the stack slot.  */
3722	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3723	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3724	  set_mem_align (target, align);
3725
3726	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3727	}
3728    }
3729  else if (partial > 0)
3730    {
3731      /* Scalar partly in registers.  */
3732
3733      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3734      int i;
3735      int not_stack;
3736      /* # bytes of start of argument
3737	 that we must make space for but need not store.  */
3738      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3739      int args_offset = INTVAL (args_so_far);
3740      int skip;
3741
3742      /* Push padding now if padding above and stack grows down,
3743	 or if padding below and stack grows up.
3744	 But if space already allocated, this has already been done.  */
3745      if (extra && args_addr == 0
3746	  && where_pad != none && where_pad != stack_direction)
3747	anti_adjust_stack (GEN_INT (extra));
3748
3749      /* If we make space by pushing it, we might as well push
3750	 the real data.  Otherwise, we can leave OFFSET nonzero
3751	 and leave the space uninitialized.  */
3752      if (args_addr == 0)
3753	offset = 0;
3754
3755      /* Now NOT_STACK gets the number of words that we don't need to
3756	 allocate on the stack.  Convert OFFSET to words too.  */
3757      not_stack = (partial - offset) / UNITS_PER_WORD;
3758      offset /= UNITS_PER_WORD;
3759
3760      /* If the partial register-part of the arg counts in its stack size,
3761	 skip the part of stack space corresponding to the registers.
3762	 Otherwise, start copying to the beginning of the stack space,
3763	 by setting SKIP to 0.  */
3764      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3765
3766      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3767	x = validize_mem (force_const_mem (mode, x));
3768
3769      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3770	 SUBREGs of such registers are not allowed.  */
3771      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3772	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3773	x = copy_to_reg (x);
3774
3775      /* Loop over all the words allocated on the stack for this arg.  */
3776      /* We can do it by words, because any scalar bigger than a word
3777	 has a size that is a multiple of a word.  */
3778#ifndef PUSH_ARGS_REVERSED
3779      for (i = not_stack; i < size; i++)
3780#else
3781      for (i = size - 1; i >= not_stack; i--)
3782#endif
3783	if (i >= not_stack + offset)
3784	  emit_push_insn (operand_subword_force (x, i, mode),
3785			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3786			  0, args_addr,
3787			  GEN_INT (args_offset + ((i - not_stack + skip)
3788						  * UNITS_PER_WORD)),
3789			  reg_parm_stack_space, alignment_pad);
3790    }
3791  else
3792    {
3793      rtx addr;
3794      rtx dest;
3795
3796      /* Push padding now if padding above and stack grows down,
3797	 or if padding below and stack grows up.
3798	 But if space already allocated, this has already been done.  */
3799      if (extra && args_addr == 0
3800	  && where_pad != none && where_pad != stack_direction)
3801	anti_adjust_stack (GEN_INT (extra));
3802
3803#ifdef PUSH_ROUNDING
3804      if (args_addr == 0 && PUSH_ARGS)
3805	emit_single_push_insn (mode, x, type);
3806      else
3807#endif
3808	{
3809	  if (GET_CODE (args_so_far) == CONST_INT)
3810	    addr
3811	      = memory_address (mode,
3812				plus_constant (args_addr,
3813					       INTVAL (args_so_far)));
3814	  else
3815	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3816						       args_so_far));
3817	  dest = gen_rtx_MEM (mode, addr);
3818
3819	  /* We do *not* set_mem_attributes here, because incoming arguments
3820	     may overlap with sibling call outgoing arguments and we cannot
3821	     allow reordering of reads from function arguments with stores
3822	     to outgoing arguments of sibling calls.  We do, however, want
3823	     to record the alignment of the stack slot.  */
3824	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3825	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3826	  set_mem_align (dest, align);
3827
3828	  emit_move_insn (dest, x);
3829	}
3830    }
3831
3832  /* If part should go in registers, copy that part
3833     into the appropriate registers.  Do this now, at the end,
3834     since mem-to-mem copies above may do function calls.  */
3835  if (partial > 0 && reg != 0)
3836    {
3837      /* Handle calls that pass values in multiple non-contiguous locations.
3838	 The Irix 6 ABI has examples of this.  */
3839      if (GET_CODE (reg) == PARALLEL)
3840	emit_group_load (reg, x, type, -1);
3841      else
3842	{
3843	  gcc_assert (partial % UNITS_PER_WORD == 0);
3844	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3845	}
3846    }
3847
3848  if (extra && args_addr == 0 && where_pad == stack_direction)
3849    anti_adjust_stack (GEN_INT (extra));
3850
3851  if (alignment_pad && args_addr == 0)
3852    anti_adjust_stack (alignment_pad);
3853}
3854
3855/* Return X if X can be used as a subtarget in a sequence of arithmetic
3856   operations.  */
3857
3858static rtx
3859get_subtarget (rtx x)
3860{
3861  return (optimize
3862          || x == 0
3863	   /* Only registers can be subtargets.  */
3864	   || !REG_P (x)
3865	   /* Don't use hard regs to avoid extending their life.  */
3866	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3867	  ? 0 : x);
3868}
3869
3870/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
3871   FIELD is a bitfield.  Returns true if the optimization was successful,
3872   and there's nothing else to do.  */
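/* For example, given struct { unsigned f : 3; } *p and the statement
   p->f |= 5, when the containing word can be operated on directly the
   code below ORs the (masked and shifted) constant into that word rather
   than going through a general extract/insert bit-field sequence.  This
   is only an illustration; the exact conditions are checked below.  */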
3873
3874static bool
3875optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3876				 unsigned HOST_WIDE_INT bitpos,
3877				 enum machine_mode mode1, rtx str_rtx,
3878				 tree to, tree src)
3879{
3880  enum machine_mode str_mode = GET_MODE (str_rtx);
3881  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3882  tree op0, op1;
3883  rtx value, result;
3884  optab binop;
3885
3886  if (mode1 != VOIDmode
3887      || bitsize >= BITS_PER_WORD
3888      || str_bitsize > BITS_PER_WORD
3889      || TREE_SIDE_EFFECTS (to)
3890      || TREE_THIS_VOLATILE (to))
3891    return false;
3892
3893  STRIP_NOPS (src);
3894  if (!BINARY_CLASS_P (src)
3895      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3896    return false;
3897
3898  op0 = TREE_OPERAND (src, 0);
3899  op1 = TREE_OPERAND (src, 1);
3900  STRIP_NOPS (op0);
3901
3902  if (!operand_equal_p (to, op0, 0))
3903    return false;
3904
3905  if (MEM_P (str_rtx))
3906    {
3907      unsigned HOST_WIDE_INT offset1;
3908
3909      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3910	str_mode = word_mode;
3911      str_mode = get_best_mode (bitsize, bitpos,
3912				MEM_ALIGN (str_rtx), str_mode, 0);
3913      if (str_mode == VOIDmode)
3914	return false;
3915      str_bitsize = GET_MODE_BITSIZE (str_mode);
3916
3917      offset1 = bitpos;
3918      bitpos %= str_bitsize;
3919      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3920      str_rtx = adjust_address (str_rtx, str_mode, offset1);
3921    }
3922  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3923    return false;
3924
3925  /* If the bit field covers the whole REG/MEM, store_field
3926     will likely generate better code.  */
3927  if (bitsize >= str_bitsize)
3928    return false;
3929
3930  /* We can't handle fields split across multiple entities.  */
3931  if (bitpos + bitsize > str_bitsize)
3932    return false;
3933
3934  if (BYTES_BIG_ENDIAN)
3935    bitpos = str_bitsize - bitpos - bitsize;
3936
3937  switch (TREE_CODE (src))
3938    {
3939    case PLUS_EXPR:
3940    case MINUS_EXPR:
3941      /* For now, just optimize the case of the topmost bitfield,
3942	 where we don't need to do any masking, and also
3943	 1-bit bitfields where xor can be used.
3944	 We might win by one instruction for the other bitfields
3945	 too if insv/extv instructions aren't used, so that
3946	 can be added later.  */
3947      if (bitpos + bitsize != str_bitsize
3948	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3949	break;
3950
3951      value = expand_expr (op1, NULL_RTX, str_mode, 0);
3952      value = convert_modes (str_mode,
3953			     TYPE_MODE (TREE_TYPE (op1)), value,
3954			     TYPE_UNSIGNED (TREE_TYPE (op1)));
3955
3956      /* We may be accessing data outside the field, which means
3957	 we can alias adjacent data.  */
3958      if (MEM_P (str_rtx))
3959	{
3960	  str_rtx = shallow_copy_rtx (str_rtx);
3961	  set_mem_alias_set (str_rtx, 0);
3962	  set_mem_expr (str_rtx, 0);
3963	}
3964
3965      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3966      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3967	{
3968	  value = expand_and (str_mode, value, const1_rtx, NULL);
3969	  binop = xor_optab;
3970	}
3971      value = expand_shift (LSHIFT_EXPR, str_mode, value,
3972			    build_int_cst (NULL_TREE, bitpos),
3973			    NULL_RTX, 1);
3974      result = expand_binop (str_mode, binop, str_rtx,
3975			     value, str_rtx, 1, OPTAB_WIDEN);
3976      if (result != str_rtx)
3977	emit_move_insn (str_rtx, result);
3978      return true;
3979
3980    case BIT_IOR_EXPR:
3981    case BIT_XOR_EXPR:
3982      if (TREE_CODE (op1) != INTEGER_CST)
3983	break;
3984      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3985      value = convert_modes (GET_MODE (str_rtx),
3986			     TYPE_MODE (TREE_TYPE (op1)), value,
3987			     TYPE_UNSIGNED (TREE_TYPE (op1)));
3988
3989      /* We may be accessing data outside the field, which means
3990	 we can alias adjacent data.  */
3991      if (MEM_P (str_rtx))
3992	{
3993	  str_rtx = shallow_copy_rtx (str_rtx);
3994	  set_mem_alias_set (str_rtx, 0);
3995	  set_mem_expr (str_rtx, 0);
3996	}
3997
3998      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3999      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4000	{
4001	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4002			      - 1);
4003	  value = expand_and (GET_MODE (str_rtx), value, mask,
4004			      NULL_RTX);
4005	}
4006      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4007			    build_int_cst (NULL_TREE, bitpos),
4008			    NULL_RTX, 1);
4009      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4010			     value, str_rtx, 1, OPTAB_WIDEN);
4011      if (result != str_rtx)
4012	emit_move_insn (str_rtx, result);
4013      return true;
4014
4015    default:
4016      break;
4017    }
4018
4019  return false;
4020}
4021
4022
4023/* Expand an assignment that stores the value of FROM into TO.  */
4024
4025void
4026expand_assignment (tree to, tree from)
4027{
4028  rtx to_rtx = 0;
4029  rtx result;
4030
4031  /* Don't crash if the lhs of the assignment was erroneous.  */
4032  if (TREE_CODE (to) == ERROR_MARK)
4033    {
4034      result = expand_normal (from);
4035      return;
4036    }
4037
4038  /* Optimize away no-op moves without side-effects.  */
4039  if (operand_equal_p (to, from, 0))
4040    return;
4041
4042  /* Assignment of a structure component needs special treatment
4043     if the structure component's rtx is not simply a MEM.
4044     Assignment of an array element at a constant index, and assignment of
4045     an array element in an unaligned packed structure field, have the same
4046     problem.  */
4047  if (handled_component_p (to)
4048      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4049    {
4050      enum machine_mode mode1;
4051      HOST_WIDE_INT bitsize, bitpos;
4052      tree offset;
4053      int unsignedp;
4054      int volatilep = 0;
4055      tree tem;
4056
4057      push_temp_slots ();
4058      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4059				 &unsignedp, &volatilep, true);
4060
4061      /* If we are going to use store_bit_field and extract_bit_field,
4062	 make sure to_rtx will be safe for multiple use.  */
4063
4064      to_rtx = expand_normal (tem);
4065
4066      if (offset != 0)
4067	{
4068	  rtx offset_rtx;
4069
4070	  if (!MEM_P (to_rtx))
4071	    {
4072	      /* We can get constant negative offsets into arrays with broken
4073		 user code.  Translate this to a trap instead of ICEing.  */
4074	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4075	      expand_builtin_trap ();
4076	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4077	    }
4078
4079	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4080#ifdef POINTERS_EXTEND_UNSIGNED
4081	  if (GET_MODE (offset_rtx) != Pmode)
4082	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4083#else
4084	  if (GET_MODE (offset_rtx) != ptr_mode)
4085	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4086#endif
4087
4088	  /* A constant address in TO_RTX can have VOIDmode; we must not try
4089	     to call force_reg in that case.  */
4090	  if (MEM_P (to_rtx)
4091	      && GET_MODE (to_rtx) == BLKmode
4092	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4093	      && bitsize > 0
4094	      && (bitpos % bitsize) == 0
4095	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4096	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4097	    {
4098	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4099	      bitpos = 0;
4100	    }
4101
4102	  to_rtx = offset_address (to_rtx, offset_rtx,
4103				   highest_pow2_factor_for_target (to,
4104				   				   offset));
4105	}
4106
4107      /* Handle expand_expr of a complex value returning a CONCAT.  */
4108      if (GET_CODE (to_rtx) == CONCAT)
4109	{
4110	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4111	    {
4112	      gcc_assert (bitpos == 0);
4113	      result = store_expr (from, to_rtx, false);
4114	    }
4115	  else
4116	    {
4117	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4118	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4119	    }
4120	}
4121      else
4122	{
4123	  if (MEM_P (to_rtx))
4124	    {
4125	      /* If the field is at offset zero, we could have been given the
4126		 DECL_RTX of the parent struct.  Don't munge it.  */
4127	      to_rtx = shallow_copy_rtx (to_rtx);
4128
4129	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4130
4131	      /* Deal with volatile and readonly fields.  The former is only
4132		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4133	      if (volatilep)
4134		MEM_VOLATILE_P (to_rtx) = 1;
4135	      if (component_uses_parent_alias_set (to))
4136		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4137	    }
4138
4139	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4140					       to_rtx, to, from))
4141	    result = NULL;
4142	  else
4143	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4144				  TREE_TYPE (tem), get_alias_set (to));
4145	}
4146
4147      if (result)
4148	preserve_temp_slots (result);
4149      free_temp_slots ();
4150      pop_temp_slots ();
4151      return;
4152    }
4153
4154  /* If the rhs is a function call and its value is not an aggregate,
4155     call the function before we start to compute the lhs.
4156     This is needed for correct code for cases such as
4157     val = setjmp (buf) on machines where reference to val
4158     requires loading up part of an address in a separate insn.
4159
4160     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG
4161     since it might be a promoted variable where the zero- or sign- extension
4162     needs to be done.  Handling this in the normal way is safe because no
4163     computation is done before the call.  */
4164  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4165      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4166      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4167	    && REG_P (DECL_RTL (to))))
4168    {
4169      rtx value;
4170
4171      push_temp_slots ();
4172      value = expand_normal (from);
4173      if (to_rtx == 0)
4174	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4175
4176      /* Handle calls that return values in multiple non-contiguous locations.
4177	 The Irix 6 ABI has examples of this.  */
4178      if (GET_CODE (to_rtx) == PARALLEL)
4179	emit_group_load (to_rtx, value, TREE_TYPE (from),
4180			 int_size_in_bytes (TREE_TYPE (from)));
4181      else if (GET_MODE (to_rtx) == BLKmode)
4182	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4183      else
4184	{
4185	  if (POINTER_TYPE_P (TREE_TYPE (to)))
4186	    value = convert_memory_address (GET_MODE (to_rtx), value);
4187	  emit_move_insn (to_rtx, value);
4188	}
4189      preserve_temp_slots (to_rtx);
4190      free_temp_slots ();
4191      pop_temp_slots ();
4192      return;
4193    }
4194
4195  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4196     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4197
4198  if (to_rtx == 0)
4199    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4200
4201  /* Don't move directly into a return register.  */
4202  if (TREE_CODE (to) == RESULT_DECL
4203      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4204    {
4205      rtx temp;
4206
4207      push_temp_slots ();
4208      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4209
4210      if (GET_CODE (to_rtx) == PARALLEL)
4211	emit_group_load (to_rtx, temp, TREE_TYPE (from),
4212			 int_size_in_bytes (TREE_TYPE (from)));
4213      else
4214	emit_move_insn (to_rtx, temp);
4215
4216      preserve_temp_slots (to_rtx);
4217      free_temp_slots ();
4218      pop_temp_slots ();
4219      return;
4220    }
4221
4222  /* In case we are returning the contents of an object which overlaps
4223     the place the value is being stored, use a safe function when copying
4224     a value through a pointer into a structure value return block.  */
4225  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4226      && current_function_returns_struct
4227      && !current_function_returns_pcc_struct)
4228    {
4229      rtx from_rtx, size;
4230
4231      push_temp_slots ();
4232      size = expr_size (from);
4233      from_rtx = expand_normal (from);
4234
4235      emit_library_call (memmove_libfunc, LCT_NORMAL,
4236			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4237			 XEXP (from_rtx, 0), Pmode,
4238			 convert_to_mode (TYPE_MODE (sizetype),
4239					  size, TYPE_UNSIGNED (sizetype)),
4240			 TYPE_MODE (sizetype));
4241
4242      preserve_temp_slots (to_rtx);
4243      free_temp_slots ();
4244      pop_temp_slots ();
4245      return;
4246    }
4247
4248  /* Compute FROM and store the value in the rtx we got.  */
4249
4250  push_temp_slots ();
4251  result = store_expr (from, to_rtx, 0);
4252  preserve_temp_slots (result);
4253  free_temp_slots ();
4254  pop_temp_slots ();
4255  return;
4256}
4257
4258/* Generate code for computing expression EXP,
4259   and storing the value into TARGET.
4260
4261   If the mode is BLKmode then we may return TARGET itself.
4262   It turns out that in BLKmode this doesn't cause a problem,
4263   because C has no operators that could combine two different
4264   assignments into the same BLKmode object with different values
4265   with no sequence point.  Will other languages need this to
4266   be more thorough?
4267
4268   If CALL_PARAM_P is nonzero, this is a store into a call param on the
4269   stack, and block moves may need to be treated specially.  */
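/* For instance, when expanding an outgoing call argument the caller
   passes a nonzero CALL_PARAM_P, and the block moves below then use
   BLOCK_OP_CALL_PARM (and expansion uses EXPAND_STACK_PARM) so that the
   outgoing argument area is not clobbered while it is being set up.  */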
4270
4271rtx
4272store_expr (tree exp, rtx target, int call_param_p)
4273{
4274  rtx temp;
4275  rtx alt_rtl = NULL_RTX;
4276  int dont_return_target = 0;
4277
4278  if (VOID_TYPE_P (TREE_TYPE (exp)))
4279    {
4280      /* C++ can generate ?: expressions with a throw expression in one
4281	 branch and an rvalue in the other. Here, we resolve attempts to
4282	 store the throw expression's nonexistent result.  */
4283      gcc_assert (!call_param_p);
4284      expand_expr (exp, const0_rtx, VOIDmode, 0);
4285      return NULL_RTX;
4286    }
4287  if (TREE_CODE (exp) == COMPOUND_EXPR)
4288    {
4289      /* Perform first part of compound expression, then assign from second
4290	 part.  */
4291      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4292		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4293      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4294    }
4295  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4296    {
4297      /* For conditional expression, get safe form of the target.  Then
4298	 test the condition, doing the appropriate assignment on either
4299	 side.  This avoids the creation of unnecessary temporaries.
4300	 For non-BLKmode, it is more efficient not to do this.  */
4301
4302      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4303
4304      do_pending_stack_adjust ();
4305      NO_DEFER_POP;
4306      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4307      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4308      emit_jump_insn (gen_jump (lab2));
4309      emit_barrier ();
4310      emit_label (lab1);
4311      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4312      emit_label (lab2);
4313      OK_DEFER_POP;
4314
4315      return NULL_RTX;
4316    }
4317  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4318    /* If this is a scalar in a register that is stored in a wider mode
4319       than the declared mode, compute the result into its declared mode
4320       and then convert to the wider mode.  Our value is the computed
4321       expression.  */
4322    {
4323      rtx inner_target = 0;
4324
4325      /* We can do the conversion inside EXP, which will often result
4326	 in some optimizations.  Do the conversion in two steps: first
4327	 change the signedness, if needed, then the extend.  But don't
4328	 do this if the type of EXP is a subtype of something else
4329	 since then the conversion might involve more than just
4330	 converting modes.  */
4331      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4332	  && TREE_TYPE (TREE_TYPE (exp)) == 0
4333	  && (!lang_hooks.reduce_bit_field_operations
4334	      || (GET_MODE_PRECISION (GET_MODE (target))
4335		  == TYPE_PRECISION (TREE_TYPE (exp)))))
4336	{
4337	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
4338	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4339	    exp = fold_convert
4340	      (lang_hooks.types.signed_or_unsigned_type
4341	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4342
4343	  exp = fold_convert (lang_hooks.types.type_for_mode
4344				(GET_MODE (SUBREG_REG (target)),
4345				 SUBREG_PROMOTED_UNSIGNED_P (target)),
4346			      exp);
4347
4348	  inner_target = SUBREG_REG (target);
4349	}
4350
4351      temp = expand_expr (exp, inner_target, VOIDmode,
4352			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4353
4354      /* If TEMP is a VOIDmode constant, use convert_modes to make
4355	 sure that we properly convert it.  */
4356      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4357	{
4358	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4359				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4360	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4361			        GET_MODE (target), temp,
4362			        SUBREG_PROMOTED_UNSIGNED_P (target));
4363	}
4364
4365      convert_move (SUBREG_REG (target), temp,
4366		    SUBREG_PROMOTED_UNSIGNED_P (target));
4367
4368      return NULL_RTX;
4369    }
4370  else
4371    {
4372      temp = expand_expr_real (exp, target, GET_MODE (target),
4373			       (call_param_p
4374				? EXPAND_STACK_PARM : EXPAND_NORMAL),
4375			       &alt_rtl);
4376      /* Return TARGET if it's a specified hardware register.
4377	 If TARGET is a volatile mem ref, either return TARGET
4378	 or return a reg copied *from* TARGET; ANSI requires this.
4379
4380	 Otherwise, if TEMP is not TARGET, return TEMP
4381	 if it is constant (for efficiency),
4382	 or if we really want the correct value.  */
4383      if (!(target && REG_P (target)
4384	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4385	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
4386	  && ! rtx_equal_p (temp, target)
4387	  && CONSTANT_P (temp))
4388	dont_return_target = 1;
4389    }
4390
4391  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4392     the same as that of TARGET, adjust the constant.  This is needed, for
4393     example, in case it is a CONST_DOUBLE and we want only a word-sized
4394     value.  */
4395  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4396      && TREE_CODE (exp) != ERROR_MARK
4397      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4398    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4399			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4400
4401  /* If value was not generated in the target, store it there.
4402     Convert the value to TARGET's type first if necessary and emit the
4403     pending incrementations that have been queued when expanding EXP.
4404     Note that we cannot emit the whole queue blindly because this will
4405     effectively disable the POST_INC optimization later.
4406
4407     If TEMP and TARGET compare equal according to rtx_equal_p, but
4408     one or both of them are volatile memory refs, we have to distinguish
4409     two cases:
4410     - expand_expr has used TARGET.  In this case, we must not generate
4411       another copy.  This can be detected by TARGET being equal according
4412       to == .
4413     - expand_expr has not used TARGET - that means that the source just
4414       happens to have the same RTX form.  Since temp will have been created
4415       by expand_expr, it will compare unequal according to == .
4416       We must generate a copy in this case, to reach the correct number
4417       of volatile memory references.  */
4418
4419  if ((! rtx_equal_p (temp, target)
4420       || (temp != target && (side_effects_p (temp)
4421			      || side_effects_p (target))))
4422      && TREE_CODE (exp) != ERROR_MARK
4423      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4424	 but TARGET is not a valid memory reference, TEMP will differ
4425	 from TARGET although it is really the same location.  */
4426      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4427      /* If there's nothing to copy, don't bother.  Don't call
4428	 expr_size unless necessary, because some front ends' (e.g. C++)
4429	 expr_size hooks must not be given objects that are not
4430	 supposed to be bit-copied or bit-initialized.  */
4431      && expr_size (exp) != const0_rtx)
4432    {
4433      if (GET_MODE (temp) != GET_MODE (target)
4434	  && GET_MODE (temp) != VOIDmode)
4435	{
4436	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4437	  if (dont_return_target)
4438	    {
4439	      /* In this case, we will return TEMP,
4440		 so make sure it has the proper mode.
4441		 But don't forget to store the value into TARGET.  */
4442	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4443	      emit_move_insn (target, temp);
4444	    }
4445	  else if (GET_MODE (target) == BLKmode)
4446	    emit_block_move (target, temp, expr_size (exp),
4447			     (call_param_p
4448			      ? BLOCK_OP_CALL_PARM
4449			      : BLOCK_OP_NORMAL));
4450	  else
4451	    convert_move (target, temp, unsignedp);
4452	}
4453
4454      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4455	{
4456	  /* Handle copying a string constant into an array.  The string
4457	     constant may be shorter than the array.  So copy just the string's
4458	     actual length, and clear the rest.  First get the size of the data
4459	     type of the string, which is actually the size of the target.  */
4460	  rtx size = expr_size (exp);
4461
4462	  if (GET_CODE (size) == CONST_INT
4463	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4464	    emit_block_move (target, temp, size,
4465			     (call_param_p
4466			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4467	  else
4468	    {
4469	      /* Compute the size of the data to copy from the string.  */
4470	      tree copy_size
4471		= size_binop (MIN_EXPR,
4472			      make_tree (sizetype, size),
4473			      size_int (TREE_STRING_LENGTH (exp)));
4474	      rtx copy_size_rtx
4475		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4476			       (call_param_p
4477				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4478	      rtx label = 0;
4479
4480	      /* Copy that much.  */
4481	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4482					       TYPE_UNSIGNED (sizetype));
4483	      emit_block_move (target, temp, copy_size_rtx,
4484			       (call_param_p
4485				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4486
4487	      /* Figure out how much is left in TARGET that we have to clear.
4488		 Do all calculations in ptr_mode.  */
4489	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4490		{
4491		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4492		  target = adjust_address (target, BLKmode,
4493					   INTVAL (copy_size_rtx));
4494		}
4495	      else
4496		{
4497		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4498				       copy_size_rtx, NULL_RTX, 0,
4499				       OPTAB_LIB_WIDEN);
4500
4501#ifdef POINTERS_EXTEND_UNSIGNED
4502		  if (GET_MODE (copy_size_rtx) != Pmode)
4503		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4504						     TYPE_UNSIGNED (sizetype));
4505#endif
4506
4507		  target = offset_address (target, copy_size_rtx,
4508					   highest_pow2_factor (copy_size));
4509		  label = gen_label_rtx ();
4510		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4511					   GET_MODE (size), 0, label);
4512		}
4513
4514	      if (size != const0_rtx)
4515		clear_storage (target, size, BLOCK_OP_NORMAL);
4516
4517	      if (label)
4518		emit_label (label);
4519	    }
4520	}
4521      /* Handle calls that return values in multiple non-contiguous locations.
4522	 The Irix 6 ABI has examples of this.  */
4523      else if (GET_CODE (target) == PARALLEL)
4524	emit_group_load (target, temp, TREE_TYPE (exp),
4525			 int_size_in_bytes (TREE_TYPE (exp)));
4526      else if (GET_MODE (temp) == BLKmode)
4527	emit_block_move (target, temp, expr_size (exp),
4528			 (call_param_p
4529			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4530      else
4531	{
4532	  temp = force_operand (temp, target);
4533	  if (temp != target)
4534	    emit_move_insn (target, temp);
4535	}
4536    }
4537
4538  return NULL_RTX;
4539}
4540
4541/* Helper for categorize_ctor_elements.  Identical interface.  */
4542
4543static bool
4544categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4545			    HOST_WIDE_INT *p_elt_count,
4546			    bool *p_must_clear)
4547{
4548  unsigned HOST_WIDE_INT idx;
4549  HOST_WIDE_INT nz_elts, elt_count;
4550  tree value, purpose;
4551
4552  /* Whether CTOR is a valid constant initializer, in accordance with what
4553     initializer_constant_valid_p does.  If inferred from the constructor
4554     elements, true until proven otherwise.  */
4555  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4556  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4557
4558  nz_elts = 0;
4559  elt_count = 0;
4560
4561  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4562    {
4563      HOST_WIDE_INT mult;
4564
4565      mult = 1;
4566      if (TREE_CODE (purpose) == RANGE_EXPR)
4567	{
4568	  tree lo_index = TREE_OPERAND (purpose, 0);
4569	  tree hi_index = TREE_OPERAND (purpose, 1);
4570
4571	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4572	    mult = (tree_low_cst (hi_index, 1)
4573		    - tree_low_cst (lo_index, 1) + 1);
4574	}
4575
4576      switch (TREE_CODE (value))
4577	{
4578	case CONSTRUCTOR:
4579	  {
4580	    HOST_WIDE_INT nz = 0, ic = 0;
4581
4582	    bool const_elt_p
4583	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4584
4585	    nz_elts += mult * nz;
4586 	    elt_count += mult * ic;
4587
4588	    if (const_from_elts_p && const_p)
4589	      const_p = const_elt_p;
4590	  }
4591	  break;
4592
4593	case INTEGER_CST:
4594	case REAL_CST:
4595	  if (!initializer_zerop (value))
4596	    nz_elts += mult;
4597	  elt_count += mult;
4598	  break;
4599
4600	case STRING_CST:
4601	  nz_elts += mult * TREE_STRING_LENGTH (value);
4602	  elt_count += mult * TREE_STRING_LENGTH (value);
4603	  break;
4604
4605	case COMPLEX_CST:
4606	  if (!initializer_zerop (TREE_REALPART (value)))
4607	    nz_elts += mult;
4608	  if (!initializer_zerop (TREE_IMAGPART (value)))
4609	    nz_elts += mult;
4610	  elt_count += mult;
4611	  break;
4612
4613	case VECTOR_CST:
4614	  {
4615	    tree v;
4616	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4617	      {
4618		if (!initializer_zerop (TREE_VALUE (v)))
4619		  nz_elts += mult;
4620		elt_count += mult;
4621	      }
4622	  }
4623	  break;
4624
4625	default:
4626	  nz_elts += mult;
4627	  elt_count += mult;
4628
4629	  if (const_from_elts_p && const_p)
4630	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4631		      != NULL_TREE;
4632	  break;
4633	}
4634    }
4635
4636  if (!*p_must_clear
4637      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4638	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4639    {
4640      tree init_sub_type;
4641      bool clear_this = true;
4642
4643      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4644	{
4645	  /* We don't expect more than one element of the union to be
4646	     initialized.  Not sure what we should do otherwise... */
4647          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4648		      == 1);
4649
4650          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4651						CONSTRUCTOR_ELTS (ctor),
4652						0)->value);
4653
4654	  /* ??? We could look at each element of the union and find the
4655	     largest element, which would avoid comparing the size of the
4656	     initialized element against any tail padding in the union.
4657	     It doesn't seem worth the effort...  */
4658	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4659				TYPE_SIZE (init_sub_type)) == 1)
4660	    {
4661	      /* And now we have to find out if the element itself is fully
4662		 constructed.  E.g. for union { struct { int a, b; } s; } u
4663		 = { .s = { .a = 1 } }.  */
4664	      if (elt_count == count_type_elements (init_sub_type, false))
4665		clear_this = false;
4666	    }
4667	}
4668
4669      *p_must_clear = clear_this;
4670    }
4671
4672  *p_nz_elts += nz_elts;
4673  *p_elt_count += elt_count;
4674
4675  return const_p;
4676}
4677
4678/* Examine CTOR to discover:
4679   * how many scalar fields are set to nonzero values,
4680     and place that count in *P_NZ_ELTS;
4681   * how many scalar fields in total are in CTOR,
4682     and place that count in *P_ELT_COUNT;
4683   * whether the type is a union and the initializer from the constructor
4684     is not the largest element in the union; if so, set *P_MUST_CLEAR.
4685
4686   Return whether or not CTOR is a valid static constant initializer, the same
4687   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
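/* For example, for the initializer { 0, 3, 0, 7 } of an int[4],
   *P_NZ_ELTS comes back as 2 and *P_ELT_COUNT as 4, *P_MUST_CLEAR stays
   false, and the function returns true since every element is a valid
   constant initializer.  */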
4688
4689bool
4690categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4691			  HOST_WIDE_INT *p_elt_count,
4692			  bool *p_must_clear)
4693{
4694  *p_nz_elts = 0;
4695  *p_elt_count = 0;
4696  *p_must_clear = false;
4697
4698  return
4699    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4700}
4701
4702/* Count the number of scalars in TYPE.  Return -1 on overflow or if
4703   TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
4704   flexible array member at the end of the structure.  */
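/* For example, struct { int a; int b[3]; } counts as 4 scalars, a
   COMPLEX_TYPE as 2, and any union or variable-sized array yields -1.  */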
4705
4706HOST_WIDE_INT
4707count_type_elements (tree type, bool allow_flexarr)
4708{
4709  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4710  switch (TREE_CODE (type))
4711    {
4712    case ARRAY_TYPE:
4713      {
4714	tree telts = array_type_nelts (type);
4715	if (telts && host_integerp (telts, 1))
4716	  {
4717	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4718	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4719	    if (n == 0)
4720	      return 0;
4721	    else if (max / n > m)
4722	      return n * m;
4723	  }
4724	return -1;
4725      }
4726
4727    case RECORD_TYPE:
4728      {
4729	HOST_WIDE_INT n = 0, t;
4730	tree f;
4731
4732	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4733	  if (TREE_CODE (f) == FIELD_DECL)
4734	    {
4735	      t = count_type_elements (TREE_TYPE (f), false);
4736	      if (t < 0)
4737		{
4738		  /* Check for structures with flexible array member.  */
4739		  tree tf = TREE_TYPE (f);
4740		  if (allow_flexarr
4741		      && TREE_CHAIN (f) == NULL
4742		      && TREE_CODE (tf) == ARRAY_TYPE
4743		      && TYPE_DOMAIN (tf)
4744		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4745		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4746		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4747		      && int_size_in_bytes (type) >= 0)
4748		    break;
4749
4750		  return -1;
4751		}
4752	      n += t;
4753	    }
4754
4755	return n;
4756      }
4757
4758    case UNION_TYPE:
4759    case QUAL_UNION_TYPE:
4760      return -1;
4761
4762    case COMPLEX_TYPE:
4763      return 2;
4764
4765    case VECTOR_TYPE:
4766      return TYPE_VECTOR_SUBPARTS (type);
4767
4768    case INTEGER_TYPE:
4769    case REAL_TYPE:
4770    case ENUMERAL_TYPE:
4771    case BOOLEAN_TYPE:
4772    case POINTER_TYPE:
4773    /* APPLE LOCAL radar 5732232 - blocks */
4774    case BLOCK_POINTER_TYPE:
4775    case OFFSET_TYPE:
4776    case REFERENCE_TYPE:
4777      return 1;
4778
4779    case VOID_TYPE:
4780    case METHOD_TYPE:
4781    case FUNCTION_TYPE:
4782    case LANG_TYPE:
4783    default:
4784      gcc_unreachable ();
4785    }
4786}
4787
4788/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
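/* For example, a CONSTRUCTOR for an 8-element array with one nonzero
   entry qualifies (1 < 8/4), while one with two nonzero entries does
   not.  */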
4789
4790static int
4791mostly_zeros_p (tree exp)
4792{
4793  if (TREE_CODE (exp) == CONSTRUCTOR)
4794
4795    {
4796      HOST_WIDE_INT nz_elts, count, elts;
4797      bool must_clear;
4798
4799      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4800      if (must_clear)
4801	return 1;
4802
4803      elts = count_type_elements (TREE_TYPE (exp), false);
4804
4805      return nz_elts < elts / 4;
4806    }
4807
4808  return initializer_zerop (exp);
4809}
4810
4811/* Return 1 if EXP contains all zeros.  */
4812
4813static int
4814all_zeros_p (tree exp)
4815{
4816  if (TREE_CODE (exp) == CONSTRUCTOR)
4817
4818    {
4819      HOST_WIDE_INT nz_elts, count;
4820      bool must_clear;
4821
4822      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4823      return nz_elts == 0;
4824    }
4825
4826  return initializer_zerop (exp);
4827}
4828
4829/* Helper function for store_constructor.
4830   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4831   TYPE is the type of the CONSTRUCTOR, not the element type.
4832   CLEARED is as for store_constructor.
4833   ALIAS_SET is the alias set to use for any stores.
4834
4835   This provides a recursive shortcut back to store_constructor when it isn't
4836   necessary to go through store_field.  This is so that we can pass through
4837   the cleared field to let store_constructor know that we may not have to
4838   clear a substructure if the outer structure has already been cleared.  */
4839
4840static void
4841store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4842			 HOST_WIDE_INT bitpos, enum machine_mode mode,
4843			 tree exp, tree type, int cleared, int alias_set)
4844{
4845  if (TREE_CODE (exp) == CONSTRUCTOR
4846      /* We can only call store_constructor recursively if the size and
4847	 bit position are on a byte boundary.  */
4848      && bitpos % BITS_PER_UNIT == 0
4849      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4850      /* If we have a nonzero bitpos for a register target, then we just
4851	 let store_field do the bitfield handling.  This is unlikely to
4852	 generate unnecessary clear instructions anyway.  */
4853      && (bitpos == 0 || MEM_P (target)))
4854    {
4855      if (MEM_P (target))
4856	target
4857	  = adjust_address (target,
4858			    GET_MODE (target) == BLKmode
4859			    || 0 != (bitpos
4860				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4861			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4862
4864      /* Update the alias set, if required.  */
4865      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4866	  && MEM_ALIAS_SET (target) != 0)
4867	{
4868	  target = copy_rtx (target);
4869	  set_mem_alias_set (target, alias_set);
4870	}
4871
4872      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4873    }
4874  else
4875    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4876}
4877
4878/* Store the value of constructor EXP into the rtx TARGET.
4879   TARGET is either a REG or a MEM; we know it cannot conflict, since
4880   safe_from_p has been called.
4881   CLEARED is true if TARGET is known to have been zero'd.
4882   SIZE is the number of bytes of TARGET we are allowed to modify: this
4883   may not be the same as the size of EXP if we are assigning to a field
4884   which has been packed to exclude padding bits.  */
4885
4886static void
4887store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4888{
4889  tree type = TREE_TYPE (exp);
4890#ifdef WORD_REGISTER_OPERATIONS
4891  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4892#endif
4893
4894  switch (TREE_CODE (type))
4895    {
4896    case RECORD_TYPE:
4897    case UNION_TYPE:
4898    case QUAL_UNION_TYPE:
4899      {
4900	unsigned HOST_WIDE_INT idx;
4901	tree field, value;
4902
4903	/* If size is zero or the target is already cleared, do nothing.  */
4904	if (size == 0 || cleared)
4905	  cleared = 1;
4906	/* We either clear the aggregate or indicate the value is dead.  */
4907	else if ((TREE_CODE (type) == UNION_TYPE
4908		  || TREE_CODE (type) == QUAL_UNION_TYPE)
4909		 && ! CONSTRUCTOR_ELTS (exp))
4910	  /* If the constructor is empty, clear the union.  */
4911	  {
4912	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4913	    cleared = 1;
4914	  }
4915
4916	/* If we are building a static constructor into a register,
4917	   set the initial value as zero so we can fold the value into
4918	   a constant.  But if more than one register is involved,
4919	   this probably loses.  */
4920	else if (REG_P (target) && TREE_STATIC (exp)
4921		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4922	  {
4923	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4924	    cleared = 1;
4925	  }
4926
4927        /* If the constructor has fewer fields than the structure or
4928	   if we are initializing the structure to mostly zeros, clear
4929	   the whole structure first.  Don't do this if TARGET is a
4930	   register whose mode size isn't equal to SIZE since
4931	   clear_storage can't handle this case.  */
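	/* For example, for "struct S { int a, b, c, d; } s = { 1 };" the
	   constructor supplies only one of the four fields, so the whole
	   of S is cleared first and only the store of 1 into s.a remains
	   to be done below.  */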
4932	else if (size > 0
4933		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4934		      != fields_length (type))
4935		     || mostly_zeros_p (exp))
4936		 && (!REG_P (target)
4937		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4938			 == size)))
4939	  {
4940	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4941	    cleared = 1;
4942	  }
4943
4944	if (! cleared)
4945	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4946
4947	/* Store each element of the constructor into the
4948	   corresponding field of TARGET.  */
4949	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4950	  {
4951	    enum machine_mode mode;
4952	    HOST_WIDE_INT bitsize;
4953	    HOST_WIDE_INT bitpos = 0;
4954	    tree offset;
4955	    rtx to_rtx = target;
4956
4957	    /* Just ignore missing fields.  We cleared the whole
4958	       structure, above, if any fields are missing.  */
4959	    if (field == 0)
4960	      continue;
4961
4962	    if (cleared && initializer_zerop (value))
4963	      continue;
4964
4965	    if (host_integerp (DECL_SIZE (field), 1))
4966	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
4967	    else
4968	      bitsize = -1;
4969
4970	    mode = DECL_MODE (field);
4971	    if (DECL_BIT_FIELD (field))
4972	      mode = VOIDmode;
4973
4974	    offset = DECL_FIELD_OFFSET (field);
4975	    if (host_integerp (offset, 0)
4976		&& host_integerp (bit_position (field), 0))
4977	      {
4978		bitpos = int_bit_position (field);
4979		offset = 0;
4980	      }
4981	    else
4982	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4983
4984	    if (offset)
4985	      {
4986		rtx offset_rtx;
4987
4988		offset
4989		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4990						    make_tree (TREE_TYPE (exp),
4991							       target));
4992
4993		offset_rtx = expand_normal (offset);
4994		gcc_assert (MEM_P (to_rtx));
4995
4996#ifdef POINTERS_EXTEND_UNSIGNED
4997		if (GET_MODE (offset_rtx) != Pmode)
4998		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4999#else
5000		if (GET_MODE (offset_rtx) != ptr_mode)
5001		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5002#endif
5003
5004		to_rtx = offset_address (to_rtx, offset_rtx,
5005					 highest_pow2_factor (offset));
5006	      }
5007
5008#ifdef WORD_REGISTER_OPERATIONS
5009	    /* If this initializes a field that is smaller than a
5010	       word, at the start of a word, try to widen it to a full
5011	       word.  This special case allows us to output C++ member
5012	       function initializations in a form that the optimizers
5013	       can understand.  */
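	    /* For instance, a 16-bit integer field at bit 0 of a struct
	       held in a word-sized register is widened here and stored as
	       a full word (the constant shifted into place on big-endian
	       targets), which later passes handle better than a sub-word
	       bit-field insertion.  */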
5014	    if (REG_P (target)
5015		&& bitsize < BITS_PER_WORD
5016		&& bitpos % BITS_PER_WORD == 0
5017		&& GET_MODE_CLASS (mode) == MODE_INT
5018		&& TREE_CODE (value) == INTEGER_CST
5019		&& exp_size >= 0
5020		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5021	      {
5022		tree type = TREE_TYPE (value);
5023
5024		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5025		  {
5026		    type = lang_hooks.types.type_for_size
5027		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5028		    value = fold_convert (type, value);
5029		  }
5030
5031		if (BYTES_BIG_ENDIAN)
5032		  value
5033		   = fold_build2 (LSHIFT_EXPR, type, value,
5034				   build_int_cst (type,
5035						  BITS_PER_WORD - bitsize));
5036		bitsize = BITS_PER_WORD;
5037		mode = word_mode;
5038	      }
5039#endif
5040
5041	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5042		&& DECL_NONADDRESSABLE_P (field))
5043	      {
5044		to_rtx = copy_rtx (to_rtx);
5045		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5046	      }
5047
5048	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5049				     value, type, cleared,
5050				     get_alias_set (TREE_TYPE (field)));
5051	  }
5052	break;
5053      }
5054    case ARRAY_TYPE:
5055      {
5056	tree value, index;
5057	unsigned HOST_WIDE_INT i;
5058	int need_to_clear;
5059	tree domain;
5060	tree elttype = TREE_TYPE (type);
5061	int const_bounds_p;
5062	HOST_WIDE_INT minelt = 0;
5063	HOST_WIDE_INT maxelt = 0;
5064
5065	domain = TYPE_DOMAIN (type);
5066	const_bounds_p = (TYPE_MIN_VALUE (domain)
5067			  && TYPE_MAX_VALUE (domain)
5068			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5069			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
5070
5071	/* If we have constant bounds for the range of the type, get them.  */
5072	if (const_bounds_p)
5073	  {
5074	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5075	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5076	  }
5077
5078	/* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is a static
           constructor of a non-BLKmode object.  */
5081	if (cleared)
5082	  need_to_clear = 0;
5083	else if (REG_P (target) && TREE_STATIC (exp))
5084	  need_to_clear = 1;
5085	else
5086	  {
5087	    unsigned HOST_WIDE_INT idx;
5088	    tree index, value;
5089	    HOST_WIDE_INT count = 0, zero_count = 0;
5090	    need_to_clear = ! const_bounds_p;
5091
5092	    /* This loop is a more accurate version of the loop in
5093	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5094	       is also needed to check for missing elements.  */
5095	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5096	      {
5097		HOST_WIDE_INT this_node_count;
5098
5099		if (need_to_clear)
5100		  break;
5101
5102		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5103		  {
5104		    tree lo_index = TREE_OPERAND (index, 0);
5105		    tree hi_index = TREE_OPERAND (index, 1);
5106
5107		    if (! host_integerp (lo_index, 1)
5108			|| ! host_integerp (hi_index, 1))
5109		      {
5110			need_to_clear = 1;
5111			break;
5112		      }
5113
5114		    this_node_count = (tree_low_cst (hi_index, 1)
5115				       - tree_low_cst (lo_index, 1) + 1);
5116		  }
5117		else
5118		  this_node_count = 1;
5119
5120		count += this_node_count;
5121		if (mostly_zeros_p (value))
5122		  zero_count += this_node_count;
5123	      }
5124
5125	    /* Clear the entire array first if there are any missing
5126	       elements, or if the incidence of zero elements is >=
5127	       75%.  */
5128	    if (! need_to_clear
5129		&& (count < maxelt - minelt + 1
5130		    || 4 * zero_count >= 3 * count))
5131	      need_to_clear = 1;
5132	  }
5133
5134	if (need_to_clear && size > 0)
5135	  {
5136	    if (REG_P (target))
5137	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5138	    else
5139	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5140	    cleared = 1;
5141	  }
5142
5143	if (!cleared && REG_P (target))
5144	  /* Inform later passes that the old value is dead.  */
5145	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5146
5147	/* Store each element of the constructor into the
5148	   corresponding element of TARGET, determined by counting the
5149	   elements.  */
5150	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5151	  {
5152	    enum machine_mode mode;
5153	    HOST_WIDE_INT bitsize;
5154	    HOST_WIDE_INT bitpos;
5155	    int unsignedp;
5156	    rtx xtarget = target;
5157
5158	    if (cleared && initializer_zerop (value))
5159	      continue;
5160
5161	    unsignedp = TYPE_UNSIGNED (elttype);
5162	    mode = TYPE_MODE (elttype);
5163	    if (mode == BLKmode)
5164	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5165			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5166			 : -1);
5167	    else
5168	      bitsize = GET_MODE_BITSIZE (mode);
5169
5170	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5171	      {
5172		tree lo_index = TREE_OPERAND (index, 0);
5173		tree hi_index = TREE_OPERAND (index, 1);
5174		rtx index_r, pos_rtx;
5175		HOST_WIDE_INT lo, hi, count;
5176		tree position;
5177
		/* If the range is constant and "small" (the target is not
		   in memory, the range has at most two elements, or it
		   covers at most 40 bytes), unroll the loop.  */
5179		if (const_bounds_p
5180		    && host_integerp (lo_index, 0)
5181		    && host_integerp (hi_index, 0)
5182		    && (lo = tree_low_cst (lo_index, 0),
5183			hi = tree_low_cst (hi_index, 0),
5184			count = hi - lo + 1,
5185			(!MEM_P (target)
5186			 || count <= 2
5187			 || (host_integerp (TYPE_SIZE (elttype), 1)
5188			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5189				 <= 40 * 8)))))
5190		  {
5191		    lo -= minelt;  hi -= minelt;
5192		    for (; lo <= hi; lo++)
5193		      {
5194			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5195
5196			if (MEM_P (target)
5197			    && !MEM_KEEP_ALIAS_SET_P (target)
5198			    && TREE_CODE (type) == ARRAY_TYPE
5199			    && TYPE_NONALIASED_COMPONENT (type))
5200			  {
5201			    target = copy_rtx (target);
5202			    MEM_KEEP_ALIAS_SET_P (target) = 1;
5203			  }
5204
5205			store_constructor_field
5206			  (target, bitsize, bitpos, mode, value, type, cleared,
5207			   get_alias_set (elttype));
5208		      }
5209		  }
5210		else
5211		  {
5212		    rtx loop_start = gen_label_rtx ();
5213		    rtx loop_end = gen_label_rtx ();
5214		    tree exit_cond;
5215
5216		    expand_normal (hi_index);
5217		    unsignedp = TYPE_UNSIGNED (domain);
5218
5219		    index = build_decl (VAR_DECL, NULL_TREE, domain);
5220
5221		    index_r
5222		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5223						   &unsignedp, 0));
5224		    SET_DECL_RTL (index, index_r);
5225		    store_expr (lo_index, index_r, 0);
5226
5227		    /* Build the head of the loop.  */
5228		    do_pending_stack_adjust ();
5229		    emit_label (loop_start);
5230
5231		    /* Assign value to element index.  */
5232		    position =
5233		      fold_convert (ssizetype,
5234				    fold_build2 (MINUS_EXPR,
5235						 TREE_TYPE (index),
5236						 index,
5237						 TYPE_MIN_VALUE (domain)));
5238
5239		    position =
5240			size_binop (MULT_EXPR, position,
5241				    fold_convert (ssizetype,
5242						  TYPE_SIZE_UNIT (elttype)));
5243
5244		    pos_rtx = expand_normal (position);
5245		    xtarget = offset_address (target, pos_rtx,
5246					      highest_pow2_factor (position));
5247		    xtarget = adjust_address (xtarget, mode, 0);
5248		    if (TREE_CODE (value) == CONSTRUCTOR)
5249		      store_constructor (value, xtarget, cleared,
5250					 bitsize / BITS_PER_UNIT);
5251		    else
5252		      store_expr (value, xtarget, 0);
5253
5254		    /* Generate a conditional jump to exit the loop.  */
5255		    exit_cond = build2 (LT_EXPR, integer_type_node,
5256					index, hi_index);
		    jumpifnot (exit_cond, loop_end);
5258
5259		    /* Update the loop counter, and jump to the head of
5260		       the loop.  */
5261		    expand_assignment (index,
5262				       build2 (PLUS_EXPR, TREE_TYPE (index),
5263					       index, integer_one_node));
5264
5265		    emit_jump (loop_start);
5266
5267		    /* Build the end of the loop.  */
5268		    emit_label (loop_end);
5269		  }
5270	      }
5271	    else if ((index != 0 && ! host_integerp (index, 0))
5272		     || ! host_integerp (TYPE_SIZE (elttype), 1))
5273	      {
5274		tree position;
5275
5276		if (index == 0)
5277		  index = ssize_int (1);
5278
5279		if (minelt)
5280		  index = fold_convert (ssizetype,
5281					fold_build2 (MINUS_EXPR,
5282						     TREE_TYPE (index),
5283						     index,
5284						     TYPE_MIN_VALUE (domain)));
5285
5286		position =
5287		  size_binop (MULT_EXPR, index,
5288			      fold_convert (ssizetype,
5289					    TYPE_SIZE_UNIT (elttype)));
5290		xtarget = offset_address (target,
5291					  expand_normal (position),
5292					  highest_pow2_factor (position));
5293		xtarget = adjust_address (xtarget, mode, 0);
5294		store_expr (value, xtarget, 0);
5295	      }
5296	    else
5297	      {
5298		if (index != 0)
5299		  bitpos = ((tree_low_cst (index, 0) - minelt)
5300			    * tree_low_cst (TYPE_SIZE (elttype), 1));
5301		else
5302		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5303
5304		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5305		    && TREE_CODE (type) == ARRAY_TYPE
5306		    && TYPE_NONALIASED_COMPONENT (type))
5307		  {
5308		    target = copy_rtx (target);
5309		    MEM_KEEP_ALIAS_SET_P (target) = 1;
5310		  }
5311		store_constructor_field (target, bitsize, bitpos, mode, value,
5312					 type, cleared, get_alias_set (elttype));
5313	      }
5314	  }
5315	break;
5316      }
5317
5318    case VECTOR_TYPE:
5319      {
5320	unsigned HOST_WIDE_INT idx;
5321	constructor_elt *ce;
5322	int i;
5323	int need_to_clear;
5324	int icode = 0;
5325	tree elttype = TREE_TYPE (type);
5326	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5327	enum machine_mode eltmode = TYPE_MODE (elttype);
5328	HOST_WIDE_INT bitsize;
5329	HOST_WIDE_INT bitpos;
5330	rtvec vector = NULL;
5331	unsigned n_elts;
5332
5333	gcc_assert (eltmode != BLKmode);
5334
5335	n_elts = TYPE_VECTOR_SUBPARTS (type);
5336	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5337	  {
5338	    enum machine_mode mode = GET_MODE (target);
5339
5340	    icode = (int) vec_init_optab->handlers[mode].insn_code;
5341	    if (icode != CODE_FOR_nothing)
5342	      {
5343		unsigned int i;
5344
5345		vector = rtvec_alloc (n_elts);
5346		for (i = 0; i < n_elts; i++)
5347		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5348	      }
5349	  }
5350
5351	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
5354	if (cleared)
5355	  need_to_clear = 0;
5356	else if (REG_P (target) && TREE_STATIC (exp))
5357	  need_to_clear = 1;
5358	else
5359	  {
5360	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5361	    tree value;
5362
5363	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5364	      {
5365		int n_elts_here = tree_low_cst
5366		  (int_const_binop (TRUNC_DIV_EXPR,
5367				    TYPE_SIZE (TREE_TYPE (value)),
5368				    TYPE_SIZE (elttype), 0), 1);
5369
5370		count += n_elts_here;
5371		if (mostly_zeros_p (value))
5372		  zero_count += n_elts_here;
5373	      }
5374
5375	    /* Clear the entire vector first if there are any missing elements,
5376	       or if the incidence of zero elements is >= 75%.  */
5377	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5378	  }
5379
5380	if (need_to_clear && size > 0 && !vector)
5381	  {
5382	    if (REG_P (target))
5383	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5384	    else
5385	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5386	    cleared = 1;
5387	  }
5388
5389	/* Inform later passes that the old value is dead.  */
5390	if (!cleared && !vector && REG_P (target))
5391	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5392
5393        /* Store each element of the constructor into the corresponding
5394	   element of TARGET, determined by counting the elements.  */
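	/* Note that I advances by the number of scalar elements covered
	   by each initializer, so an initializer that is itself a vector
	   occupies several element positions.  */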
5395	for (idx = 0, i = 0;
5396	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5397	     idx++, i += bitsize / elt_size)
5398	  {
5399	    HOST_WIDE_INT eltpos;
5400	    tree value = ce->value;
5401
5402	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5403	    if (cleared && initializer_zerop (value))
5404	      continue;
5405
5406	    if (ce->index)
5407	      eltpos = tree_low_cst (ce->index, 1);
5408	    else
5409	      eltpos = i;
5410
5411	    if (vector)
5412	      {
5413	        /* Vector CONSTRUCTORs should only be built from smaller
5414		   vectors in the case of BLKmode vectors.  */
5415		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5416		RTVEC_ELT (vector, eltpos)
5417		  = expand_normal (value);
5418	      }
5419	    else
5420	      {
5421		enum machine_mode value_mode =
5422		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5423		  ? TYPE_MODE (TREE_TYPE (value))
5424		  : eltmode;
5425		bitpos = eltpos * elt_size;
5426		store_constructor_field (target, bitsize, bitpos,
5427					 value_mode, value, type,
5428					 cleared, get_alias_set (elttype));
5429	      }
5430	  }
5431
5432	if (vector)
5433	  emit_insn (GEN_FCN (icode)
5434		     (target,
5435		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5436	break;
5437      }
5438
5439    default:
5440      gcc_unreachable ();
5441    }
5442}
5443
5444/* Store the value of EXP (an expression tree)
5445   into a subfield of TARGET which has mode MODE and occupies
5446   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5447   If MODE is VOIDmode, it means that we are storing into a bit-field.
5448
5449   Always return const0_rtx unless we have something particular to
5450   return.
5451
   TYPE is the type of the underlying object.
5453
5454   ALIAS_SET is the alias set for the destination.  This value will
5455   (in general) be different from that for TARGET, since TARGET is a
5456   reference to the containing structure.  */
5457
5458static rtx
5459store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5460	     enum machine_mode mode, tree exp, tree type, int alias_set)
5461{
5462  HOST_WIDE_INT width_mask = 0;
5463
5464  if (TREE_CODE (exp) == ERROR_MARK)
5465    return const0_rtx;
5466
5467  /* If we have nothing to store, do nothing unless the expression has
5468     side-effects.  */
5469  if (bitsize == 0)
5470    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5471  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5472    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5473
5474  /* If we are storing into an unaligned field of an aligned union that is
5475     in a register, we may have the mode of TARGET being an integer mode but
5476     MODE == BLKmode.  In that case, get an aligned object whose size and
5477     alignment are the same as TARGET and store TARGET into it (we can avoid
5478     the store if the field being stored is the entire width of TARGET).  Then
5479     call ourselves recursively to store the field into a BLKmode version of
5480     that object.  Finally, load from the object into TARGET.  This is not
5481     very efficient in general, but should only be slightly more expensive
5482     than the otherwise-required unaligned accesses.  Perhaps this can be
5483     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5484     twice, once with emit_move_insn and once via store_field.  */
5485
5486  if (mode == BLKmode
5487      && (REG_P (target) || GET_CODE (target) == SUBREG))
5488    {
5489      rtx object = assign_temp (type, 0, 1, 1);
5490      rtx blk_object = adjust_address (object, BLKmode, 0);
5491
5492      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5493	emit_move_insn (object, target);
5494
5495      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5496
5497      emit_move_insn (target, object);
5498
5499      /* We want to return the BLKmode version of the data.  */
5500      return blk_object;
5501    }
5502
5503  if (GET_CODE (target) == CONCAT)
5504    {
5505      /* We're storing into a struct containing a single __complex.  */
5506
5507      gcc_assert (!bitpos);
5508      return store_expr (exp, target, 0);
5509    }
5510
5511  /* If the structure is in a register or if the component
5512     is a bit field, we cannot use addressing to access it.
5513     Use bit-field techniques or SUBREG to store in it.  */
5514
5515  if (mode == VOIDmode
5516      || (mode != BLKmode && ! direct_store[(int) mode]
5517	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5518	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5519      || REG_P (target)
5520      || GET_CODE (target) == SUBREG
5521      /* If the field isn't aligned enough to store as an ordinary memref,
5522	 store it as a bit field.  */
5523      || (mode != BLKmode
5524	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5525		|| bitpos % GET_MODE_ALIGNMENT (mode))
5526	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5527	      || (bitpos % BITS_PER_UNIT != 0)))
5528      /* If the RHS and field are a constant size and the size of the
5529	 RHS isn't the same size as the bitfield, we must use bitfield
5530	 operations.  */
5531      || (bitsize >= 0
5532	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5533	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5534    {
5535      rtx temp;
5536
5537      /* If EXP is a NOP_EXPR of precision less than its mode, then that
5538	 implies a mask operation.  If the precision is the same size as
5539	 the field we're storing into, that mask is redundant.  This is
5540	 particularly common with bit field assignments generated by the
5541	 C front end.  */
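      /* For instance, when assigning to a 5-bit bit-field the C front
	 end converts the right-hand side to a 5-bit integer type; since
	 the bit-field store below masks the value to 5 bits anyway, the
	 unconverted operand can be expanded directly.  */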
5542      if (TREE_CODE (exp) == NOP_EXPR)
5543	{
5544	  tree type = TREE_TYPE (exp);
5545	  if (INTEGRAL_TYPE_P (type)
5546	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5547	      && bitsize == TYPE_PRECISION (type))
5548	    {
5549	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
5550	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5551		exp = TREE_OPERAND (exp, 0);
5552	    }
5553	}
5554
5555      temp = expand_normal (exp);
5556
5557      /* If BITSIZE is narrower than the size of the type of EXP
5558	 we will be narrowing TEMP.  Normally, what's wanted are the
5559	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5561      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5562	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5563	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5564	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5565			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5566				       - bitsize),
5567			     NULL_RTX, 1);
5568
5569      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5570	 MODE.  */
5571      if (mode != VOIDmode && mode != BLKmode
5572	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5573	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5574
5575      /* If the modes of TARGET and TEMP are both BLKmode, both
5576	 must be in memory and BITPOS must be aligned on a byte
5577	 boundary.  If so, we simply do a block copy.  */
5578      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5579	{
5580	  gcc_assert (MEM_P (target) && MEM_P (temp)
5581		      && !(bitpos % BITS_PER_UNIT));
5582
5583	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5584	  emit_block_move (target, temp,
5585			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5586				    / BITS_PER_UNIT),
5587			   BLOCK_OP_NORMAL);
5588
5589	  return const0_rtx;
5590	}
5591
5592      /* Store the value in the bitfield.  */
5593      store_bit_field (target, bitsize, bitpos, mode, temp);
5594
5595      return const0_rtx;
5596    }
5597  else
5598    {
5599      /* Now build a reference to just the desired component.  */
5600      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5601
5602      if (to_rtx == target)
5603	to_rtx = copy_rtx (to_rtx);
5604
5605      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5606      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5607	set_mem_alias_set (to_rtx, alias_set);
5608
5609      return store_expr (exp, to_rtx, 0);
5610    }
5611}
5612
5613/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5614   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5615   codes and find the ultimate containing object, which we return.
5616
5617   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5618   bit position, and *PUNSIGNEDP to the signedness of the field.
5619   If the position of the field is variable, we store a tree
5620   giving the variable offset (in units) in *POFFSET.
5621   This offset is in addition to the bit position.
5622   If the position is not variable, we store 0 in *POFFSET.
5623
5624   If any of the extraction expressions is volatile,
5625   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5626
5627   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5628   is a mode that can be used to access the field.  In that case, *PBITSIZE
5629   is redundant.
5630
5631   If the field describes a variable-sized object, *PMODE is set to
5632   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5633   this case, but the address of the object can be found.
5634
5635   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5636   look through nodes that serve as markers of a greater alignment than
5637   the one that can be deduced from the expression.  These nodes make it
5638   possible for front-ends to prevent temporaries from being created by
5639   the middle-end on alignment considerations.  For that purpose, the
5640   normal operating mode at high-level is to always pass FALSE so that
5641   the ultimate containing object is really returned; moreover, the
5642   associated predicate handled_component_p will always return TRUE
5643   on these nodes, thus indicating that they are essentially handled
5644   by get_inner_reference.  TRUE should only be passed when the caller
5645   is scanning the expression in order to build another representation
5646   and specifically knows how to handle these nodes; as such, this is
5647   the normal operating mode in the RTL expanders.  */
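
/* For example, for "s.in.a[3]" with constant field positions and array
   bounds, this returns the VAR_DECL for "s", sets *PBITPOS to the
   accumulated constant bit offset of that element, stores zero in
   *POFFSET, and derives *PBITSIZE and *PMODE from the element type.  */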
5648
5649tree
5650get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5651		     HOST_WIDE_INT *pbitpos, tree *poffset,
5652		     enum machine_mode *pmode, int *punsignedp,
5653		     int *pvolatilep, bool keep_aligning)
5654{
5655  tree size_tree = 0;
5656  enum machine_mode mode = VOIDmode;
5657  tree offset = size_zero_node;
5658  tree bit_offset = bitsize_zero_node;
5659
5660  /* First get the mode, signedness, and size.  We do this from just the
5661     outermost expression.  */
5662  if (TREE_CODE (exp) == COMPONENT_REF)
5663    {
5664      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5665      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5666	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5667
5668      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5669    }
5670  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5671    {
5672      size_tree = TREE_OPERAND (exp, 1);
5673      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5674    }
5675  else
5676    {
5677      mode = TYPE_MODE (TREE_TYPE (exp));
5678      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5679
5680      if (mode == BLKmode)
5681	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5682      else
5683	*pbitsize = GET_MODE_BITSIZE (mode);
5684    }
5685
5686  if (size_tree != 0)
5687    {
5688      if (! host_integerp (size_tree, 1))
5689	mode = BLKmode, *pbitsize = -1;
5690      else
5691	*pbitsize = tree_low_cst (size_tree, 1);
5692    }
5693
5694  *pmode = mode;
5695
5696  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5697     and find the ultimate containing object.  */
5698  while (1)
5699    {
5700      switch (TREE_CODE (exp))
5701	{
5702	case BIT_FIELD_REF:
5703	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5704				   TREE_OPERAND (exp, 2));
5705	  break;
5706
5707	case COMPONENT_REF:
5708	  {
5709	    tree field = TREE_OPERAND (exp, 1);
5710	    tree this_offset = component_ref_field_offset (exp);
5711
5712	    /* If this field hasn't been filled in yet, don't go past it.
5713	       This should only happen when folding expressions made during
5714	       type construction.  */
5715	    if (this_offset == 0)
5716	      break;
5717
5718	    offset = size_binop (PLUS_EXPR, offset, this_offset);
5719	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
5720				     DECL_FIELD_BIT_OFFSET (field));
5721
5722	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5723	  }
5724	  break;
5725
5726	case ARRAY_REF:
5727	case ARRAY_RANGE_REF:
5728	  {
5729	    tree index = TREE_OPERAND (exp, 1);
5730	    tree low_bound = array_ref_low_bound (exp);
5731	    tree unit_size = array_ref_element_size (exp);
5732
5733	    /* We assume all arrays have sizes that are a multiple of a byte.
5734	       First subtract the lower bound, if any, in the type of the
5735	       index, then convert to sizetype and multiply by the size of
5736	       the array element.  */
5737	    if (! integer_zerop (low_bound))
5738	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5739				   index, low_bound);
5740
5741	    offset = size_binop (PLUS_EXPR, offset,
5742			         size_binop (MULT_EXPR,
5743					     fold_convert (sizetype, index),
5744					     unit_size));
5745	  }
5746	  break;
5747
5748	case REALPART_EXPR:
5749	  break;
5750
5751	case IMAGPART_EXPR:
5752	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5753				   bitsize_int (*pbitsize));
5754	  break;
5755
5756	case VIEW_CONVERT_EXPR:
5757	  if (keep_aligning && STRICT_ALIGNMENT
5758	      && (TYPE_ALIGN (TREE_TYPE (exp))
5759	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5760	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5761		  < BIGGEST_ALIGNMENT)
5762	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5763		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5764	    goto done;
5765	  break;
5766
5767	default:
5768	  goto done;
5769	}
5770
5771      /* If any reference in the chain is volatile, the effect is volatile.  */
5772      if (TREE_THIS_VOLATILE (exp))
5773	*pvolatilep = 1;
5774
5775      exp = TREE_OPERAND (exp, 0);
5776    }
5777 done:
5778
5779  /* If OFFSET is constant, see if we can return the whole thing as a
5780     constant bit position.  Make sure to handle overflow during
5781     this conversion.  */
5782  if (host_integerp (offset, 0))
5783    {
5784      double_int tem = double_int_mul (tree_to_double_int (offset),
5785				       uhwi_to_double_int (BITS_PER_UNIT));
5786      tem = double_int_add (tem, tree_to_double_int (bit_offset));
5787      if (double_int_fits_in_shwi_p (tem))
5788	{
5789	  *pbitpos = double_int_to_shwi (tem);
5790	  *poffset = NULL_TREE;
5791	  return exp;
5792	}
5793    }
5794
5795  /* Otherwise, split it up.  */
5796  *pbitpos = tree_low_cst (bit_offset, 0);
5797  *poffset = offset;
5798
5799  return exp;
5800}
5801
5802/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5803   look for whether EXP or any nested component-refs within EXP is marked
5804   as PACKED.  */
5805
5806bool
5807contains_packed_reference (tree exp)
5808{
5809  bool packed_p = false;
5810
5811  while (1)
5812    {
5813      switch (TREE_CODE (exp))
5814	{
5815	case COMPONENT_REF:
5816	  {
5817	    tree field = TREE_OPERAND (exp, 1);
5818	    packed_p = DECL_PACKED (field)
5819		       || TYPE_PACKED (TREE_TYPE (field))
5820		       || TYPE_PACKED (TREE_TYPE (exp));
5821	    if (packed_p)
5822	      goto done;
5823	  }
5824	  break;
5825
5826	case BIT_FIELD_REF:
5827	case ARRAY_REF:
5828	case ARRAY_RANGE_REF:
5829	case REALPART_EXPR:
5830	case IMAGPART_EXPR:
5831	case VIEW_CONVERT_EXPR:
5832	  break;
5833
5834	default:
5835	  goto done;
5836	}
5837      exp = TREE_OPERAND (exp, 0);
5838    }
5839 done:
5840  return packed_p;
5841}
5842
5843/* Return a tree of sizetype representing the size, in bytes, of the element
5844   of EXP, an ARRAY_REF.  */
5845
5846tree
5847array_ref_element_size (tree exp)
5848{
5849  tree aligned_size = TREE_OPERAND (exp, 3);
5850  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5851
5852  /* If a size was specified in the ARRAY_REF, it's the size measured
5853     in alignment units of the element type.  So multiply by that value.  */
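  /* For instance, with an element type aligned to 4 bytes, an operand
     value of 3 denotes 3 * 4 = 12 bytes.  */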
5854  if (aligned_size)
5855    {
5856      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5857	 sizetype from another type of the same width and signedness.  */
5858      if (TREE_TYPE (aligned_size) != sizetype)
5859	aligned_size = fold_convert (sizetype, aligned_size);
5860      return size_binop (MULT_EXPR, aligned_size,
5861		         size_int (TYPE_ALIGN_UNIT (elmt_type)));
5862    }
5863
5864  /* Otherwise, take the size from that of the element type.  Substitute
5865     any PLACEHOLDER_EXPR that we have.  */
5866  else
5867    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5868}
5869
5870/* Return a tree representing the lower bound of the array mentioned in
5871   EXP, an ARRAY_REF.  */
5872
5873tree
5874array_ref_low_bound (tree exp)
5875{
5876  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5877
5878  /* If a lower bound is specified in EXP, use it.  */
5879  if (TREE_OPERAND (exp, 2))
5880    return TREE_OPERAND (exp, 2);
5881
5882  /* Otherwise, if there is a domain type and it has a lower bound, use it,
5883     substituting for a PLACEHOLDER_EXPR as needed.  */
5884  if (domain_type && TYPE_MIN_VALUE (domain_type))
5885    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5886
5887  /* Otherwise, return a zero of the appropriate type.  */
5888  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5889}
5890
5891/* Return a tree representing the upper bound of the array mentioned in
5892   EXP, an ARRAY_REF.  */
5893
5894tree
5895array_ref_up_bound (tree exp)
5896{
5897  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5898
5899  /* If there is a domain type and it has an upper bound, use it, substituting
5900     for a PLACEHOLDER_EXPR as needed.  */
5901  if (domain_type && TYPE_MAX_VALUE (domain_type))
5902    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5903
5904  /* Otherwise fail.  */
5905  return NULL_TREE;
5906}
5907
5908/* Return a tree representing the offset, in bytes, of the field referenced
5909   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
5910
5911tree
5912component_ref_field_offset (tree exp)
5913{
5914  tree aligned_offset = TREE_OPERAND (exp, 2);
5915  tree field = TREE_OPERAND (exp, 1);
5916
5917  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5918     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
5919     value.  */
5920  if (aligned_offset)
5921    {
5922      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5923	 sizetype from another type of the same width and signedness.  */
5924      if (TREE_TYPE (aligned_offset) != sizetype)
5925	aligned_offset = fold_convert (sizetype, aligned_offset);
5926      return size_binop (MULT_EXPR, aligned_offset,
5927		         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5928    }
5929
5930  /* Otherwise, take the offset from that of the field.  Substitute
5931     any PLACEHOLDER_EXPR that we have.  */
5932  else
5933    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5934}
5935
5936/* Return 1 if T is an expression that get_inner_reference handles.  */
5937
5938int
5939handled_component_p (tree t)
5940{
5941  switch (TREE_CODE (t))
5942    {
5943    case BIT_FIELD_REF:
5944    case COMPONENT_REF:
5945    case ARRAY_REF:
5946    case ARRAY_RANGE_REF:
5947    case VIEW_CONVERT_EXPR:
5948    case REALPART_EXPR:
5949    case IMAGPART_EXPR:
5950      return 1;
5951
5952    default:
5953      return 0;
5954    }
5955}
5956
5957/* Given an rtx VALUE that may contain additions and multiplications, return
5958   an equivalent value that just refers to a register, memory, or constant.
5959   This is done by generating instructions to perform the arithmetic and
5960   returning a pseudo-register containing the value.
5961
5962   The returned value may be a REG, SUBREG, MEM or constant.  */
5963
5964rtx
5965force_operand (rtx value, rtx target)
5966{
5967  rtx op1, op2;
5968  /* Use subtarget as the target for operand 0 of a binary operation.  */
5969  rtx subtarget = get_subtarget (target);
5970  enum rtx_code code = GET_CODE (value);
5971
5972  /* Check for subreg applied to an expression produced by loop optimizer.  */
5973  if (code == SUBREG
5974      && !REG_P (SUBREG_REG (value))
5975      && !MEM_P (SUBREG_REG (value)))
5976    {
5977      value = simplify_gen_subreg (GET_MODE (value),
5978				   force_reg (GET_MODE (SUBREG_REG (value)),
5979					      force_operand (SUBREG_REG (value),
5980							     NULL_RTX)),
5981				   GET_MODE (SUBREG_REG (value)),
5982				   SUBREG_BYTE (value));
5983      code = GET_CODE (value);
5984    }
5985
5986  /* Check for a PIC address load.  */
5987  if ((code == PLUS || code == MINUS)
5988      && XEXP (value, 0) == pic_offset_table_rtx
5989      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5990	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5991	  || GET_CODE (XEXP (value, 1)) == CONST))
5992    {
5993      if (!subtarget)
5994	subtarget = gen_reg_rtx (GET_MODE (value));
5995      emit_move_insn (subtarget, value);
5996      return subtarget;
5997    }
5998
5999  if (ARITHMETIC_P (value))
6000    {
6001      op2 = XEXP (value, 1);
6002      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6003	subtarget = 0;
6004      if (code == MINUS && GET_CODE (op2) == CONST_INT)
6005	{
6006	  code = PLUS;
6007	  op2 = negate_rtx (GET_MODE (value), op2);
6008	}
6009
6010      /* Check for an addition with OP2 a constant integer and our first
6011         operand a PLUS of a virtual register and something else.  In that
6012         case, we want to emit the sum of the virtual register and the
6013         constant first and then add the other value.  This allows virtual
6014         register instantiation to simply modify the constant rather than
6015         creating another one around this addition.  */
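      /* That is, (plus (plus VIRTUAL-REG X) CONST) is computed as
	 (VIRTUAL-REG + CONST) + X, so that instantiation can later fold
	 CONST into the virtual register's offset.  */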
6016      if (code == PLUS && GET_CODE (op2) == CONST_INT
6017	  && GET_CODE (XEXP (value, 0)) == PLUS
6018	  && REG_P (XEXP (XEXP (value, 0), 0))
6019	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6020	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6021	{
6022	  rtx temp = expand_simple_binop (GET_MODE (value), code,
6023					  XEXP (XEXP (value, 0), 0), op2,
6024					  subtarget, 0, OPTAB_LIB_WIDEN);
6025	  return expand_simple_binop (GET_MODE (value), code, temp,
6026				      force_operand (XEXP (XEXP (value,
6027								 0), 1), 0),
6028				      target, 0, OPTAB_LIB_WIDEN);
6029	}
6030
6031      op1 = force_operand (XEXP (value, 0), subtarget);
6032      op2 = force_operand (op2, NULL_RTX);
6033      switch (code)
6034	{
6035	case MULT:
6036	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
6037	case DIV:
6038	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
6039	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
6040					target, 1, OPTAB_LIB_WIDEN);
6041	  else
6042	    return expand_divmod (0,
6043				  FLOAT_MODE_P (GET_MODE (value))
6044				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
6045				  GET_MODE (value), op1, op2, target, 0);
6046	  break;
6047	case MOD:
6048	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6049				target, 0);
6050	  break;
6051	case UDIV:
6052	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6053				target, 1);
6054	  break;
6055	case UMOD:
6056	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6057				target, 1);
6058	  break;
6059	case ASHIFTRT:
6060	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6061				      target, 0, OPTAB_LIB_WIDEN);
6062	  break;
6063	default:
6064	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6065				      target, 1, OPTAB_LIB_WIDEN);
6066	}
6067    }
6068  if (UNARY_P (value))
6069    {
6070      if (!target)
6071	target = gen_reg_rtx (GET_MODE (value));
6072      op1 = force_operand (XEXP (value, 0), NULL_RTX);
6073      switch (code)
6074	{
6075	case ZERO_EXTEND:
6076	case SIGN_EXTEND:
6077	case TRUNCATE:
6078	case FLOAT_EXTEND:
6079	case FLOAT_TRUNCATE:
6080	  convert_move (target, op1, code == ZERO_EXTEND);
6081	  return target;
6082
6083	case FIX:
6084	case UNSIGNED_FIX:
6085	  expand_fix (target, op1, code == UNSIGNED_FIX);
6086	  return target;
6087
6088	case FLOAT:
6089	case UNSIGNED_FLOAT:
6090	  expand_float (target, op1, code == UNSIGNED_FLOAT);
6091	  return target;
6092
6093	default:
6094	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6095	}
6096    }
6097
6098#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
6100     explicit, so we need to deal with such paradoxical SUBREGs.  */
6101  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6102      && (GET_MODE_SIZE (GET_MODE (value))
6103	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6104    value
6105      = simplify_gen_subreg (GET_MODE (value),
6106			     force_reg (GET_MODE (SUBREG_REG (value)),
6107					force_operand (SUBREG_REG (value),
6108						       NULL_RTX)),
6109			     GET_MODE (SUBREG_REG (value)),
6110			     SUBREG_BYTE (value));
6111#endif
6112
6113  return value;
6114}
6115
6116/* Subroutine of expand_expr: return nonzero iff there is no way that
6117   EXP can reference X, which is being modified.  TOP_P is nonzero if this
6118   call is going to be used to determine whether we need a temporary
6119   for EXP, as opposed to a recursive call to this function.
6120
6121   It is always safe for this routine to return zero since it merely
6122   searches for optimization opportunities.  */
6123
6124int
6125safe_from_p (rtx x, tree exp, int top_p)
6126{
6127  rtx exp_rtl = 0;
6128  int i, nops;
6129
6130  if (x == 0
6131      /* If EXP has varying size, we MUST use a target since we currently
6132	 have no way of allocating temporaries of variable size
6133	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6134	 So we assume here that something at a higher level has prevented a
6135	 clash.  This is somewhat bogus, but the best we can do.  Only
6136	 do this when X is BLKmode and when we are at the top level.  */
6137      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6138	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6139	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6140	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6141	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6142	      != INTEGER_CST)
6143	  && GET_MODE (x) == BLKmode)
6144      /* If X is in the outgoing argument area, it is always safe.  */
6145      || (MEM_P (x)
6146	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
6147	      || (GET_CODE (XEXP (x, 0)) == PLUS
6148		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6149    return 1;
6150
6151  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6152     find the underlying pseudo.  */
6153  if (GET_CODE (x) == SUBREG)
6154    {
6155      x = SUBREG_REG (x);
6156      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6157	return 0;
6158    }
6159
6160  /* Now look at our tree code and possibly recurse.  */
6161  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6162    {
6163    case tcc_declaration:
6164      exp_rtl = DECL_RTL_IF_SET (exp);
6165      break;
6166
6167    case tcc_constant:
6168      return 1;
6169
6170    case tcc_exceptional:
6171      if (TREE_CODE (exp) == TREE_LIST)
6172	{
6173	  while (1)
6174	    {
6175	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6176		return 0;
6177	      exp = TREE_CHAIN (exp);
6178	      if (!exp)
6179		return 1;
6180	      if (TREE_CODE (exp) != TREE_LIST)
6181		return safe_from_p (x, exp, 0);
6182	    }
6183	}
6184      else if (TREE_CODE (exp) == CONSTRUCTOR)
6185	{
6186	  constructor_elt *ce;
6187	  unsigned HOST_WIDE_INT idx;
6188
6189	  for (idx = 0;
6190	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6191	       idx++)
6192	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6193		|| !safe_from_p (x, ce->value, 0))
6194	      return 0;
6195	  return 1;
6196	}
6197      else if (TREE_CODE (exp) == ERROR_MARK)
6198	return 1;	/* An already-visited SAVE_EXPR? */
6199      else
6200	return 0;
6201
6202    case tcc_statement:
6203      /* The only case we look at here is the DECL_INITIAL inside a
6204	 DECL_EXPR.  */
6205      return (TREE_CODE (exp) != DECL_EXPR
6206	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6207	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6208	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6209
6210    case tcc_binary:
6211    case tcc_comparison:
6212      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6213	return 0;
6214      /* Fall through.  */
6215
6216    case tcc_unary:
6217      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6218
6219    case tcc_expression:
6220    case tcc_reference:
6221      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6222	 the expression.  If it is set, we conflict iff we are that rtx or
6223	 both are in memory.  Otherwise, we check all operands of the
6224	 expression recursively.  */
6225
6226      switch (TREE_CODE (exp))
6227	{
6228	case ADDR_EXPR:
6229	  /* If the operand is static or we are static, we can't conflict.
6230	     Likewise if we don't conflict with the operand at all.  */
6231	  if (staticp (TREE_OPERAND (exp, 0))
6232	      || TREE_STATIC (exp)
6233	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6234	    return 1;
6235
	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
6239	  exp = TREE_OPERAND (exp, 0);
6240	  if (DECL_P (exp))
6241	    {
6242	      if (!DECL_RTL_SET_P (exp)
6243		  || !MEM_P (DECL_RTL (exp)))
6244		return 0;
6245	      else
6246		exp_rtl = XEXP (DECL_RTL (exp), 0);
6247	    }
6248	  break;
6249
6250	case MISALIGNED_INDIRECT_REF:
6251	case ALIGN_INDIRECT_REF:
6252	case INDIRECT_REF:
6253	  if (MEM_P (x)
6254	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6255					get_alias_set (exp)))
6256	    return 0;
6257	  break;
6258
6259	case CALL_EXPR:
6260	  /* Assume that the call will clobber all hard registers and
6261	     all of memory.  */
6262	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6263	      || MEM_P (x))
6264	    return 0;
6265	  break;
6266
6267	case WITH_CLEANUP_EXPR:
6268	case CLEANUP_POINT_EXPR:
6269	  /* Lowered by gimplify.c.  */
6270	  gcc_unreachable ();
6271
6272	case SAVE_EXPR:
6273	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6274
6275	default:
6276	  break;
6277	}
6278
6279      /* If we have an rtx, we do not need to scan our operands.  */
6280      if (exp_rtl)
6281	break;
6282
6283      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6284      for (i = 0; i < nops; i++)
6285	if (TREE_OPERAND (exp, i) != 0
6286	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6287	  return 0;
6288
6289      /* If this is a language-specific tree code, it may require
6290	 special handling.  */
6291      if ((unsigned int) TREE_CODE (exp)
6292	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6293	  && !lang_hooks.safe_from_p (x, exp))
6294	return 0;
6295      break;
6296
6297    case tcc_type:
6298      /* Should never get a type here.  */
6299      gcc_unreachable ();
6300    }
6301
6302  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6303     with it.  */
6304  if (exp_rtl)
6305    {
6306      if (GET_CODE (exp_rtl) == SUBREG)
6307	{
6308	  exp_rtl = SUBREG_REG (exp_rtl);
6309	  if (REG_P (exp_rtl)
6310	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6311	    return 0;
6312	}
6313
6314      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6315	 are memory and they conflict.  */
6316      return ! (rtx_equal_p (x, exp_rtl)
6317		|| (MEM_P (x) && MEM_P (exp_rtl)
6318		    && true_dependence (exp_rtl, VOIDmode, x,
6319					rtx_addr_varies_p)));
6320    }
6321
6322  /* If we reach here, it is safe.  */
6323  return 1;
6324}
6325
6326
6327/* Return the highest power of two that EXP is known to be a multiple of.
6328   This is used in updating alignment of MEMs in array references.  */
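
/* For example, for the tree form of "n * 8 + 4" the result is 4: the
   MULT_EXPR contributes 8 (8 times the factor of "n", which defaults to 1)
   and the enclosing PLUS_EXPR takes the minimum of 8 and 4.  */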
6329
6330unsigned HOST_WIDE_INT
6331highest_pow2_factor (tree exp)
6332{
6333  unsigned HOST_WIDE_INT c0, c1;
6334
6335  switch (TREE_CODE (exp))
6336    {
6337    case INTEGER_CST:
6338      /* We can find the lowest bit that's a one.  If the low
6339	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6340	 We need to handle this case since we can find it in a COND_EXPR,
6341	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6342	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6343	 later ICE.  */
6344      if (TREE_CONSTANT_OVERFLOW (exp))
6345	return BIGGEST_ALIGNMENT;
6346      else
6347	{
	  /* Note: tree_low_cst is intentionally not used here, since
	     we don't care about the upper bits.  */
6350	  c0 = TREE_INT_CST_LOW (exp);
6351	  c0 &= -c0;
6352	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6353	}
6354      break;
6355
6356    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6357      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6358      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6359      return MIN (c0, c1);
6360
6361    case MULT_EXPR:
6362      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6363      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6364      return c0 * c1;
6365
6366    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6367    case CEIL_DIV_EXPR:
6368      if (integer_pow2p (TREE_OPERAND (exp, 1))
6369	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6370	{
6371	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6372	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6373	  return MAX (1, c0 / c1);
6374	}
6375      break;
6376
6377    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6378    case SAVE_EXPR:
6379      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6380
6381    case COMPOUND_EXPR:
6382      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6383
6384    case COND_EXPR:
6385      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6386      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6387      return MIN (c0, c1);
6388
6389    default:
6390      break;
6391    }
6392
6393  return 1;
6394}
6395
6396/* Similar, except that the alignment requirements of TARGET are
6397   taken into account.  Assume it is at least as aligned as its
6398   type, unless it is a COMPONENT_REF in which case the layout of
6399   the structure gives the alignment.  */
6400
6401static unsigned HOST_WIDE_INT
6402highest_pow2_factor_for_target (tree target, tree exp)
6403{
6404  unsigned HOST_WIDE_INT target_align, factor;
6405
6406  factor = highest_pow2_factor (exp);
6407  if (TREE_CODE (target) == COMPONENT_REF)
6408    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6409  else
6410    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6411  return MAX (factor, target_align);
6412}
6413
6414/* Expands variable VAR.  */
6415
6416void
6417expand_var (tree var)
6418{
6419  if (DECL_EXTERNAL (var))
6420    return;
6421
6422  if (TREE_STATIC (var))
6423    /* If this is an inlined copy of a static local variable,
6424       look up the original decl.  */
6425    var = DECL_ORIGIN (var);
6426
6427  if (TREE_STATIC (var)
6428      ? !TREE_ASM_WRITTEN (var)
6429      : !DECL_RTL_SET_P (var))
6430    {
6431      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6432	/* Should be ignored.  */;
6433      else if (lang_hooks.expand_decl (var))
6434	/* OK.  */;
6435      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6436	expand_decl (var);
6437      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6438	rest_of_decl_compilation (var, 0, 0);
6439      else
6440	/* No expansion needed.  */
6441	gcc_assert (TREE_CODE (var) == TYPE_DECL
6442		    || TREE_CODE (var) == CONST_DECL
6443		    || TREE_CODE (var) == FUNCTION_DECL
6444		    || TREE_CODE (var) == LABEL_DECL);
6445    }
6446}
6447
6448/* Subroutine of expand_expr.  Expand the two operands of a binary
6449   expression EXP0 and EXP1 placing the results in OP0 and OP1.
6450   The value may be stored in TARGET if TARGET is nonzero.  The
6451   MODIFIER argument is as documented by expand_expr.  */
6452
6453static void
6454expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6455		 enum expand_modifier modifier)
6456{
6457  if (! safe_from_p (target, exp1, 1))
6458    target = 0;
6459  if (operand_equal_p (exp0, exp1, 0))
6460    {
6461      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6462      *op1 = copy_rtx (*op0);
6463    }
6464  else
6465    {
6466      /* If we need to preserve evaluation order, copy exp0 into its own
6467	 temporary variable so that it can't be clobbered by exp1.  */
6468      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6469	exp0 = save_expr (exp0);
6470      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6471      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6472    }
6473}
6474
6475
6476/* Return a MEM that contains constant EXP.  DEFER is as for
6477   output_constant_def and MODIFIER is as for expand_expr.  */
6478
6479static rtx
6480expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6481{
6482  rtx mem;
6483
6484  mem = output_constant_def (exp, defer);
6485  if (modifier != EXPAND_INITIALIZER)
6486    mem = use_anchored_address (mem);
6487  return mem;
6488}
6489
6490/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6491   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6492
6493static rtx
6494expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6495		         enum expand_modifier modifier)
6496{
6497  rtx result, subtarget;
6498  tree inner, offset;
6499  HOST_WIDE_INT bitsize, bitpos;
6500  int volatilep, unsignedp;
6501  enum machine_mode mode1;
6502
6503  /* If we are taking the address of a constant and are at the top level,
6504     we have to use output_constant_def since we can't call force_const_mem
6505     at top level.  */
6506  /* ??? This should be considered a front-end bug.  We should not be
6507     generating ADDR_EXPR of something that isn't an LVALUE.  The only
6508     exception here is STRING_CST.  */
6509  if (TREE_CODE (exp) == CONSTRUCTOR
6510      || CONSTANT_CLASS_P (exp))
6511    return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6512
6513  /* Everything must be something allowed by is_gimple_addressable.  */
6514  switch (TREE_CODE (exp))
6515    {
6516    case INDIRECT_REF:
6517      /* This case will happen via recursion for &a->b.  */
6518      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6519
6520    case CONST_DECL:
6521      /* Recurse and make the output_constant_def clause above handle this.  */
6522      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6523				      tmode, modifier);
6524
6525    case REALPART_EXPR:
6526      /* The real part of the complex number is always first, therefore
6527	 the address is the same as the address of the parent object.  */
6528      offset = 0;
6529      bitpos = 0;
6530      inner = TREE_OPERAND (exp, 0);
6531      break;
6532
6533    case IMAGPART_EXPR:
6534      /* The imaginary part of the complex number is always second.
6535	 The expression is therefore always offset by the size of the
6536	 scalar type.  */
6537      offset = 0;
6538      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6539      inner = TREE_OPERAND (exp, 0);
6540      break;
6541
6542    default:
6543      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6544	 expand_expr, as that can have various side effects; LABEL_DECLs for
6545	 example, may not have their DECL_RTL set yet.  Assume language
6546	 specific tree nodes can be expanded in some interesting way.  */
6547      if (DECL_P (exp)
6548	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6549	{
6550	  result = expand_expr (exp, target, tmode,
6551				modifier == EXPAND_INITIALIZER
6552				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6553
6554	  /* If the DECL isn't in memory, then the DECL wasn't properly
6555	     marked TREE_ADDRESSABLE, which will be either a front-end
6556	     or a tree optimizer bug.  */
6557	  gcc_assert (MEM_P (result));
6558	  result = XEXP (result, 0);
6559
6560	  /* ??? Is this needed anymore?  */
6561	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
6562	    {
6563	      assemble_external (exp);
6564	      TREE_USED (exp) = 1;
6565	    }
6566
6567	  if (modifier != EXPAND_INITIALIZER
6568	      && modifier != EXPAND_CONST_ADDRESS)
6569	    result = force_operand (result, target);
6570	  return result;
6571	}
6572
6573      /* Pass FALSE as the last argument to get_inner_reference although
6574	 we are expanding to RTL.  The rationale is that we know how to
6575	 handle "aligning nodes" here: we can just bypass them because
6576	 they won't change the final object whose address will be returned
6577	 (they actually exist only for that purpose).  */
6578      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6579				   &mode1, &unsignedp, &volatilep, false);
6580      break;
6581    }
6582
6583  /* We must have made progress.  */
6584  gcc_assert (inner != exp);
6585
6586  subtarget = offset || bitpos ? NULL_RTX : target;
6587  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6588
6589  if (offset)
6590    {
6591      rtx tmp;
6592
6593      if (modifier != EXPAND_NORMAL)
6594	result = force_operand (result, NULL);
6595      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6596
6597      result = convert_memory_address (tmode, result);
6598      tmp = convert_memory_address (tmode, tmp);
6599
6600      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6601	result = gen_rtx_PLUS (tmode, result, tmp);
6602      else
6603	{
6604	  subtarget = bitpos ? NULL_RTX : target;
6605	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6606					1, OPTAB_LIB_WIDEN);
6607	}
6608    }
6609
6610  if (bitpos)
6611    {
6612      /* Someone beforehand should have rejected taking the address
6613	 of such an object.  */
6614      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6615
6616      result = plus_constant (result, bitpos / BITS_PER_UNIT);
6617      if (modifier < EXPAND_SUM)
6618	result = force_operand (result, target);
6619    }
6620
6621  return result;
6622}
6623
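/* As an illustrative sketch (not verbatim compiler output): for

     _Complex double c;
     double *p = &__imag__ c;

   the IMAGPART_EXPR case above sets BITPOS to the bit size of the
   scalar mode -- 64 where double is DFmode -- so the final
   plus_constant step produces something like

     (plus:P (reg:P addr_of_c) (const_int 8))

   assuming BITS_PER_UNIT is 8.  The exact RTL depends on the target
   and on MODIFIER.  */
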
6624/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
6625   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6626
6627static rtx
6628expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6629		       enum expand_modifier modifier)
6630{
6631  enum machine_mode rmode;
6632  rtx result;
6633
6634  /* Target mode of VOIDmode says "whatever's natural".  */
6635  if (tmode == VOIDmode)
6636    tmode = TYPE_MODE (TREE_TYPE (exp));
6637
6638  /* We can get called with some Weird Things if the user does silliness
6639     like "(short) &a".  In that case, convert_memory_address won't do
6640     the right thing, so ignore the given target mode.  */
6641  if (tmode != Pmode && tmode != ptr_mode)
6642    tmode = Pmode;
6643
6644  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6645				    tmode, modifier);
6646
6647  /* Despite expand_expr's claim that TMODE may be ignored when not
6648     strictly convenient, things break if we don't honor it.  Note
6649     that, combined with the above, we only do this for pointer modes.  */
6650  rmode = GET_MODE (result);
6651  if (rmode == VOIDmode)
6652    rmode = tmode;
6653  if (rmode != tmode)
6654    result = convert_memory_address (tmode, result);
6655
6656  return result;
6657}
6658
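/* For example, with a silly conversion such as

     short s = (short) &a;

   the ADDR_EXPR may arrive here with TMODE == HImode; since HImode is
   neither Pmode nor ptr_mode, the code above falls back to Pmode and
   leaves the narrowing to the enclosing conversion.  This is only a
   sketch of the common case; which mode actually reaches here depends
   on the front end and on the target's pointer modes.  */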
6659
6660/* expand_expr: generate code for computing expression EXP.
6661   An rtx for the computed value is returned.  The value is never null.
6662   In the case of a void EXP, const0_rtx is returned.
6663
6664   The value may be stored in TARGET if TARGET is nonzero.
6665   TARGET is just a suggestion; callers must assume that
6666   the rtx returned may not be the same as TARGET.
6667
6668   If TARGET is CONST0_RTX, it means that the value will be ignored.
6669
6670   If TMODE is not VOIDmode, it suggests generating the
6671   result in mode TMODE.  But this is done only when convenient.
6672   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6673   TMODE is just a suggestion; callers must assume that
6674   the rtx returned may not have mode TMODE.
6675
6676   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6677   probably will not be used.
6678
6679   If MODIFIER is EXPAND_SUM then when EXP is an addition
6680   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6681   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6682   products as above, or REG or MEM, or constant.
6683   Ordinarily in such cases we would output mul or add instructions
6684   and then return a pseudo reg containing the sum.
6685
6686   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6687   it also marks a label as absolutely required (it can't be dead).
6688   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6689   This is used for outputting expressions used in initializers.
6690
6691   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6692   with a constant address even if that address is not normally legitimate.
6693   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6694
6695   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6696   a call parameter.  Such targets require special care as we haven't yet
6697   marked TARGET so that it's safe from being trashed by libcalls.  We
6698   don't want to use TARGET for anything but the final result;
6699   intermediate values must go elsewhere.  Additionally, calls to
6700   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6701
6702   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6703   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6704   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
6705   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6706   recursively.  */
6707
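/* As a hedged illustration of the modifiers above (the exact RTL is
   target-dependent), expanding the address computation for a[i] with
   EXPAND_NORMAL typically forces the sum into a pseudo:

     (set (reg:P 101) (plus:P (reg:P a_rtx) (reg:P i_scaled)))

   whereas EXPAND_SUM may return the bare form

     (plus:P (reg:P a_rtx) (mult:P (reg:P i_rtx) (const_int 4)))

   so the caller can fold it into an addressing mode; 4-byte elements
   are assumed here.  */
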
6708static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6709			       enum expand_modifier, rtx *);
6710
6711rtx
6712expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6713		  enum expand_modifier modifier, rtx *alt_rtl)
6714{
6715  int rn = -1;
6716  rtx ret, last = NULL;
6717
6718  /* Handle ERROR_MARK before anybody tries to access its type.  */
6719  if (TREE_CODE (exp) == ERROR_MARK
6720      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6721    {
6722      ret = CONST0_RTX (tmode);
6723      return ret ? ret : const0_rtx;
6724    }
6725
6726  if (flag_non_call_exceptions)
6727    {
6728      rn = lookup_stmt_eh_region (exp);
6729      /* If rn < 0, then either (1) tree-ssa is not used or (2) EXP cannot throw.  */
6730      if (rn >= 0)
6731	last = get_last_insn ();
6732    }
6733
6734  /* If this is an expression of some kind and it has an associated line
6735     number, then emit the line number before expanding the expression.
6736
6737     We need to save and restore the file and line information so that
6738     errors discovered during expansion are emitted with the right
6739     information.  It would be better if the diagnostic routines
6740     used the file/line information embedded in the tree nodes rather
6741     than globals.  */
6742  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6743    {
6744      location_t saved_location = input_location;
6745      input_location = EXPR_LOCATION (exp);
6746      emit_line_note (input_location);
6747
6748      /* Record where the insns produced belong.  */
6749      record_block_change (TREE_BLOCK (exp));
6750
6751      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6752
6753      input_location = saved_location;
6754    }
6755  else
6756    {
6757      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6758    }
6759
6760  /* If using non-call exceptions, mark all insns that may trap.
6761     expand_call() will mark CALL_INSNs before we get to this code,
6762     but it doesn't handle libcalls, and these may trap.  */
6763  if (rn >= 0)
6764    {
6765      rtx insn;
6766      for (insn = next_real_insn (last); insn;
6767	   insn = next_real_insn (insn))
6768	{
6769	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6770	      /* If we want exceptions for non-call insns, any
6771		 may_trap_p instruction may throw.  */
6772	      && GET_CODE (PATTERN (insn)) != CLOBBER
6773	      && GET_CODE (PATTERN (insn)) != USE
6774	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6775	    {
6776	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6777						  REG_NOTES (insn));
6778	    }
6779	}
6780    }
6781
6782  return ret;
6783}
6784
6785static rtx
6786expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6787		    enum expand_modifier modifier, rtx *alt_rtl)
6788{
6789  rtx op0, op1, temp, decl_rtl;
6790  tree type = TREE_TYPE (exp);
6791  int unsignedp;
6792  enum machine_mode mode;
6793  enum tree_code code = TREE_CODE (exp);
6794  optab this_optab;
6795  rtx subtarget, original_target;
6796  int ignore;
6797  tree context, subexp0, subexp1;
6798  bool reduce_bit_field = false;
6799#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
6800				 ? reduce_to_bit_field_precision ((expr), \
6801								  target, \
6802								  type)	  \
6803				 : (expr))
6804
6805  mode = TYPE_MODE (type);
6806  unsignedp = TYPE_UNSIGNED (type);
6807  if (lang_hooks.reduce_bit_field_operations
6808      && TREE_CODE (type) == INTEGER_TYPE
6809      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6810    {
6811      /* An operation in what may be a bit-field type needs the
6812	 result to be reduced to the precision of the bit-field type,
6813	 which is narrower than that of the type's mode.  */
6814      reduce_bit_field = true;
6815      if (modifier == EXPAND_STACK_PARM)
6816	target = 0;
6817    }
6818
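  /* A hedged illustration of the reduction above: for an operation on
     a 3-bit integral type whose mode is QImode (as some front ends
     create for bit-fields), an addition such as x + 1 is carried out
     in full QImode and REDUCE_BIT_FIELD then masks or sign-extends the
     result back to 3 bits of precision.  Whether this path is used at
     all is controlled by lang_hooks.reduce_bit_field_operations.  */
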
6819  /* Use subtarget as the target for operand 0 of a binary operation.  */
6820  subtarget = get_subtarget (target);
6821  original_target = target;
6822  ignore = (target == const0_rtx
6823	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6824		 || code == CONVERT_EXPR || code == COND_EXPR
6825		 || code == VIEW_CONVERT_EXPR)
6826		&& TREE_CODE (type) == VOID_TYPE));
6827
6828  /* If we are going to ignore this result, we need only do something
6829     if there is a side-effect somewhere in the expression.  If there
6830     is, short-circuit the most common cases here.  Note that we must
6831     not call expand_expr with anything but const0_rtx in case this
6832     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6833
6834  if (ignore)
6835    {
6836      if (! TREE_SIDE_EFFECTS (exp))
6837	return const0_rtx;
6838
6839      /* Ensure we reference a volatile object even if value is ignored, but
6840	 don't do this if all we are doing is taking its address.  */
6841      if (TREE_THIS_VOLATILE (exp)
6842	  && TREE_CODE (exp) != FUNCTION_DECL
6843	  && mode != VOIDmode && mode != BLKmode
6844	  && modifier != EXPAND_CONST_ADDRESS)
6845	{
6846	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6847	  if (MEM_P (temp))
6848	    temp = copy_to_reg (temp);
6849	  return const0_rtx;
6850	}
6851
6852      if (TREE_CODE_CLASS (code) == tcc_unary
6853	  || code == COMPONENT_REF || code == INDIRECT_REF)
6854	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6855			    modifier);
6856
6857      else if (TREE_CODE_CLASS (code) == tcc_binary
6858	       || TREE_CODE_CLASS (code) == tcc_comparison
6859	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6860	{
6861	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6862	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6863	  return const0_rtx;
6864	}
6865      else if (code == BIT_FIELD_REF)
6866	{
6867	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6868	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6869	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6870	  return const0_rtx;
6871	}
6872
6873      target = 0;
6874    }
6875
6876
6877  switch (code)
6878    {
6879    case LABEL_DECL:
6880      {
6881	tree function = decl_function_context (exp);
6882
6883	temp = label_rtx (exp);
6884	temp = gen_rtx_LABEL_REF (Pmode, temp);
6885
6886	if (function != current_function_decl
6887	    && function != 0)
6888	  LABEL_REF_NONLOCAL_P (temp) = 1;
6889
6890	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6891	return temp;
6892      }
6893
6894    case SSA_NAME:
6895      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6896				 NULL);
6897
6898    case PARM_DECL:
6899    case VAR_DECL:
6900      /* If a static var's type was incomplete when the decl was written,
6901	 but the type is complete now, lay out the decl now.  */
6902      if (DECL_SIZE (exp) == 0
6903	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6904	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6905	layout_decl (exp, 0);
6906
6907      /* ... fall through ...  */
6908
6909    case FUNCTION_DECL:
6910    case RESULT_DECL:
6911      decl_rtl = DECL_RTL (exp);
6912      gcc_assert (decl_rtl);
6913
6914      /* Ensure the variable is marked as used even if it didn't go
6915	 through a parser.  If it hasn't been used yet, write out an
6916	 external definition.  */
6917      if (! TREE_USED (exp))
6918	{
6919	  assemble_external (exp);
6920	  TREE_USED (exp) = 1;
6921	}
6922
6923      /* Show we haven't gotten RTL for this yet.  */
6924      temp = 0;
6925
6926      /* Variables inherited from containing functions should have
6927	 been lowered by this point.  */
6928      context = decl_function_context (exp);
6929      gcc_assert (!context
6930		  || context == current_function_decl
6931		  || TREE_STATIC (exp)
6932		  /* ??? C++ creates functions that are not TREE_STATIC.  */
6933		  || TREE_CODE (exp) == FUNCTION_DECL);
6934
6935      /* This is the case of an array whose size is to be determined
6936	 from its initializer, while the initializer is still being parsed.
6937	 See expand_decl.  */
6938
6939      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6940	temp = validize_mem (decl_rtl);
6941
6942      /* If DECL_RTL is memory, we are in the normal case.  If the
6943	 address is not valid, or it is not a register and -fforce-addr
6944	 is specified, get the address into a register.  */
6945
6946      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6947	{
6948	  if (alt_rtl)
6949	    *alt_rtl = decl_rtl;
6950	  decl_rtl = use_anchored_address (decl_rtl);
6951	  if (modifier != EXPAND_CONST_ADDRESS
6952	      && modifier != EXPAND_SUM
6953	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6954		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6955	    temp = replace_equiv_address (decl_rtl,
6956					  copy_rtx (XEXP (decl_rtl, 0)));
6957	}
6958
6959      /* If we got something, return it.  But first, set the alignment
6960	 if the address is a register.  */
6961      if (temp != 0)
6962	{
6963	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6964	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6965
6966	  return temp;
6967	}
6968
6969      /* If the mode of DECL_RTL does not match that of the decl, it
6970	 must be a promoted value.  We return a SUBREG of the wanted mode,
6971	 but mark it so that we know that it was already extended.  */
6972
6973      if (REG_P (decl_rtl)
6974	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
6975	{
6976	  enum machine_mode pmode;
6977
6978	  /* Get the signedness used for this variable.  Ensure we get the
6979	     same mode we got when the variable was declared.  */
6980	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6981				(TREE_CODE (exp) == RESULT_DECL
6982				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6983	  gcc_assert (GET_MODE (decl_rtl) == pmode);
6984
6985	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
6986	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6987	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6988	  return temp;
6989	}
6990
6991      return decl_rtl;
6992
6993    case INTEGER_CST:
6994      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6995				 TREE_INT_CST_HIGH (exp), mode);
6996
6997      /* ??? If overflow is set, fold will have done an incomplete job,
6998	 which can result in (plus xx (const_int 0)), which can get
6999	 simplified by validate_replace_rtx during virtual register
7000	 instantiation, which can result in unrecognizable insns.
7001	 Avoid this by forcing all overflows into registers.  */
7002      if (TREE_CONSTANT_OVERFLOW (exp)
7003	  && modifier != EXPAND_INITIALIZER)
7004	temp = force_reg (mode, temp);
7005
7006      return temp;
7007
7008    case VECTOR_CST:
7009      {
7010	tree tmp = NULL_TREE;
7011	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7012	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7013	  return const_vector_from_tree (exp);
7014	if (GET_MODE_CLASS (mode) == MODE_INT)
7015	  {
7016	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7017	    if (type_for_mode)
7018	      tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7019	  }
7020	if (!tmp)
7021	  tmp = build_constructor_from_list (type,
7022					     TREE_VECTOR_CST_ELTS (exp));
7023	return expand_expr (tmp, ignore ? const0_rtx : target,
7024			    tmode, modifier);
7025      }
7026
7027    case CONST_DECL:
7028      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7029
7030    case REAL_CST:
7031      /* If optimized, generate immediate CONST_DOUBLE
7032	 which will be turned into memory by reload if necessary.
7033
7034	 We used to force a register so that loop.c could see it.  But
7035	 this does not allow gen_* patterns to perform optimizations with
7036	 the constants.  It also produces two insns in cases like "x = 1.0;".
7037	 On most machines, floating-point constants are not permitted in
7038	 many insns, so we'd end up copying it to a register in any case.
7039
7040	 Now, we do the copying in expand_binop, if appropriate.  */
7041      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7042					   TYPE_MODE (TREE_TYPE (exp)));
7043
7044    case COMPLEX_CST:
7045      /* Handle evaluating a complex constant in a CONCAT target.  */
7046      if (original_target && GET_CODE (original_target) == CONCAT)
7047	{
7048	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7049	  rtx rtarg, itarg;
7050
7051	  rtarg = XEXP (original_target, 0);
7052	  itarg = XEXP (original_target, 1);
7053
7054	  /* Move the real and imaginary parts separately.  */
7055	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7056	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7057
7058	  if (op0 != rtarg)
7059	    emit_move_insn (rtarg, op0);
7060	  if (op1 != itarg)
7061	    emit_move_insn (itarg, op1);
7062
7063	  return original_target;
7064	}
7065
7066      /* ... fall through ...  */
7067
7068    case STRING_CST:
7069      temp = expand_expr_constant (exp, 1, modifier);
7070
7071      /* temp contains a constant address.
7072	 On RISC machines where a constant address isn't valid,
7073	 make some insns to get that address into a register.  */
7074      if (modifier != EXPAND_CONST_ADDRESS
7075	  && modifier != EXPAND_INITIALIZER
7076	  && modifier != EXPAND_SUM
7077	  && (! memory_address_p (mode, XEXP (temp, 0))
7078	      || flag_force_addr))
7079	return replace_equiv_address (temp,
7080				      copy_rtx (XEXP (temp, 0)));
7081      return temp;
7082
7083    case SAVE_EXPR:
7084      {
7085	tree val = TREE_OPERAND (exp, 0);
7086	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7087
7088	if (!SAVE_EXPR_RESOLVED_P (exp))
7089	  {
7090	    /* We can indeed still hit this case, typically via builtin
7091	       expanders calling save_expr immediately before expanding
7092	       something.  Assume this means that we only have to deal
7093	       with non-BLKmode values.  */
7094	    gcc_assert (GET_MODE (ret) != BLKmode);
7095
7096	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7097	    DECL_ARTIFICIAL (val) = 1;
7098	    DECL_IGNORED_P (val) = 1;
7099	    TREE_OPERAND (exp, 0) = val;
7100	    SAVE_EXPR_RESOLVED_P (exp) = 1;
7101
7102	    if (!CONSTANT_P (ret))
7103	      ret = copy_to_reg (ret);
7104	    SET_DECL_RTL (val, ret);
7105	  }
7106
7107        return ret;
7108      }
7109
7110    case GOTO_EXPR:
7111      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7112	expand_goto (TREE_OPERAND (exp, 0));
7113      else
7114	expand_computed_goto (TREE_OPERAND (exp, 0));
7115      return const0_rtx;
7116
7117    case CONSTRUCTOR:
7118      /* If we don't need the result, just ensure we evaluate any
7119	 subexpressions.  */
7120      if (ignore)
7121	{
7122	  unsigned HOST_WIDE_INT idx;
7123	  tree value;
7124
7125	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7126	    expand_expr (value, const0_rtx, VOIDmode, 0);
7127
7128	  return const0_rtx;
7129	}
7130
7131      /* Try to avoid creating a temporary at all.  This is possible
7132	 if all of the initializer is zero.
7133	 FIXME: try to handle all [0..255] initializers we can handle
7134	 with memset.  */
7135      else if (TREE_STATIC (exp)
7136	       && !TREE_ADDRESSABLE (exp)
7137	       && target != 0 && mode == BLKmode
7138	       && all_zeros_p (exp))
7139	{
7140	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7141	  return target;
7142	}
7143
7144      /* All elts simple constants => refer to a constant in memory.  But
7145	 if this is a non-BLKmode mode, let it store a field at a time
7146	 since that should make a CONST_INT or CONST_DOUBLE when we
7147	 fold.  Likewise, if we have a target we can use, it is best to
7148	 store directly into the target unless the type is large enough
7149	 that memcpy will be used.  If we are making an initializer and
7150	 all operands are constant, put it in memory as well.
7151
7152	FIXME: Avoid trying to fill vector constructors piece-meal.
7153	Output them with output_constant_def below unless we're sure
7154	they're zeros.  This should go away when vector initializers
7155	are treated like VECTOR_CST instead of arrays.
7156      */
7157      else if ((TREE_STATIC (exp)
7158		&& ((mode == BLKmode
7159		     && ! (target != 0 && safe_from_p (target, exp, 1)))
7160		    || TREE_ADDRESSABLE (exp)
7161		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7162			&& (! MOVE_BY_PIECES_P
7163			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7164			     TYPE_ALIGN (type)))
7165			&& ! mostly_zeros_p (exp))))
7166	       || ((modifier == EXPAND_INITIALIZER
7167		    || modifier == EXPAND_CONST_ADDRESS)
7168		   && TREE_CONSTANT (exp)))
7169	{
7170	  rtx constructor = expand_expr_constant (exp, 1, modifier);
7171
7172	  if (modifier != EXPAND_CONST_ADDRESS
7173	      && modifier != EXPAND_INITIALIZER
7174	      && modifier != EXPAND_SUM)
7175	    constructor = validize_mem (constructor);
7176
7177	  return constructor;
7178	}
7179      else
7180	{
7181	  /* Handle calls that pass values in multiple non-contiguous
7182	     locations.  The Irix 6 ABI has examples of this.  */
7183	  if (target == 0 || ! safe_from_p (target, exp, 1)
7184	      || GET_CODE (target) == PARALLEL
7185	      || modifier == EXPAND_STACK_PARM)
7186	    target
7187	      = assign_temp (build_qualified_type (type,
7188						   (TYPE_QUALS (type)
7189						    | (TREE_READONLY (exp)
7190						       * TYPE_QUAL_CONST))),
7191			     0, TREE_ADDRESSABLE (exp), 1);
7192
7193	  store_constructor (exp, target, 0, int_expr_size (exp));
7194	  return target;
7195	}
7196
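    /* A sketch of the CONSTRUCTOR handling above: an aggregate
       initializer that is entirely zero, e.g.

	 struct S { int a[16]; } s = { 0 };

       can take the clear_storage shortcut and becomes a block clear of
       TARGET, whereas a mixed initializer is either committed to the
       constant pool or built up field by field by store_constructor,
       depending on the modifier and on MOVE_BY_PIECES_P.  */
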
7197    case MISALIGNED_INDIRECT_REF:
7198    case ALIGN_INDIRECT_REF:
7199    case INDIRECT_REF:
7200      {
7201	tree exp1 = TREE_OPERAND (exp, 0);
7202
7203	if (modifier != EXPAND_WRITE)
7204	  {
7205	    tree t;
7206
7207	    t = fold_read_from_constant_string (exp);
7208	    if (t)
7209	      return expand_expr (t, target, tmode, modifier);
7210	  }
7211
7212	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7213	op0 = memory_address (mode, op0);
7214
7215	if (code == ALIGN_INDIRECT_REF)
7216	  {
7217	    int align = TYPE_ALIGN_UNIT (type);
7218	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7219	    op0 = memory_address (mode, op0);
7220	  }
7221
7222	temp = gen_rtx_MEM (mode, op0);
7223
7224	set_mem_attributes (temp, exp, 0);
7225
7226	/* Resolve the misalignment now, so that we don't have to remember
7227	   to resolve it later.  Of course, this only works for reads.  */
7228	/* ??? When we get around to supporting writes, we'll have to handle
7229	   this in store_expr directly.  The vectorizer isn't generating
7230	   those yet, however.  */
7231	if (code == MISALIGNED_INDIRECT_REF)
7232	  {
7233	    int icode;
7234	    rtx reg, insn;
7235
7236	    gcc_assert (modifier == EXPAND_NORMAL
7237			|| modifier == EXPAND_STACK_PARM);
7238
7239	    /* The vectorizer should have already checked the mode.  */
7240	    icode = movmisalign_optab->handlers[mode].insn_code;
7241	    gcc_assert (icode != CODE_FOR_nothing);
7242
7243	    /* We've already validated the memory, and we're creating a
7244	       new pseudo destination.  The predicates really can't fail.  */
7245	    reg = gen_reg_rtx (mode);
7246
7247	    /* Nor can the insn generator.  */
7248	    insn = GEN_FCN (icode) (reg, temp);
7249	    emit_insn (insn);
7250
7251	    return reg;
7252	  }
7253
7254	return temp;
7255      }
7256
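    /* Sketch of the misaligned case above: when the vectorizer emits a
       MISALIGNED_INDIRECT_REF for an unaligned vector load, expansion
       goes through the target's movmisalign<mode> pattern.  On a
       target whose pattern is a plain unaligned move this is roughly

	 (set (reg:V4SF 90) (mem:V4SF (reg:P 89)))

       but the pattern is entirely target-defined; the only invariant
       here is that the result lands in a fresh pseudo, never in
       TARGET.  */
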
7257    case TARGET_MEM_REF:
7258      {
7259	struct mem_address addr;
7260
7261	get_address_description (exp, &addr);
7262	op0 = addr_for_mem_ref (&addr, true);
7263	op0 = memory_address (mode, op0);
7264	temp = gen_rtx_MEM (mode, op0);
7265	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7266      }
7267      return temp;
7268
7269    case ARRAY_REF:
7270
7271      {
7272	tree array = TREE_OPERAND (exp, 0);
7273	tree index = TREE_OPERAND (exp, 1);
7274
7275	/* Fold an expression like: "foo"[2].
7276	   This is not done in fold so it won't happen inside &.
7277	   Don't fold if this is for wide characters since it's too
7278	   difficult to do correctly and this is a very rare case.  */
7279
7280	if (modifier != EXPAND_CONST_ADDRESS
7281	    && modifier != EXPAND_INITIALIZER
7282	    && modifier != EXPAND_MEMORY)
7283	  {
7284	    tree t = fold_read_from_constant_string (exp);
7285
7286	    if (t)
7287	      return expand_expr (t, target, tmode, modifier);
7288	  }
7289
7290	/* If this is a constant index into a constant array,
7291	   just get the value from the array.  Handle both the cases when
7292	   we have an explicit constructor and when our operand is a variable
7293	   that was declared const.  */
7294
7295	if (modifier != EXPAND_CONST_ADDRESS
7296	    && modifier != EXPAND_INITIALIZER
7297	    && modifier != EXPAND_MEMORY
7298	    && TREE_CODE (array) == CONSTRUCTOR
7299	    && ! TREE_SIDE_EFFECTS (array)
7300	    && TREE_CODE (index) == INTEGER_CST)
7301	  {
7302	    unsigned HOST_WIDE_INT ix;
7303	    tree field, value;
7304
7305	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7306				      field, value)
7307	      if (tree_int_cst_equal (field, index))
7308		{
7309		  if (!TREE_SIDE_EFFECTS (value))
7310		    return expand_expr (fold (value), target, tmode, modifier);
7311		  break;
7312		}
7313	  }
7314
7315	else if (optimize >= 1
7316		 && modifier != EXPAND_CONST_ADDRESS
7317		 && modifier != EXPAND_INITIALIZER
7318		 && modifier != EXPAND_MEMORY
7319		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7320		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7321		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7322		 && targetm.binds_local_p (array))
7323	  {
7324	    if (TREE_CODE (index) == INTEGER_CST)
7325	      {
7326		tree init = DECL_INITIAL (array);
7327
7328		if (TREE_CODE (init) == CONSTRUCTOR)
7329		  {
7330		    unsigned HOST_WIDE_INT ix;
7331		    tree field, value;
7332
7333		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7334					      field, value)
7335		      if (tree_int_cst_equal (field, index))
7336			{
7337			  if (!TREE_SIDE_EFFECTS (value))
7338			    return expand_expr (fold (value), target, tmode,
7339						modifier);
7340			  break;
7341			}
7342		  }
7343		else if (TREE_CODE (init) == STRING_CST)
7344		  {
7345		    tree index1 = index;
7346		    tree low_bound = array_ref_low_bound (exp);
7347		    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7348
7349		    /* Optimize the special case of a zero lower bound.
7350
7351		       We convert the low_bound to sizetype to avoid some problems
7352		       with constant folding.  (E.g. suppose the lower bound is 1,
7353		       and its mode is QI.  Without the conversion, (ARRAY
7354		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7355		       +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7356
7357		    if (! integer_zerop (low_bound))
7358		      index1 = size_diffop (index1, fold_convert (sizetype,
7359								  low_bound));
7360
7361		    if (0 > compare_tree_int (index1,
7362					      TREE_STRING_LENGTH (init)))
7363		      {
7364			tree type = TREE_TYPE (TREE_TYPE (init));
7365			enum machine_mode mode = TYPE_MODE (type);
7366
7367			if (GET_MODE_CLASS (mode) == MODE_INT
7368			    && GET_MODE_SIZE (mode) == 1)
7369			  return gen_int_mode (TREE_STRING_POINTER (init)
7370					       [TREE_INT_CST_LOW (index1)],
7371					       mode);
7372		      }
7373		  }
7374	      }
7375	  }
7376      }
7377      goto normal_inner_ref;
7378
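    /* A small example of the constant-index folding above (a sketch,
       not compiler output): given

	 static const char msg[] = "foo";
	 char c = msg[2];

       DECL_INITIAL (msg) is a STRING_CST and INDEX is 2, so at -O1 and
       above the read folds directly to (const_int 111), the character
       'o', with no memory reference, provided the array binds
       locally.  */
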
7379    case COMPONENT_REF:
7380      /* If the operand is a CONSTRUCTOR, we can just extract the
7381	 appropriate field if it is present.  */
7382      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7383	{
7384	  unsigned HOST_WIDE_INT idx;
7385	  tree field, value;
7386
7387	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7388				    idx, field, value)
7389	    if (field == TREE_OPERAND (exp, 1)
7390		/* We can normally use the value of the field in the
7391		   CONSTRUCTOR.  However, if this is a bitfield in
7392		   an integral mode that we can fit in a HOST_WIDE_INT,
7393		   we must mask only the number of bits in the bitfield,
7394		   since this is done implicitly by the constructor.  If
7395		   the bitfield does not meet either of those conditions,
7396		   we can't do this optimization.  */
7397		&& (! DECL_BIT_FIELD (field)
7398		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7399			&& (GET_MODE_BITSIZE (DECL_MODE (field))
7400			    <= HOST_BITS_PER_WIDE_INT))))
7401	      {
7402		if (DECL_BIT_FIELD (field)
7403		    && modifier == EXPAND_STACK_PARM)
7404		  target = 0;
7405		op0 = expand_expr (value, target, tmode, modifier);
7406		if (DECL_BIT_FIELD (field))
7407		  {
7408		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7409		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7410
7411		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
7412		      {
7413			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7414			op0 = expand_and (imode, op0, op1, target);
7415		      }
7416		    else
7417		      {
7418			tree count
7419			  = build_int_cst (NULL_TREE,
7420					   GET_MODE_BITSIZE (imode) - bitsize);
7421
7422			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7423					    target, 0);
7424			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7425					    target, 0);
7426		      }
7427		  }
7428
7429		return op0;
7430	      }
7431	}
7432      goto normal_inner_ref;
7433
7434    case BIT_FIELD_REF:
7435    case ARRAY_RANGE_REF:
7436    normal_inner_ref:
7437      {
7438	enum machine_mode mode1;
7439	HOST_WIDE_INT bitsize, bitpos;
7440	tree offset;
7441	int volatilep = 0;
7442	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7443					&mode1, &unsignedp, &volatilep, true);
7444	rtx orig_op0;
7445
7446	/* If we got back the original object, something is wrong.  Perhaps
7447	   we are evaluating an expression too early.  In any event, don't
7448	   infinitely recurse.  */
7449	gcc_assert (tem != exp);
7450
7451	/* If TEM's type is a union of variable size, pass TARGET to the inner
7452	   computation, since it will need a temporary and TARGET is known
7453	   to suffice.  This occurs in unchecked conversion in Ada.  */
7454
7455	orig_op0 = op0
7456	  = expand_expr (tem,
7457			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7458			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7459			      != INTEGER_CST)
7460			  && modifier != EXPAND_STACK_PARM
7461			  ? target : NULL_RTX),
7462			 VOIDmode,
7463			 (modifier == EXPAND_INITIALIZER
7464			  || modifier == EXPAND_CONST_ADDRESS
7465			  || modifier == EXPAND_STACK_PARM)
7466			 ? modifier : EXPAND_NORMAL);
7467
7468	/* If this is a constant, put it into a register if it is a legitimate
7469	   constant, OFFSET is 0, and we won't try to extract outside the
7470	   register (in case we were passed a partially uninitialized object
7471	   or a view_conversion to a larger size).  Force the constant to
7472	   memory otherwise.  */
7473	if (CONSTANT_P (op0))
7474	  {
7475	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7476	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7477		&& offset == 0
7478		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7479	      op0 = force_reg (mode, op0);
7480	    else
7481	      op0 = validize_mem (force_const_mem (mode, op0));
7482	  }
7483
7484	/* Otherwise, if this object is not in memory and we either have an
7485	   offset, a BLKmode result, or a reference outside the object, put it
7486	   there.  Such cases can occur in Ada if we have unchecked conversion
7487	   of an expression from a scalar type to an array or record type or
7488	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
7489	else if (!MEM_P (op0)
7490		 && (offset != 0
7491		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7492		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7493	  {
7494	    tree nt = build_qualified_type (TREE_TYPE (tem),
7495					    (TYPE_QUALS (TREE_TYPE (tem))
7496					     | TYPE_QUAL_CONST));
7497	    rtx memloc = assign_temp (nt, 1, 1, 1);
7498
7499	    emit_move_insn (memloc, op0);
7500	    op0 = memloc;
7501	  }
7502
7503	if (offset != 0)
7504	  {
7505	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7506					  EXPAND_SUM);
7507
7508	    gcc_assert (MEM_P (op0));
7509
7510#ifdef POINTERS_EXTEND_UNSIGNED
7511	    if (GET_MODE (offset_rtx) != Pmode)
7512	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7513#else
7514	    if (GET_MODE (offset_rtx) != ptr_mode)
7515	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7516#endif
7517
7518	    if (GET_MODE (op0) == BLKmode
7519		/* A constant address in OP0 can have VOIDmode; we must
7520		   not try to call force_reg in that case.  */
7521		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7522		&& bitsize != 0
7523		&& (bitpos % bitsize) == 0
7524		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7525		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7526	      {
7527		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7528		bitpos = 0;
7529	      }
7530
7531	    op0 = offset_address (op0, offset_rtx,
7532				  highest_pow2_factor (offset));
7533	  }
7534
7535	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7536	   record its alignment as BIGGEST_ALIGNMENT.  */
7537	if (MEM_P (op0) && bitpos == 0 && offset != 0
7538	    && is_aligning_offset (offset, tem))
7539	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7540
7541	/* Don't forget about volatility even if this is a bitfield.  */
7542	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7543	  {
7544	    if (op0 == orig_op0)
7545	      op0 = copy_rtx (op0);
7546
7547	    MEM_VOLATILE_P (op0) = 1;
7548	  }
7549
7550	/* The following code doesn't handle CONCAT.
7551	   Assume only bitpos == 0 can be used for CONCAT, due to
7552	   one-element arrays having the same mode as their element.  */
7553	if (GET_CODE (op0) == CONCAT)
7554	  {
7555	    gcc_assert (bitpos == 0
7556			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7557	    return op0;
7558	  }
7559
7560	/* In cases where an aligned union has an unaligned object
7561	   as a field, we might be extracting a BLKmode value from
7562	   an integer-mode (e.g., SImode) object.  Handle this case
7563	   by doing the extract into an object as wide as the field
7564	   (which we know to be the width of a basic mode), then
7565	   storing into memory, and changing the mode to BLKmode.  */
7566	if (mode1 == VOIDmode
7567	    || REG_P (op0) || GET_CODE (op0) == SUBREG
7568	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7569		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7570		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7571		&& modifier != EXPAND_CONST_ADDRESS
7572		&& modifier != EXPAND_INITIALIZER)
7573	    /* If the field isn't aligned enough to fetch as a memref,
7574	       fetch it as a bit field.  */
7575	    || (mode1 != BLKmode
7576		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7577		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7578		      || (MEM_P (op0)
7579			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7580			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7581		     && ((modifier == EXPAND_CONST_ADDRESS
7582			  || modifier == EXPAND_INITIALIZER)
7583			 ? STRICT_ALIGNMENT
7584			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7585		    || (bitpos % BITS_PER_UNIT != 0)))
7586	    /* If the type and the field are a constant size and the
7587	       size of the type isn't the same size as the bitfield,
7588	       we must use bitfield operations.  */
7589	    || (bitsize >= 0
7590		&& TYPE_SIZE (TREE_TYPE (exp))
7591		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7592		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7593					  bitsize)))
7594	  {
7595	    enum machine_mode ext_mode = mode;
7596
7597	    if (ext_mode == BLKmode
7598		&& ! (target != 0 && MEM_P (op0)
7599		      && MEM_P (target)
7600		      && bitpos % BITS_PER_UNIT == 0))
7601	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7602
7603	    if (ext_mode == BLKmode)
7604	      {
7605		if (target == 0)
7606		  target = assign_temp (type, 0, 1, 1);
7607
7608		if (bitsize == 0)
7609		  return target;
7610
7611		/* In this case, BITPOS must start at a byte boundary and
7612		   TARGET, if specified, must be a MEM.  */
7613		gcc_assert (MEM_P (op0)
7614			    && (!target || MEM_P (target))
7615			    && !(bitpos % BITS_PER_UNIT));
7616
7617		emit_block_move (target,
7618				 adjust_address (op0, VOIDmode,
7619						 bitpos / BITS_PER_UNIT),
7620				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7621					  / BITS_PER_UNIT),
7622				 (modifier == EXPAND_STACK_PARM
7623				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7624
7625		return target;
7626	      }
7627
7628	    op0 = validize_mem (op0);
7629
7630	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7631	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7632
7633	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7634				     (modifier == EXPAND_STACK_PARM
7635				      ? NULL_RTX : target),
7636				     ext_mode, ext_mode);
7637
7638	    /* If the result is a record type and BITSIZE is narrower than
7639	       the mode of OP0, an integral mode, and this is a big endian
7640	       machine, we must put the field into the high-order bits.  */
7641	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7642		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7643		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7644	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7645				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7646					    - bitsize),
7647				  op0, 1);
7648
7649	    /* If the result type is BLKmode, store the data into a temporary
7650	       of the appropriate type, but with the mode corresponding to the
7651	       mode for the data we have (op0's mode).  It's tempting to make
7652	       this a constant type, since we know it's only being stored once,
7653	       but that can cause problems if we are taking the address of this
7654	       COMPONENT_REF because the MEM of any reference via that address
7655	       will have flags corresponding to the type, which will not
7656	       necessarily be constant.  */
7657	    if (mode == BLKmode)
7658	      {
7659		rtx new
7660		  = assign_stack_temp_for_type
7661		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7662
7663		emit_move_insn (new, op0);
7664		op0 = copy_rtx (new);
7665		PUT_MODE (op0, BLKmode);
7666		set_mem_attributes (op0, exp, 1);
7667	      }
7668
7669	    return op0;
7670	  }
7671
7672	/* If the result is BLKmode, use that to access the object
7673	   now as well.  */
7674	if (mode == BLKmode)
7675	  mode1 = BLKmode;
7676
7677	/* Get a reference to just this component.  */
7678	if (modifier == EXPAND_CONST_ADDRESS
7679	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7680	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7681	else
7682	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7683
7684	if (op0 == orig_op0)
7685	  op0 = copy_rtx (op0);
7686
7687	set_mem_attributes (op0, exp, 0);
7688	if (REG_P (XEXP (op0, 0)))
7689	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7690
7691	MEM_VOLATILE_P (op0) |= volatilep;
7692	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7693	    || modifier == EXPAND_CONST_ADDRESS
7694	    || modifier == EXPAND_INITIALIZER)
7695	  return op0;
7696	else if (target == 0)
7697	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7698
7699	convert_move (target, op0, unsignedp);
7700	return target;
7701      }
7702
7703    case OBJ_TYPE_REF:
7704      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7705
7706    case CALL_EXPR:
7707      /* Check for a built-in function.  */
7708      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7709	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7710	      == FUNCTION_DECL)
7711	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7712	{
7713	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7714	      == BUILT_IN_FRONTEND)
7715	    return lang_hooks.expand_expr (exp, original_target,
7716					   tmode, modifier,
7717					   alt_rtl);
7718	  else
7719	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7720	}
7721
7722      return expand_call (exp, target, ignore);
7723
7724    case NON_LVALUE_EXPR:
7725    case NOP_EXPR:
7726    case CONVERT_EXPR:
7727      if (TREE_OPERAND (exp, 0) == error_mark_node)
7728	return const0_rtx;
7729
7730      if (TREE_CODE (type) == UNION_TYPE)
7731	{
7732	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7733
7734	  /* If both input and output are BLKmode, this conversion isn't doing
7735	     anything except possibly changing memory attributes.  */
7736	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7737	    {
7738	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7739					modifier);
7740
7741	      result = copy_rtx (result);
7742	      set_mem_attributes (result, exp, 0);
7743	      return result;
7744	    }
7745
7746	  if (target == 0)
7747	    {
7748	      if (TYPE_MODE (type) != BLKmode)
7749		target = gen_reg_rtx (TYPE_MODE (type));
7750	      else
7751		target = assign_temp (type, 0, 1, 1);
7752	    }
7753
7754	  if (MEM_P (target))
7755	    /* Store data into beginning of memory target.  */
7756	    store_expr (TREE_OPERAND (exp, 0),
7757			adjust_address (target, TYPE_MODE (valtype), 0),
7758			modifier == EXPAND_STACK_PARM);
7759
7760	  else
7761	    {
7762	      gcc_assert (REG_P (target));
7763
7764	      /* Store this field into a union of the proper type.  */
7765	      store_field (target,
7766			   MIN ((int_size_in_bytes (TREE_TYPE
7767						    (TREE_OPERAND (exp, 0)))
7768				 * BITS_PER_UNIT),
7769				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7770			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7771			   type, 0);
7772	    }
7773
7774	  /* Return the entire union.  */
7775	  return target;
7776	}
7777
7778      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7779	{
7780	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7781			     modifier);
7782
7783	  /* If the signedness of the conversion differs and OP0 is
7784	     a promoted SUBREG, clear that indication since we now
7785	     have to do the proper extension.  */
7786	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7787	      && GET_CODE (op0) == SUBREG)
7788	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7789
7790	  return REDUCE_BIT_FIELD (op0);
7791	}
7792
7793      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7794			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7795      if (GET_MODE (op0) == mode)
7796	;
7797
7798      /* If OP0 is a constant, just convert it into the proper mode.  */
7799      else if (CONSTANT_P (op0))
7800	{
7801	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7802	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7803
7804	  if (modifier == EXPAND_INITIALIZER)
7805	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
7806				       subreg_lowpart_offset (mode,
7807							      inner_mode));
7808	  else
7809	    op0 = convert_modes (mode, inner_mode, op0,
7810				 TYPE_UNSIGNED (inner_type));
7811	}
7812
7813      else if (modifier == EXPAND_INITIALIZER)
7814	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7815
7816      else if (target == 0)
7817	op0 = convert_to_mode (mode, op0,
7818			       TYPE_UNSIGNED (TREE_TYPE
7819					      (TREE_OPERAND (exp, 0))));
7820      else
7821	{
7822	  convert_move (target, op0,
7823			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7824	  op0 = target;
7825	}
7826
7827      return REDUCE_BIT_FIELD (op0);
7828
7829    case VIEW_CONVERT_EXPR:
7830      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7831
7832      /* If the input and output modes are both the same, we are done.  */
7833      if (TYPE_MODE (type) == GET_MODE (op0))
7834	;
7835      /* If neither mode is BLKmode, and both modes are the same size
7836	 then we can use gen_lowpart.  */
7837      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7838	       && GET_MODE_SIZE (TYPE_MODE (type))
7839		   == GET_MODE_SIZE (GET_MODE (op0)))
7840	{
7841	  if (GET_CODE (op0) == SUBREG)
7842	    op0 = force_reg (GET_MODE (op0), op0);
7843	  op0 = gen_lowpart (TYPE_MODE (type), op0);
7844	}
7845      /* If both modes are integral, then we can convert from one to the
7846	 other.  */
7847      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7848	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7849	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7850			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7851      /* As a last resort, spill op0 to memory, and reload it in a
7852	 different mode.  */
7853      else if (!MEM_P (op0))
7854	{
7855	  /* If the operand is not a MEM, force it into memory.  Since we
7856	     are going to be changing the mode of the MEM, don't call
7857	     force_const_mem for constants because we don't allow pool
7858	     constants to change mode.  */
7859	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7860
7861	  gcc_assert (!TREE_ADDRESSABLE (exp));
7862
7863	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7864	    target
7865	      = assign_stack_temp_for_type
7866		(TYPE_MODE (inner_type),
7867		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7868
7869	  emit_move_insn (target, op0);
7870	  op0 = target;
7871	}
7872
7873      /* At this point, OP0 is in the correct mode.  If the output type is such
7874	 that the operand is known to be aligned, indicate that it is.
7875	 Otherwise, we need only be concerned about alignment for non-BLKmode
7876	 results.  */
7877      if (MEM_P (op0))
7878	{
7879	  op0 = copy_rtx (op0);
7880
7881	  if (TYPE_ALIGN_OK (type))
7882	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7883	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7884		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7885	    {
7886	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7887	      HOST_WIDE_INT temp_size
7888		= MAX (int_size_in_bytes (inner_type),
7889		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7890	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7891						    temp_size, 0, type);
7892	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7893
7894	      gcc_assert (!TREE_ADDRESSABLE (exp));
7895
7896	      if (GET_MODE (op0) == BLKmode)
7897		emit_block_move (new_with_op0_mode, op0,
7898				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7899				 (modifier == EXPAND_STACK_PARM
7900				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7901	      else
7902		emit_move_insn (new_with_op0_mode, op0);
7903
7904	      op0 = new;
7905	    }
7906
7907	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7908	}
7909
7910      return op0;
7911
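    /* Illustrative sketch of VIEW_CONVERT_EXPR: reinterpreting a float
       as a 32-bit integer, e.g. via Ada unchecked conversion or a
       union pun that the optimizers have turned into

	 VIEW_CONVERT_EXPR<int>(f)

       may reach here with SFmode and SImode.  The two modes have the
       same size, so the gen_lowpart path above applies; the
       spill-to-memory fallback is only for mismatched sizes or
       BLKmode.  */
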
7912    case PLUS_EXPR:
7913      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7914	 something else, make sure we add the register to the constant and
7915	 then to the other thing.  This case can occur during strength
7916	 reduction and doing it this way will produce better code if the
7917	 frame pointer or argument pointer is eliminated.
7918
7919	 fold-const.c will ensure that the constant is always in the inner
7920	 PLUS_EXPR, so the only case we need to do anything about is if
7921	 sp, ap, or fp is our second argument, in which case we must swap
7922	 the innermost first argument and our second argument.  */
7923
7924      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7925	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7926	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7927	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7928	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7929	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7930	{
7931	  tree t = TREE_OPERAND (exp, 1);
7932
7933	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7934	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7935	}
7936
7937      /* If the result is to be ptr_mode and we are adding an integer to
7938	 something, we might be forming a constant.  So try to use
7939	 plus_constant.  If it produces a sum and we can't accept it,
7940	 use force_operand.  This allows P = &ARR[const] to generate
7941	 efficient code on machines where a SYMBOL_REF is not a valid
7942	 address.
7943
7944	 If this is an EXPAND_SUM call, always return the sum.  */
7945      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7946	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7947	{
7948	  if (modifier == EXPAND_STACK_PARM)
7949	    target = 0;
7950	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7951	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7952	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7953	    {
7954	      rtx constant_part;
7955
7956	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7957				 EXPAND_SUM);
7958	      /* Use immed_double_const to ensure that the constant is
7959		 truncated according to the mode of OP1, then sign extended
7960		 to a HOST_WIDE_INT.  Using the constant directly can result
7961		 in non-canonical RTL in a 64x32 cross compile.  */
7962	      constant_part
7963		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7964				      (HOST_WIDE_INT) 0,
7965				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7966	      op1 = plus_constant (op1, INTVAL (constant_part));
7967	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7968		op1 = force_operand (op1, target);
7969	      return REDUCE_BIT_FIELD (op1);
7970	    }
7971
7972	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7973		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7974		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7975	    {
7976	      rtx constant_part;
7977
7978	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7979				 (modifier == EXPAND_INITIALIZER
7980				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7981	      if (! CONSTANT_P (op0))
7982		{
7983		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7984				     VOIDmode, modifier);
7985		  /* Return a PLUS if modifier says it's OK.  */
7986		  if (modifier == EXPAND_SUM
7987		      || modifier == EXPAND_INITIALIZER)
7988		    return simplify_gen_binary (PLUS, mode, op0, op1);
7989		  goto binop2;
7990		}
7991	      /* Use immed_double_const to ensure that the constant is
7992		 truncated according to the mode of OP0, then sign extended
7993		 to a HOST_WIDE_INT.  Using the constant directly can result
7994		 in non-canonical RTL in a 64x32 cross compile.  */
7995	      constant_part
7996		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7997				      (HOST_WIDE_INT) 0,
7998				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7999	      op0 = plus_constant (op0, INTVAL (constant_part));
8000	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8001		op0 = force_operand (op0, target);
8002	      return REDUCE_BIT_FIELD (op0);
8003	    }
8004	}
8005
8006      /* No sense saving up arithmetic to be done
8007	 if it's all in the wrong mode to form part of an address.
8008	 And force_operand won't know whether to sign-extend or
8009	 zero-extend.  */
8010      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8011	  || mode != ptr_mode)
8012	{
8013	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8014			   subtarget, &op0, &op1, 0);
8015	  if (op0 == const0_rtx)
8016	    return op1;
8017	  if (op1 == const0_rtx)
8018	    return op0;
8019	  goto binop2;
8020	}
8021
8022      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8023		       subtarget, &op0, &op1, modifier);
8024      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8025
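    /* Sketch of the pointer-plus-constant path above: for
       P = &ARR[5] with 4-byte elements, the constant operand is 20 and
       plus_constant can fold it straight into the address, yielding
       under EXPAND_SUM something like

	 (plus:P (symbol_ref:P "ARR") (const_int 20))

       instead of a separate add insn.  The offset and modes are only
       an assumed example for a typical target.  */
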
8026    case MINUS_EXPR:
8027      /* For initializers, we are allowed to return a MINUS of two
8028	 symbolic constants.  Here we handle all cases when both operands
8029	 are constant.  */
8030      /* Handle difference of two symbolic constants,
8031	 for the sake of an initializer.  */
8032      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8033	  && really_constant_p (TREE_OPERAND (exp, 0))
8034	  && really_constant_p (TREE_OPERAND (exp, 1)))
8035	{
8036	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8037			   NULL_RTX, &op0, &op1, modifier);
8038
8039	  /* If the last operand is a CONST_INT, use plus_constant of
8040	     the negated constant.  Else make the MINUS.  */
8041	  if (GET_CODE (op1) == CONST_INT)
8042	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8043	  else
8044	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8045	}
8046
8047      /* No sense saving up arithmetic to be done
8048	 if it's all in the wrong mode to form part of an address.
8049	 And force_operand won't know whether to sign-extend or
8050	 zero-extend.  */
8051      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8052	  || mode != ptr_mode)
8053	goto binop;
8054
8055      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8056		       subtarget, &op0, &op1, modifier);
8057
8058      /* Convert A - const to A + (-const).  */
8059      if (GET_CODE (op1) == CONST_INT)
8060	{
8061	  op1 = negate_rtx (mode, op1);
8062	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8063	}
8064
8065      goto binop2;
8066
8067    case MULT_EXPR:
8068      /* If first operand is constant, swap them.
8069	 Thus the following special case checks need only
8070	 check the second operand.  */
8071      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8072	{
8073	  tree t1 = TREE_OPERAND (exp, 0);
8074	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8075	  TREE_OPERAND (exp, 1) = t1;
8076	}
8077
8078      /* Attempt to return something suitable for generating an
8079	 indexed address, for machines that support that.  */
8080
8081      if (modifier == EXPAND_SUM && mode == ptr_mode
8082	  && host_integerp (TREE_OPERAND (exp, 1), 0))
8083	{
8084	  tree exp1 = TREE_OPERAND (exp, 1);
8085
8086	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8087			     EXPAND_SUM);
8088
8089	  if (!REG_P (op0))
8090	    op0 = force_operand (op0, NULL_RTX);
8091	  if (!REG_P (op0))
8092	    op0 = copy_to_mode_reg (mode, op0);
8093
8094	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8095			       gen_int_mode (tree_low_cst (exp1, 0),
8096					     TYPE_MODE (TREE_TYPE (exp1)))));
8097	}
8098
8099      if (modifier == EXPAND_STACK_PARM)
8100	target = 0;
8101
8102      /* Check for multiplying things that have been extended
8103	 from a narrower type.  If this machine supports multiplying
8104	 in that narrower type with a result in the desired type,
8105	 do it that way, and avoid the explicit type-conversion.  */
8106
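      /* For instance (a sketch of the intent, with HImode and SImode
	 assumed):

	   short a, b;
	   int c = (int) a * (int) b;

	 can use the target's mulhisi3 widening-multiply pattern
	 directly instead of extending both operands to SImode first;
	 the checks below verify that the extensions really are
	 redundant and that a suitable signed, unsigned, or mixed
	 widening optab exists.  */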
8107      subexp0 = TREE_OPERAND (exp, 0);
8108      subexp1 = TREE_OPERAND (exp, 1);
8109      /* First, check if we have a multiplication of one signed and one
8110	 unsigned operand.  */
8111      if (TREE_CODE (subexp0) == NOP_EXPR
8112	  && TREE_CODE (subexp1) == NOP_EXPR
8113	  && TREE_CODE (type) == INTEGER_TYPE
8114	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8115	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8116	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8117	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8118	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8119	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8120	{
8121	  enum machine_mode innermode
8122	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8123	  this_optab = usmul_widen_optab;
8124	  if (mode == GET_MODE_WIDER_MODE (innermode))
8125	    {
8126	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8127		{
8128		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8129		    expand_operands (TREE_OPERAND (subexp0, 0),
8130				     TREE_OPERAND (subexp1, 0),
8131				     NULL_RTX, &op0, &op1, 0);
8132		  else
8133		    expand_operands (TREE_OPERAND (subexp0, 0),
8134				     TREE_OPERAND (subexp1, 0),
8135				     NULL_RTX, &op1, &op0, 0);
8136
8137		  goto binop3;
8138		}
8139	    }
8140	}
8141      /* Check for a multiplication with matching signedness.  */
8142      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8143	  && TREE_CODE (type) == INTEGER_TYPE
8144	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8145	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8146	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8147	       && int_fits_type_p (TREE_OPERAND (exp, 1),
8148				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8149	       /* Don't use a widening multiply if a shift will do.  */
8150	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8151		    > HOST_BITS_PER_WIDE_INT)
8152		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8153	      ||
8154	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8155	       && (TYPE_PRECISION (TREE_TYPE
8156				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8157		   == TYPE_PRECISION (TREE_TYPE
8158				      (TREE_OPERAND
8159				       (TREE_OPERAND (exp, 0), 0))))
8160	       /* If both operands are extended, they must either both
8161		  be zero-extended or both be sign-extended.  */
8162	       && (TYPE_UNSIGNED (TREE_TYPE
8163				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8164		   == TYPE_UNSIGNED (TREE_TYPE
8165				     (TREE_OPERAND
8166				      (TREE_OPERAND (exp, 0), 0)))))))
8167	{
8168	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8169	  enum machine_mode innermode = TYPE_MODE (op0type);
8170	  bool zextend_p = TYPE_UNSIGNED (op0type);
8171	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8172	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8173
8174	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
8175	    {
8176	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8177		{
8178		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8179		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8180				     TREE_OPERAND (exp, 1),
8181				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8182		  else
8183		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8184				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8185				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8186		  goto binop3;
8187		}
8188	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8189		       && innermode == word_mode)
8190		{
8191		  rtx htem, hipart;
8192		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8193		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8194		    op1 = convert_modes (innermode, mode,
8195					 expand_normal (TREE_OPERAND (exp, 1)),
8196					 unsignedp);
8197		  else
8198		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8199		  temp = expand_binop (mode, other_optab, op0, op1, target,
8200				       unsignedp, OPTAB_LIB_WIDEN);
8201		  hipart = gen_highpart (innermode, temp);
8202		  htem = expand_mult_highpart_adjust (innermode, hipart,
8203						      op0, op1, hipart,
8204						      zextend_p);
8205		  if (htem != hipart)
8206		    emit_move_insn (hipart, htem);
8207		  return REDUCE_BIT_FIELD (temp);
8208		}
8209	    }
8210	}
8211      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8212		       subtarget, &op0, &op1, 0);
8213      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8214
8215    case TRUNC_DIV_EXPR:
8216    case FLOOR_DIV_EXPR:
8217    case CEIL_DIV_EXPR:
8218    case ROUND_DIV_EXPR:
8219    case EXACT_DIV_EXPR:
8220      if (modifier == EXPAND_STACK_PARM)
8221	target = 0;
8222      /* Possible optimization: compute the dividend with EXPAND_SUM;
8223	 then, if the divisor is constant, we can optimize the case
8224	 where some terms of the dividend have coefficients divisible by it.  */
8225      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8226		       subtarget, &op0, &op1, 0);
8227      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8228
8229    case RDIV_EXPR:
8230      goto binop;
8231
8232    case TRUNC_MOD_EXPR:
8233    case FLOOR_MOD_EXPR:
8234    case CEIL_MOD_EXPR:
8235    case ROUND_MOD_EXPR:
8236      if (modifier == EXPAND_STACK_PARM)
8237	target = 0;
8238      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8239		       subtarget, &op0, &op1, 0);
8240      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8241
8242    case FIX_ROUND_EXPR:
8243    case FIX_FLOOR_EXPR:
8244    case FIX_CEIL_EXPR:
8245      gcc_unreachable ();			/* Not used for C.  */
8246
8247    case FIX_TRUNC_EXPR:
8248      op0 = expand_normal (TREE_OPERAND (exp, 0));
8249      if (target == 0 || modifier == EXPAND_STACK_PARM)
8250	target = gen_reg_rtx (mode);
8251      expand_fix (target, op0, unsignedp);
8252      return target;
8253
8254    case FLOAT_EXPR:
8255      op0 = expand_normal (TREE_OPERAND (exp, 0));
8256      if (target == 0 || modifier == EXPAND_STACK_PARM)
8257	target = gen_reg_rtx (mode);
8258      /* expand_float can't figure out what to do if FROM has VOIDmode.
8259	 So give it the correct mode.  With -O, cse will optimize this.  */
8260      if (GET_MODE (op0) == VOIDmode)
8261	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8262				op0);
8263      expand_float (target, op0,
8264		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8265      return target;
8266
8267    case NEGATE_EXPR:
8268      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8269      if (modifier == EXPAND_STACK_PARM)
8270	target = 0;
8271      temp = expand_unop (mode,
8272      			  optab_for_tree_code (NEGATE_EXPR, type),
8273			  op0, target, 0);
8274      gcc_assert (temp);
8275      return REDUCE_BIT_FIELD (temp);
8276
8277    case ABS_EXPR:
8278      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8279      if (modifier == EXPAND_STACK_PARM)
8280	target = 0;
8281
8282      /* ABS_EXPR is not valid for complex arguments.  */
8283      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8284		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8285
8286      /* Unsigned abs is simply the operand.  Testing here means we don't
8287	 risk generating incorrect code below.  */
8288      if (TYPE_UNSIGNED (type))
8289	return op0;
8290
8291      return expand_abs (mode, op0, target, unsignedp,
8292			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8293
8294    case MAX_EXPR:
8295    case MIN_EXPR:
8296      target = original_target;
8297      if (target == 0
8298	  || modifier == EXPAND_STACK_PARM
8299	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8300	  || GET_MODE (target) != mode
8301	  || (REG_P (target)
8302	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8303	target = gen_reg_rtx (mode);
8304      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8305		       target, &op0, &op1, 0);
8306
8307      /* First try to do it with a special MIN or MAX instruction.
8308	 If that does not win, use a conditional jump to select the proper
8309	 value.  */
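      /* The fallback order below is: a direct min/max instruction via
	 expand_binop, then (when HAVE_conditional_move is defined and the
	 mode supports it) a conditional move, and finally an explicit
	 compare-and-branch around a move.  */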
8310      this_optab = optab_for_tree_code (code, type);
8311      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8312			   OPTAB_WIDEN);
8313      if (temp != 0)
8314	return temp;
8315
8316      /* At this point, a MEM target is no longer useful; we will get better
8317	 code without it.  */
8318
8319      if (! REG_P (target))
8320	target = gen_reg_rtx (mode);
8321
8322      /* If op1 was placed in target, swap op0 and op1.  */
8323      if (target != op0 && target == op1)
8324	{
8325	  temp = op0;
8326	  op0 = op1;
8327	  op1 = temp;
8328	}
8329
8330      /* We generate better code and avoid problems with op1 mentioning
8331	 target by forcing op1 into a pseudo if it isn't a constant.  */
8332      if (! CONSTANT_P (op1))
8333	op1 = force_reg (mode, op1);
8334
8335      {
8336	enum rtx_code comparison_code;
8337	rtx cmpop1 = op1;
8338
8339	if (code == MAX_EXPR)
8340	  comparison_code = unsignedp ? GEU : GE;
8341	else
8342	  comparison_code = unsignedp ? LEU : LE;
8343
8344	/* Canonicalize to comparisons against 0.  */
8345	if (op1 == const1_rtx)
8346	  {
8347	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8348	       or (a != 0 ? a : 1) for unsigned.
8349	       For MIN we are safe converting (a <= 1 ? a : 1)
8350	       into (a <= 0 ? a : 1)  */
8351	    cmpop1 = const0_rtx;
8352	    if (code == MAX_EXPR)
8353	      comparison_code = unsignedp ? NE : GT;
8354	  }
8355	if (op1 == constm1_rtx && !unsignedp)
8356	  {
8357	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8358	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8359	    cmpop1 = const0_rtx;
8360	    if (code == MIN_EXPR)
8361	      comparison_code = LT;
8362	  }
8363#ifdef HAVE_conditional_move
8364	/* Use a conditional move if possible.  */
8365	if (can_conditionally_move_p (mode))
8366	  {
8367	    rtx insn;
8368
8369	    /* ??? Same problem as in expmed.c: emit_conditional_move
8370	       forces a stack adjustment via compare_from_rtx, and we
8371	       lose the stack adjustment if the sequence we are about
8372	       to create is discarded.  */
8373	    do_pending_stack_adjust ();
8374
8375	    start_sequence ();
8376
8377	    /* Try to emit the conditional move.  */
8378	    insn = emit_conditional_move (target, comparison_code,
8379					  op0, cmpop1, mode,
8380					  op0, op1, mode,
8381					  unsignedp);
8382
8383	    /* If we could do the conditional move, emit the sequence,
8384	       and return.  */
8385	    if (insn)
8386	      {
8387		rtx seq = get_insns ();
8388		end_sequence ();
8389		emit_insn (seq);
8390		return target;
8391	      }
8392
8393	    /* Otherwise discard the sequence and fall back to code with
8394	       branches.  */
8395	    end_sequence ();
8396	  }
8397#endif
8398	if (target != op0)
8399	  emit_move_insn (target, op0);
8400
8401	temp = gen_label_rtx ();
8402	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8403				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8404      }
8405      emit_move_insn (target, op1);
8406      emit_label (temp);
8407      return target;
8408
8409    case BIT_NOT_EXPR:
8410      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8411      if (modifier == EXPAND_STACK_PARM)
8412	target = 0;
8413      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8414      gcc_assert (temp);
8415      return temp;
8416
8417      /* ??? Can optimize bitwise operations with one arg constant.
8418	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8419	 and (a bitwise1 b) bitwise2 b (etc)
8420	 but that is probably not worthwhile.  */
8421
8422      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8423	 boolean values when we want in all cases to compute both of them.  In
8424	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8425	 as actual zero-or-1 values and then bitwise anding.  In cases where
8426	 there cannot be any side effects, better code would be made by
8427	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8428	 how to recognize those cases.  */
8429
8430    case TRUTH_AND_EXPR:
8431      code = BIT_AND_EXPR;
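      /* Fall through: a TRUTH_AND_EXPR of zero-or-one operands is expanded
	 exactly like the bitwise AND below.  */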
8432    case BIT_AND_EXPR:
8433      goto binop;
8434
8435    case TRUTH_OR_EXPR:
8436      code = BIT_IOR_EXPR;
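      /* Fall through: expanded exactly like the bitwise inclusive OR below.  */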
8437    case BIT_IOR_EXPR:
8438      goto binop;
8439
8440    case TRUTH_XOR_EXPR:
8441      code = BIT_XOR_EXPR;
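      /* Fall through: expanded exactly like the bitwise XOR below.  */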
8442    case BIT_XOR_EXPR:
8443      goto binop;
8444
8445    case LSHIFT_EXPR:
8446    case RSHIFT_EXPR:
8447    case LROTATE_EXPR:
8448    case RROTATE_EXPR:
8449      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8450	subtarget = 0;
8451      if (modifier == EXPAND_STACK_PARM)
8452	target = 0;
8453      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8454      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8455			   unsignedp);
8456
8457      /* Could determine the answer when only additive constants differ.  Also,
8458	 the addition of one can be handled by changing the condition.  */
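      /* Each comparison below is first tried as a store-flag (scc-style)
	 sequence via do_store_flag; if that is not possible we fall back to
	 storing 0 into the target and jumping past a store of 1 when the
	 comparison is false.  */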
8459    case LT_EXPR:
8460    case LE_EXPR:
8461    case GT_EXPR:
8462    case GE_EXPR:
8463    case EQ_EXPR:
8464    case NE_EXPR:
8465    case UNORDERED_EXPR:
8466    case ORDERED_EXPR:
8467    case UNLT_EXPR:
8468    case UNLE_EXPR:
8469    case UNGT_EXPR:
8470    case UNGE_EXPR:
8471    case UNEQ_EXPR:
8472    case LTGT_EXPR:
8473      temp = do_store_flag (exp,
8474			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8475			    tmode != VOIDmode ? tmode : mode, 0);
8476      if (temp != 0)
8477	return temp;
8478
8479      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8480      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8481	  && original_target
8482	  && REG_P (original_target)
8483	  && (GET_MODE (original_target)
8484	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8485	{
8486	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8487			      VOIDmode, 0);
8488
8489	  /* If temp is constant, we can just compute the result.  */
8490	  if (GET_CODE (temp) == CONST_INT)
8491	    {
8492	      if (INTVAL (temp) != 0)
8493	        emit_move_insn (target, const1_rtx);
8494	      else
8495	        emit_move_insn (target, const0_rtx);
8496
8497	      return target;
8498	    }
8499
8500	  if (temp != original_target)
8501	    {
8502	      enum machine_mode mode1 = GET_MODE (temp);
8503	      if (mode1 == VOIDmode)
8504		mode1 = tmode != VOIDmode ? tmode : mode;
8505
8506	      temp = copy_to_mode_reg (mode1, temp);
8507	    }
8508
8509	  op1 = gen_label_rtx ();
8510	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8511				   GET_MODE (temp), unsignedp, op1);
8512	  emit_move_insn (temp, const1_rtx);
8513	  emit_label (op1);
8514	  return temp;
8515	}
8516
8517      /* If no set-flag instruction, must generate a conditional store
8518	 into a temporary variable.  Drop through and handle this
8519	 like && and ||.  */
8520
8521      if (! ignore
8522	  && (target == 0
8523	      || modifier == EXPAND_STACK_PARM
8524	      || ! safe_from_p (target, exp, 1)
8525	      /* Make sure we don't have a hard reg (such as the function's
8526		 return value) live across basic blocks, if not optimizing.  */
8527	      || (!optimize && REG_P (target)
8528		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8529	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8530
8531      if (target)
8532	emit_move_insn (target, const0_rtx);
8533
8534      op1 = gen_label_rtx ();
8535      jumpifnot (exp, op1);
8536
8537      if (target)
8538	emit_move_insn (target, const1_rtx);
8539
8540      emit_label (op1);
8541      return ignore ? const0_rtx : target;
8542
8543    case TRUTH_NOT_EXPR:
8544      if (modifier == EXPAND_STACK_PARM)
8545	target = 0;
8546      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8547      /* The parser is careful to generate TRUTH_NOT_EXPR
8548	 only with operands that are always zero or one.  */
8549      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8550			   target, 1, OPTAB_LIB_WIDEN);
8551      gcc_assert (temp);
8552      return temp;
8553
8554    case STATEMENT_LIST:
8555      {
8556	tree_stmt_iterator iter;
8557
8558	gcc_assert (ignore);
8559
8560	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8561	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8562      }
8563      return const0_rtx;
8564
8565    case COND_EXPR:
8566      /* A COND_EXPR with its type being VOID_TYPE represents a
8567	 conditional jump and is handled in
8568	 expand_gimple_cond_expr.  */
8569      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8570
8571        /* Note that COND_EXPRs whose type is a structure or union
8572  	 are required to be constructed to contain assignments of
8573  	 a temporary variable, so that we can evaluate them here
8574  	 for side effect only.  If type is void, we must do likewise.  */
8575
8576        gcc_assert (!TREE_ADDRESSABLE (type)
8577		    && !ignore
8578		    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8579		    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8580
8581       /* If we are not to produce a result, we have no target.  Otherwise,
8582 	 if a target was specified use it; it will not be used as an
8583 	 intermediate target unless it is safe.  If no target, use a
8584 	 temporary.  */
8585
8586       if (modifier != EXPAND_STACK_PARM
8587 	  && original_target
8588 	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8589 	  && GET_MODE (original_target) == mode
8590#ifdef HAVE_conditional_move
8591 	  && (! can_conditionally_move_p (mode)
8592 	      || REG_P (original_target))
8593#endif
8594 	  && !MEM_P (original_target))
8595 	temp = original_target;
8596       else
8597 	temp = assign_temp (type, 0, 0, 1);
8598
8599       do_pending_stack_adjust ();
8600       NO_DEFER_POP;
8601       op0 = gen_label_rtx ();
8602       op1 = gen_label_rtx ();
8603       jumpifnot (TREE_OPERAND (exp, 0), op0);
8604       store_expr (TREE_OPERAND (exp, 1), temp,
8605 		  modifier == EXPAND_STACK_PARM);
8606
8607       emit_jump_insn (gen_jump (op1));
8608       emit_barrier ();
8609       emit_label (op0);
8610       store_expr (TREE_OPERAND (exp, 2), temp,
8611 		  modifier == EXPAND_STACK_PARM);
8612
8613       emit_label (op1);
8614       OK_DEFER_POP;
8615       return temp;
8616
8617    case VEC_COND_EXPR:
8618	target = expand_vec_cond_expr (exp, target);
8619	return target;
8620
8621    case MODIFY_EXPR:
8622      {
8623	tree lhs = TREE_OPERAND (exp, 0);
8624	tree rhs = TREE_OPERAND (exp, 1);
8625
8626	gcc_assert (ignore);
8627
8628	/* Check for |= or &= of a bitfield of size one into another bitfield
8629	   of size 1.  In this case, (unless we need the result of the
8630	   assignment) we can do this more efficiently with a
8631	   test followed by an assignment, if necessary.
8632
8633	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8634	   things change so we do, this code should be enhanced to
8635	   support it.  */
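	/* Illustrative example: for s.x |= s.y with one-bit fields x and y,
	   we jump on s.y and store the constant 1 into s.x only when s.y is
	   set, instead of reading s.x, OR-ing and writing it back.  */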
8636	if (TREE_CODE (lhs) == COMPONENT_REF
8637	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8638		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8639	    && TREE_OPERAND (rhs, 0) == lhs
8640	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8641	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8642	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8643	  {
8644	    rtx label = gen_label_rtx ();
8645	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8646	    do_jump (TREE_OPERAND (rhs, 1),
8647		     value ? label : 0,
8648		     value ? 0 : label);
8649	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8650	    do_pending_stack_adjust ();
8651	    emit_label (label);
8652	    return const0_rtx;
8653	  }
8654
8655	expand_assignment (lhs, rhs);
8656
8657	return const0_rtx;
8658      }
8659
8660    case RETURN_EXPR:
8661      if (!TREE_OPERAND (exp, 0))
8662	expand_null_return ();
8663      else
8664	expand_return (TREE_OPERAND (exp, 0));
8665      return const0_rtx;
8666
8667    case ADDR_EXPR:
8668      return expand_expr_addr_expr (exp, target, tmode, modifier);
8669
8670    case COMPLEX_EXPR:
8671      /* Get the rtx code of the operands.  */
8672      op0 = expand_normal (TREE_OPERAND (exp, 0));
8673      op1 = expand_normal (TREE_OPERAND (exp, 1));
8674
8675      if (!target)
8676	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8677
8678      /* Move the real (op0) and imaginary (op1) parts to their location.  */
8679      write_complex_part (target, op0, false);
8680      write_complex_part (target, op1, true);
8681
8682      return target;
8683
8684    case REALPART_EXPR:
8685      op0 = expand_normal (TREE_OPERAND (exp, 0));
8686      return read_complex_part (op0, false);
8687
8688    case IMAGPART_EXPR:
8689      op0 = expand_normal (TREE_OPERAND (exp, 0));
8690      return read_complex_part (op0, true);
8691
8692    case RESX_EXPR:
8693      expand_resx_expr (exp);
8694      return const0_rtx;
8695
8696    case TRY_CATCH_EXPR:
8697    case CATCH_EXPR:
8698    case EH_FILTER_EXPR:
8699    case TRY_FINALLY_EXPR:
8700      /* Lowered by tree-eh.c.  */
8701      gcc_unreachable ();
8702
8703    case WITH_CLEANUP_EXPR:
8704    case CLEANUP_POINT_EXPR:
8705    case TARGET_EXPR:
8706    case CASE_LABEL_EXPR:
8707    case VA_ARG_EXPR:
8708    case BIND_EXPR:
8709    case INIT_EXPR:
8710    case CONJ_EXPR:
8711    case COMPOUND_EXPR:
8712    case PREINCREMENT_EXPR:
8713    case PREDECREMENT_EXPR:
8714    case POSTINCREMENT_EXPR:
8715    case POSTDECREMENT_EXPR:
8716    case LOOP_EXPR:
8717    case EXIT_EXPR:
8718    case TRUTH_ANDIF_EXPR:
8719    case TRUTH_ORIF_EXPR:
8720      /* Lowered by gimplify.c.  */
8721      gcc_unreachable ();
8722
8723    case EXC_PTR_EXPR:
8724      return get_exception_pointer (cfun);
8725
8726    case FILTER_EXPR:
8727      return get_exception_filter (cfun);
8728
8729    case FDESC_EXPR:
8730      /* Function descriptors are not valid except as
8731	 initialization constants, and should not be expanded.  */
8732      gcc_unreachable ();
8733
8734    case SWITCH_EXPR:
8735      expand_case (exp);
8736      return const0_rtx;
8737
8738    case LABEL_EXPR:
8739      expand_label (TREE_OPERAND (exp, 0));
8740      return const0_rtx;
8741
8742    case ASM_EXPR:
8743      expand_asm_expr (exp);
8744      return const0_rtx;
8745
8746    case WITH_SIZE_EXPR:
8747      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
8748	 have pulled out the size to use in whatever context it needed.  */
8749      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8750			       modifier, alt_rtl);
8751
8752    case REALIGN_LOAD_EXPR:
8753      {
8754        tree oprnd0 = TREE_OPERAND (exp, 0);
8755        tree oprnd1 = TREE_OPERAND (exp, 1);
8756        tree oprnd2 = TREE_OPERAND (exp, 2);
8757        rtx op2;
8758
8759        this_optab = optab_for_tree_code (code, type);
8760        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8761        op2 = expand_normal (oprnd2);
8762        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8763				  target, unsignedp);
8764        gcc_assert (temp);
8765        return temp;
8766      }
8767
8768    case DOT_PROD_EXPR:
8769      {
8770	tree oprnd0 = TREE_OPERAND (exp, 0);
8771	tree oprnd1 = TREE_OPERAND (exp, 1);
8772	tree oprnd2 = TREE_OPERAND (exp, 2);
8773	rtx op2;
8774
8775	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8776	op2 = expand_normal (oprnd2);
8777	target = expand_widen_pattern_expr (exp, op0, op1, op2,
8778					    target, unsignedp);
8779	return target;
8780      }
8781
8782    case WIDEN_SUM_EXPR:
8783      {
8784        tree oprnd0 = TREE_OPERAND (exp, 0);
8785        tree oprnd1 = TREE_OPERAND (exp, 1);
8786
8787        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8788        target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8789                                            target, unsignedp);
8790        return target;
8791      }
8792
8793    case REDUC_MAX_EXPR:
8794    case REDUC_MIN_EXPR:
8795    case REDUC_PLUS_EXPR:
8796      {
8797        op0 = expand_normal (TREE_OPERAND (exp, 0));
8798        this_optab = optab_for_tree_code (code, type);
8799        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8800        gcc_assert (temp);
8801        return temp;
8802      }
8803
8804    case VEC_LSHIFT_EXPR:
8805    case VEC_RSHIFT_EXPR:
8806      {
8807	target = expand_vec_shift_expr (exp, target);
8808	return target;
8809      }
8810
8811    default:
8812      return lang_hooks.expand_expr (exp, original_target, tmode,
8813				     modifier, alt_rtl);
8814    }
8815
8816  /* Here to do an ordinary binary operator.  */
8817 binop:
8818  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8819		   subtarget, &op0, &op1, 0);
8820 binop2:
8821  this_optab = optab_for_tree_code (code, type);
8822 binop3:
8823  if (modifier == EXPAND_STACK_PARM)
8824    target = 0;
8825  temp = expand_binop (mode, this_optab, op0, op1, target,
8826		       unsignedp, OPTAB_LIB_WIDEN);
8827  gcc_assert (temp);
8828  return REDUCE_BIT_FIELD (temp);
8829}
8830#undef REDUCE_BIT_FIELD
8831
8832/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8833   signedness of TYPE), possibly returning the result in TARGET.  */
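/* Overview of the strategy used below: a CONST_INT is rebuilt as a constant
   of TYPE; an unsigned value is masked with (1 << precision) - 1; a signed
   value is shifted left and then arithmetically shifted right so that the
   new sign bit is replicated.  */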
8834static rtx
8835reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8836{
8837  HOST_WIDE_INT prec = TYPE_PRECISION (type);
8838  if (target && GET_MODE (target) != GET_MODE (exp))
8839    target = 0;
8840  /* For constant values, reduce using build_int_cst_type. */
8841  if (GET_CODE (exp) == CONST_INT)
8842    {
8843      HOST_WIDE_INT value = INTVAL (exp);
8844      tree t = build_int_cst_type (type, value);
8845      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
8846    }
8847  else if (TYPE_UNSIGNED (type))
8848    {
8849      rtx mask;
8850      if (prec < HOST_BITS_PER_WIDE_INT)
8851	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8852				   GET_MODE (exp));
8853      else
8854	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8855				   ((unsigned HOST_WIDE_INT) 1
8856				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8857				   GET_MODE (exp));
8858      return expand_and (GET_MODE (exp), exp, mask, target);
8859    }
8860  else
8861    {
8862      tree count = build_int_cst (NULL_TREE,
8863				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8864      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8865      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8866    }
8867}
8868
8869/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8870   when applied to the address of EXP, produces an address known to be
8871   aligned to more than BIGGEST_ALIGNMENT.  */
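/* Concretely, the offset recognized here has the shape
   (- (ADDR_EXPR of EXP)) & (ALIGN - 1), where ALIGN is a power of two larger
   than BIGGEST_ALIGNMENT; adding such an offset to the address of EXP rounds
   it up to an ALIGN boundary.  */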
8872
8873static int
8874is_aligning_offset (tree offset, tree exp)
8875{
8876  /* Strip off any conversions.  */
8877  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8878	 || TREE_CODE (offset) == NOP_EXPR
8879	 || TREE_CODE (offset) == CONVERT_EXPR)
8880    offset = TREE_OPERAND (offset, 0);
8881
8882  /* We must now have a BIT_AND_EXPR with a constant that is one less than
8883     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8884  if (TREE_CODE (offset) != BIT_AND_EXPR
8885      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8886      || compare_tree_int (TREE_OPERAND (offset, 1),
8887			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8888      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8889    return 0;
8890
8891  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8892     It must be NEGATE_EXPR.  Then strip any more conversions.  */
8893  offset = TREE_OPERAND (offset, 0);
8894  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8895	 || TREE_CODE (offset) == NOP_EXPR
8896	 || TREE_CODE (offset) == CONVERT_EXPR)
8897    offset = TREE_OPERAND (offset, 0);
8898
8899  if (TREE_CODE (offset) != NEGATE_EXPR)
8900    return 0;
8901
8902  offset = TREE_OPERAND (offset, 0);
8903  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8904	 || TREE_CODE (offset) == NOP_EXPR
8905	 || TREE_CODE (offset) == CONVERT_EXPR)
8906    offset = TREE_OPERAND (offset, 0);
8907
8908  /* This must now be the address of EXP.  */
8909  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8910}
8911
8912/* Return the tree node if ARG corresponds to a string constant, or zero
8913   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8914   in bytes within the string that ARG is accessing.  The type of the
8915   offset will be `sizetype'.  */
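/* For example, for "hello" + 2, or for &buf[3] where buf is a read-only
   variable initialized from a string literal, this returns the STRING_CST
   and sets *PTR_OFFSET to 2 or 3 respectively.  */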
8916
8917tree
8918string_constant (tree arg, tree *ptr_offset)
8919{
8920  tree array, offset;
8921  STRIP_NOPS (arg);
8922
8923  if (TREE_CODE (arg) == ADDR_EXPR)
8924    {
8925      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8926	{
8927	  *ptr_offset = size_zero_node;
8928	  return TREE_OPERAND (arg, 0);
8929	}
8930      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8931	{
8932	  array = TREE_OPERAND (arg, 0);
8933	  offset = size_zero_node;
8934	}
8935      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8936	{
8937	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8938	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8939	  if (TREE_CODE (array) != STRING_CST
8940	      && TREE_CODE (array) != VAR_DECL)
8941	    return 0;
8942	}
8943      else
8944	return 0;
8945    }
8946  else if (TREE_CODE (arg) == PLUS_EXPR)
8947    {
8948      tree arg0 = TREE_OPERAND (arg, 0);
8949      tree arg1 = TREE_OPERAND (arg, 1);
8950
8951      STRIP_NOPS (arg0);
8952      STRIP_NOPS (arg1);
8953
8954      if (TREE_CODE (arg0) == ADDR_EXPR
8955	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8956	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8957	{
8958	  array = TREE_OPERAND (arg0, 0);
8959	  offset = arg1;
8960	}
8961      else if (TREE_CODE (arg1) == ADDR_EXPR
8962	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8963		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8964	{
8965	  array = TREE_OPERAND (arg1, 0);
8966	  offset = arg0;
8967	}
8968      else
8969	return 0;
8970    }
8971  else
8972    return 0;
8973
8974  if (TREE_CODE (array) == STRING_CST)
8975    {
8976      *ptr_offset = fold_convert (sizetype, offset);
8977      return array;
8978    }
8979  else if (TREE_CODE (array) == VAR_DECL)
8980    {
8981      int length;
8982
8983      /* Variables initialized to string literals can be handled too.  */
8984      if (DECL_INITIAL (array) == NULL_TREE
8985	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8986	return 0;
8987
8988      /* The array must be read-only, have no side effects, and bind locally.  */
8989      if (! TREE_READONLY (array)
8990	  || TREE_SIDE_EFFECTS (array)
8991	  || ! targetm.binds_local_p (array))
8992	return 0;
8993
8994      /* Avoid const char foo[4] = "abcde";  */
8995      if (DECL_SIZE_UNIT (array) == NULL_TREE
8996	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8997	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8998	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8999	return 0;
9000
9001      /* If the variable is bigger than the string literal, OFFSET must be
9002	 constant and within the bounds of the string literal.  */
9003      offset = fold_convert (sizetype, offset);
9004      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9005	  && (! host_integerp (offset, 1)
9006	      || compare_tree_int (offset, length) >= 0))
9007	return 0;
9008
9009      *ptr_offset = offset;
9010      return DECL_INITIAL (array);
9011    }
9012
9013  return 0;
9014}
9015
9016/* Generate code to calculate EXP using a store-flag instruction
9017   and return an rtx for the result.  EXP is either a comparison
9018   or a TRUTH_NOT_EXPR whose operand is a comparison.
9019
9020   If TARGET is nonzero, store the result there if convenient.
9021
9022   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9023   cheap.
9024
9025   Return zero if there is no suitable set-flag instruction
9026   available on this machine.
9027
9028   Once expand_expr has been called on the arguments of the comparison,
9029   we are committed to doing the store flag, since it is not safe to
9030   re-evaluate the expression.  We emit the store-flag insn by calling
9031   emit_store_flag, but only expand the arguments if we have a reason
9032   to believe that emit_store_flag will be successful.  If we think that
9033   it will, but it isn't, we have to simulate the store-flag with a
9034   set/jump/set sequence.  */
9035
9036static rtx
9037do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9038{
9039  enum rtx_code code;
9040  tree arg0, arg1, type;
9041  tree tem;
9042  enum machine_mode operand_mode;
9043  int invert = 0;
9044  int unsignedp;
9045  rtx op0, op1;
9046  enum insn_code icode;
9047  rtx subtarget = target;
9048  rtx result, label;
9049
9050  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9051     result at the end.  We can't simply invert the test since it would
9052     have already been inverted if it were valid.  This case occurs for
9053     some floating-point comparisons.  */
9054
9055  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9056    invert = 1, exp = TREE_OPERAND (exp, 0);
9057
9058  arg0 = TREE_OPERAND (exp, 0);
9059  arg1 = TREE_OPERAND (exp, 1);
9060
9061  /* Don't crash if the comparison was erroneous.  */
9062  if (arg0 == error_mark_node || arg1 == error_mark_node)
9063    return const0_rtx;
9064
9065  type = TREE_TYPE (arg0);
9066  operand_mode = TYPE_MODE (type);
9067  unsignedp = TYPE_UNSIGNED (type);
9068
9069  /* We won't bother with BLKmode store-flag operations because it would mean
9070     passing a lot of information to emit_store_flag.  */
9071  if (operand_mode == BLKmode)
9072    return 0;
9073
9074  /* We won't bother with store-flag operations involving function pointers
9075     when function pointers must be canonicalized before comparisons.  */
9076#ifdef HAVE_canonicalize_funcptr_for_compare
9077  if (HAVE_canonicalize_funcptr_for_compare
9078      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9079	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9080	       == FUNCTION_TYPE))
9081	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9082	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9083		  == FUNCTION_TYPE))))
9084    return 0;
9085#endif
9086
9087  STRIP_NOPS (arg0);
9088  STRIP_NOPS (arg1);
9089
9090  /* Get the rtx comparison code to use.  We know that EXP is a comparison
9091     operation of some type.  Some comparisons against 1 and -1 can be
9092     converted to comparisons with zero.  Do so here so that the tests
9093     below will be aware that we have a comparison with zero.   These
9094     tests will not catch constants in the first operand, but constants
9095     are rarely passed as the first operand.  */
9096
9097  switch (TREE_CODE (exp))
9098    {
9099    case EQ_EXPR:
9100      code = EQ;
9101      break;
9102    case NE_EXPR:
9103      code = NE;
9104      break;
9105    case LT_EXPR:
9106      if (integer_onep (arg1))
9107	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9108      else
9109	code = unsignedp ? LTU : LT;
9110      break;
9111    case LE_EXPR:
9112      if (! unsignedp && integer_all_onesp (arg1))
9113	arg1 = integer_zero_node, code = LT;
9114      else
9115	code = unsignedp ? LEU : LE;
9116      break;
9117    case GT_EXPR:
9118      if (! unsignedp && integer_all_onesp (arg1))
9119	arg1 = integer_zero_node, code = GE;
9120      else
9121	code = unsignedp ? GTU : GT;
9122      break;
9123    case GE_EXPR:
9124      if (integer_onep (arg1))
9125	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9126      else
9127	code = unsignedp ? GEU : GE;
9128      break;
9129
9130    case UNORDERED_EXPR:
9131      code = UNORDERED;
9132      break;
9133    case ORDERED_EXPR:
9134      code = ORDERED;
9135      break;
9136    case UNLT_EXPR:
9137      code = UNLT;
9138      break;
9139    case UNLE_EXPR:
9140      code = UNLE;
9141      break;
9142    case UNGT_EXPR:
9143      code = UNGT;
9144      break;
9145    case UNGE_EXPR:
9146      code = UNGE;
9147      break;
9148    case UNEQ_EXPR:
9149      code = UNEQ;
9150      break;
9151    case LTGT_EXPR:
9152      code = LTGT;
9153      break;
9154
9155    default:
9156      gcc_unreachable ();
9157    }
9158
9159  /* Put a constant second.  */
9160  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9161    {
9162      tem = arg0; arg0 = arg1; arg1 = tem;
9163      code = swap_condition (code);
9164    }
9165
9166  /* If this is an equality or inequality test of a single bit, we can
9167     do this by shifting the bit being tested to the low-order bit and
9168     masking the result with the constant 1.  If the condition was EQ,
9169     we xor it with 1.  This does not require an scc insn and is faster
9170     than an scc insn even if we have it.
9171
9172     The code to make this transformation was moved into fold_single_bit_test,
9173     so we just call into the folder and expand its result.  */
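  /* For instance, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
     becomes ((x >> 3) & 1) ^ 1.  */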
9174
9175  if ((code == NE || code == EQ)
9176      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9177      && integer_pow2p (TREE_OPERAND (arg0, 1)))
9178    {
9179      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9180      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9181						arg0, arg1, type),
9182			  target, VOIDmode, EXPAND_NORMAL);
9183    }
9184
9185  /* Now see if we are likely to be able to do this.  Return if not.  */
9186  if (! can_compare_p (code, operand_mode, ccp_store_flag))
9187    return 0;
9188
9189  icode = setcc_gen_code[(int) code];
9190  if (icode == CODE_FOR_nothing
9191      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9192    {
9193      /* We can only do this if it is one of the special cases that
9194	 can be handled without an scc insn.  */
9195      if ((code == LT && integer_zerop (arg1))
9196	  || (! only_cheap && code == GE && integer_zerop (arg1)))
9197	;
9198      else if (! only_cheap && (code == NE || code == EQ)
9199	       && TREE_CODE (type) != REAL_TYPE
9200	       && ((abs_optab->handlers[(int) operand_mode].insn_code
9201		    != CODE_FOR_nothing)
9202		   || (ffs_optab->handlers[(int) operand_mode].insn_code
9203		       != CODE_FOR_nothing)))
9204	;
9205      else
9206	return 0;
9207    }
9208
9209  if (! get_subtarget (target)
9210      || GET_MODE (subtarget) != operand_mode)
9211    subtarget = 0;
9212
9213  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9214
9215  if (target == 0)
9216    target = gen_reg_rtx (mode);
9217
9218  result = emit_store_flag (target, code, op0, op1,
9219			    operand_mode, unsignedp, 1);
9220
9221  if (result)
9222    {
9223      if (invert)
9224	result = expand_binop (mode, xor_optab, result, const1_rtx,
9225			       result, 0, OPTAB_LIB_WIDEN);
9226      return result;
9227    }
9228
9229  /* If this failed, we have to do this with set/compare/jump/set code.  */
9230  if (!REG_P (target)
9231      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9232    target = gen_reg_rtx (GET_MODE (target));
9233
9234  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9235  result = compare_from_rtx (op0, op1, code, unsignedp,
9236			     operand_mode, NULL_RTX);
9237  if (GET_CODE (result) == CONST_INT)
9238    return (((result == const0_rtx && ! invert)
9239	     || (result != const0_rtx && invert))
9240	    ? const0_rtx : const1_rtx);
9241
9242  /* The code of RESULT may not match CODE if compare_from_rtx
9243     decided to swap its operands and reverse the original code.
9244
9245     We know that compare_from_rtx returns either a CONST_INT or
9246     a new comparison code, so it is safe to just extract the
9247     code from RESULT.  */
9248  code = GET_CODE (result);
9249
9250  label = gen_label_rtx ();
9251  gcc_assert (bcc_gen_fctn[(int) code]);
9252
9253  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9254  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9255  emit_label (label);
9256
9257  return target;
9258}
9259
9260
9261/* Stubs in case we haven't got a casesi insn.  */
9262#ifndef HAVE_casesi
9263# define HAVE_casesi 0
9264# define gen_casesi(a, b, c, d, e) (0)
9265# define CODE_FOR_casesi CODE_FOR_nothing
9266#endif
9267
9268/* If the machine does not have a case insn that compares the bounds,
9269   this means extra overhead for dispatch tables, which raises the
9270   threshold for using them.  */
9271#ifndef CASE_VALUES_THRESHOLD
9272#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9273#endif /* CASE_VALUES_THRESHOLD */
9274
9275unsigned int
9276case_values_threshold (void)
9277{
9278  return CASE_VALUES_THRESHOLD;
9279}
9280
9281/* Attempt to generate a casesi instruction.  Returns 1 if successful,
9282   0 otherwise (i.e. if there is no casesi instruction).  */
9283int
9284try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9285	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9286{
9287  enum machine_mode index_mode = SImode;
9288  int index_bits = GET_MODE_BITSIZE (index_mode);
9289  rtx op1, op2, index;
9290  enum machine_mode op_mode;
9291
9292  if (! HAVE_casesi)
9293    return 0;
9294
9295  /* Convert the index to SImode.  */
9296  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9297    {
9298      enum machine_mode omode = TYPE_MODE (index_type);
9299      rtx rangertx = expand_normal (range);
9300
9301      /* We must handle the endpoints in the original mode.  */
9302      index_expr = build2 (MINUS_EXPR, index_type,
9303			   index_expr, minval);
9304      minval = integer_zero_node;
9305      index = expand_normal (index_expr);
9306      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9307			       omode, 1, default_label);
9308      /* Now we can safely truncate.  */
9309      index = convert_to_mode (index_mode, index, 0);
9310    }
9311  else
9312    {
9313      if (TYPE_MODE (index_type) != index_mode)
9314	{
9315	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
9316	  index_expr = fold_convert (index_type, index_expr);
9317	}
9318
9319      index = expand_normal (index_expr);
9320    }
9321
9322  do_pending_stack_adjust ();
9323
9324  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9325  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9326      (index, op_mode))
9327    index = copy_to_mode_reg (op_mode, index);
9328
9329  op1 = expand_normal (minval);
9330
9331  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9332  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9333		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9334  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9335      (op1, op_mode))
9336    op1 = copy_to_mode_reg (op_mode, op1);
9337
9338  op2 = expand_normal (range);
9339
9340  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9341  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9342		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9343  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9344      (op2, op_mode))
9345    op2 = copy_to_mode_reg (op_mode, op2);
9346
9347  emit_jump_insn (gen_casesi (index, op1, op2,
9348			      table_label, default_label));
9349  return 1;
9350}
9351
9352/* Attempt to generate a tablejump instruction; same concept.  */
9353#ifndef HAVE_tablejump
9354#define HAVE_tablejump 0
9355#define gen_tablejump(x, y) (0)
9356#endif
9357
9358/* Subroutine of the next function.
9359
9360   INDEX is the value being switched on, with the lowest value
9361   in the table already subtracted.
9362   MODE is its expected mode (needed if INDEX is constant).
9363   RANGE is the length of the jump table.
9364   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9365
9366   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9367   index value is out of range.  */
9368
9369static void
9370do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9371	      rtx default_label)
9372{
9373  rtx temp, vector;
9374
9375  if (INTVAL (range) > cfun->max_jumptable_ents)
9376    cfun->max_jumptable_ents = INTVAL (range);
9377
9378  /* Do an unsigned comparison (in the proper mode) between the index
9379     expression and the value which represents the length of the range.
9380     Since we just finished subtracting the lower bound of the range
9381     from the index expression, this comparison allows us to simultaneously
9382     check that the original index expression value is both greater than
9383     or equal to the minimum value of the range and less than or equal to
9384     the maximum value of the range.  */
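  /* For instance, for case values 5..10 the caller has already subtracted 5
     from INDEX, so a single unsigned comparison against the range (5) also
     rejects original values below 5, which wrap around to large unsigned
     numbers.  */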
9385
9386  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9387			   default_label);
9388
9389  /* If index is in range, it must fit in Pmode.
9390     Convert to Pmode so we can index with it.  */
9391  if (mode != Pmode)
9392    index = convert_to_mode (Pmode, index, 1);
9393
9394  /* Don't let a MEM slip through, because then INDEX that comes
9395     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9396     and break_out_memory_refs will go to work on it and mess it up.  */
9397#ifdef PIC_CASE_VECTOR_ADDRESS
9398  if (flag_pic && !REG_P (index))
9399    index = copy_to_mode_reg (Pmode, index);
9400#endif
9401
9402  /* If flag_force_addr were to affect this address
9403     it could interfere with the tricky assumptions made
9404     about addresses that contain label-refs,
9405     which may be valid only very near the tablejump itself.  */
9406  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9407     GET_MODE_SIZE, because this indicates how large insns are.  The other
9408     uses should all be Pmode, because they are addresses.  This code
9409     could fail if addresses and insns are not the same size.  */
9410  index = gen_rtx_PLUS (Pmode,
9411			gen_rtx_MULT (Pmode, index,
9412				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9413			gen_rtx_LABEL_REF (Pmode, table_label));
9414#ifdef PIC_CASE_VECTOR_ADDRESS
9415  if (flag_pic)
9416    index = PIC_CASE_VECTOR_ADDRESS (index);
9417  else
9418#endif
9419    index = memory_address_noforce (CASE_VECTOR_MODE, index);
9420  temp = gen_reg_rtx (CASE_VECTOR_MODE);
9421  vector = gen_const_mem (CASE_VECTOR_MODE, index);
9422  convert_move (temp, vector, 0);
9423
9424  emit_jump_insn (gen_tablejump (temp, table_label));
9425
9426  /* If we are generating PIC code or if the table is PC-relative, the
9427     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
9428  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9429    emit_barrier ();
9430}
9431
9432int
9433try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9434	       rtx table_label, rtx default_label)
9435{
9436  rtx index;
9437
9438  if (! HAVE_tablejump)
9439    return 0;
9440
9441  index_expr = fold_build2 (MINUS_EXPR, index_type,
9442			    fold_convert (index_type, index_expr),
9443			    fold_convert (index_type, minval));
9444  index = expand_normal (index_expr);
9445  do_pending_stack_adjust ();
9446
9447  do_tablejump (index, TYPE_MODE (index_type),
9448		convert_modes (TYPE_MODE (index_type),
9449			       TYPE_MODE (TREE_TYPE (range)),
9450			       expand_normal (range),
9451			       TYPE_UNSIGNED (TREE_TYPE (range))),
9452		table_label, default_label);
9453  return 1;
9454}
9455
9456/* Nonzero if the mode is a valid vector mode for this architecture.
9457   This returns nonzero even if there is no hardware support for the
9458   vector mode, but we can emulate with narrower modes.  */
9459
9460int
9461vector_mode_valid_p (enum machine_mode mode)
9462{
9463  enum mode_class class = GET_MODE_CLASS (mode);
9464  enum machine_mode innermode;
9465
9466  /* Doh!  What's going on?  */
9467  if (class != MODE_VECTOR_INT
9468      && class != MODE_VECTOR_FLOAT)
9469    return 0;
9470
9471  /* Hardware support.  Woo hoo!  */
9472  if (targetm.vector_mode_supported_p (mode))
9473    return 1;
9474
9475  innermode = GET_MODE_INNER (mode);
9476
9477  /* We should probably return 1 if requesting V4DI and we have no DI
9478     but do have V2DI; however, that case is probably very unlikely.  */
9479
9480  /* If we have support for the inner mode, we can safely emulate it.
9481     We may not have V2DI, but we can emulate with a pair of DIs.  */
9482  return targetm.scalar_mode_supported_p (innermode);
9483}
9484
9485/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
9486static rtx
9487const_vector_from_tree (tree exp)
9488{
9489  rtvec v;
9490  int units, i;
9491  tree link, elt;
9492  enum machine_mode inner, mode;
9493
9494  mode = TYPE_MODE (TREE_TYPE (exp));
9495
9496  if (initializer_zerop (exp))
9497    return CONST0_RTX (mode);
9498
9499  units = GET_MODE_NUNITS (mode);
9500  inner = GET_MODE_INNER (mode);
9501
9502  v = rtvec_alloc (units);
9503
9504  link = TREE_VECTOR_CST_ELTS (exp);
9505  for (i = 0; link; link = TREE_CHAIN (link), ++i)
9506    {
9507      elt = TREE_VALUE (link);
9508
9509      if (TREE_CODE (elt) == REAL_CST)
9510	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9511							 inner);
9512      else
9513	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9514					       TREE_INT_CST_HIGH (elt),
9515					       inner);
9516    }
9517
9518  /* Initialize remaining elements to 0.  */
9519  for (; i < units; ++i)
9520    RTVEC_ELT (v, i) = CONST0_RTX (inner);
9521
9522  return gen_rtx_CONST_VECTOR (mode, v);
9523}
9524#include "gt-expr.h"
9525