expr.c revision 102780
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "tree.h"
27#include "obstack.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should be processed from last to first if the stack and args
53   grow in opposite directions, but only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
59#endif
60
61#endif
62
63#ifndef STACK_PUSH_CODE
64#ifdef STACK_GROWS_DOWNWARD
65#define STACK_PUSH_CODE PRE_DEC
66#else
67#define STACK_PUSH_CODE PRE_INC
68#endif
69#endif
70
71/* Assume that case vectors are not pc-relative.  */
72#ifndef CASE_VECTOR_PC_RELATIVE
73#define CASE_VECTOR_PC_RELATIVE 0
74#endif
75
76/* If this is nonzero, we do not bother generating VOLATILE
77   around volatile memory references, and we are willing to
78   output indirect addresses.  If cse is to follow, we reject
79   indirect addresses so a useful potential cse is generated;
80   if it is used only once, instruction combination will produce
81   the same indirect address eventually.  */
82int cse_not_expected;
83
84/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
85static tree placeholder_list = 0;
86
87/* This structure is used by move_by_pieces to describe the move to
88   be performed.  */
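/* TO and FROM are the destination and source MEMs; TO_ADDR and FROM_ADDR
   are the addresses being stepped through.  AUTINC_TO and AUTINC_FROM
   are nonzero when an address is advanced automatically as the copy
   proceeds, and EXPLICIT_INC_TO and EXPLICIT_INC_FROM are -1 or 1 when
   explicit add insns must adjust it before or after each move.  LEN is
   the number of bytes still to move, OFFSET the current byte offset, and
   REVERSE is nonzero when copying from the highest address downward.  */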
89struct move_by_pieces
90{
91  rtx to;
92  rtx to_addr;
93  int autinc_to;
94  int explicit_inc_to;
95  rtx from;
96  rtx from_addr;
97  int autinc_from;
98  int explicit_inc_from;
99  unsigned HOST_WIDE_INT len;
100  HOST_WIDE_INT offset;
101  int reverse;
102};
103
104/* This structure is used by store_by_pieces to describe the store (or
105   clear) to be performed.  */
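/* CONSTFUN is called with CONSTFUNDATA, a byte offset and a machine mode,
   and must return an rtx holding the bytes to be stored at that offset;
   clear_by_pieces supplies a constfun that simply yields zero.  */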
106
107struct store_by_pieces
108{
109  rtx to;
110  rtx to_addr;
111  int autinc_to;
112  int explicit_inc_to;
113  unsigned HOST_WIDE_INT len;
114  HOST_WIDE_INT offset;
115  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116  PTR constfundata;
117  int reverse;
118};
119
120extern struct obstack permanent_obstack;
121
122static rtx enqueue_insn		PARAMS ((rtx, rtx));
123static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124				PARAMS ((unsigned HOST_WIDE_INT,
125					 unsigned int));
126static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127					 struct move_by_pieces *));
128static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
129					 enum machine_mode));
130static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
131					 unsigned int));
132static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
133					 unsigned int));
134static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
135					 enum machine_mode,
136					 struct store_by_pieces *));
137static rtx get_subtarget	PARAMS ((rtx));
138static int is_zeros_p		PARAMS ((tree));
139static int mostly_zeros_p	PARAMS ((tree));
140static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141					     HOST_WIDE_INT, enum machine_mode,
142					     tree, tree, int, int));
143static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
145					 HOST_WIDE_INT, enum machine_mode,
146					 tree, enum machine_mode, int, tree,
147					 int));
148static rtx var_rtx		PARAMS ((tree));
149static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
151static int is_aligning_offset	PARAMS ((tree, tree));
152static rtx expand_increment	PARAMS ((tree, int, int));
153static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
154static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
155static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
156					 rtx, rtx));
157static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
158#ifdef PUSH_ROUNDING
159static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
160#endif
161static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
162
163/* Record for each mode whether we can move a register directly to or
164   from an object of that mode in memory.  If we can't, we won't try
165   to use that mode directly when accessing a field of that mode.  */
166
167static char direct_load[NUM_MACHINE_MODES];
168static char direct_store[NUM_MACHINE_MODES];
169
170/* If a memory-to-memory move would take MOVE_RATIO or more simple
171   move-instruction sequences, we will do a movstr or libcall instead.  */
172
173#ifndef MOVE_RATIO
174#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175#define MOVE_RATIO 2
176#else
177/* If we are optimizing for space (-Os), cut down the default move ratio.  */
178#define MOVE_RATIO (optimize_size ? 3 : 15)
179#endif
180#endif
181
182/* This macro is used to determine whether move_by_pieces should be called
183   to perform a structure copy.  */
184#ifndef MOVE_BY_PIECES_P
185#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
186  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
187#endif
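/* move_by_pieces_ninsns counts the moves that would be emitted in
   successively narrower integer modes for SIZE bytes at alignment ALIGN;
   if that count reaches MOVE_RATIO, a movstr pattern or a library call
   is preferred instead.  */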
188
189/* This array records the insn_code of insns to perform block moves.  */
190enum insn_code movstr_optab[NUM_MACHINE_MODES];
191
192/* This array records the insn_code of insns to perform block clears.  */
193enum insn_code clrstr_optab[NUM_MACHINE_MODES];
194
195/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */
196
197#ifndef SLOW_UNALIGNED_ACCESS
198#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199#endif
200
201/* This is run once per compilation to set up which modes can be used
202   directly in memory and to initialize the block move optab.  */
203
204void
205init_expr_once ()
206{
207  rtx insn, pat;
208  enum machine_mode mode;
209  int num_clobbers;
210  rtx mem, mem1;
211
212  start_sequence ();
213
214  /* Try indexing by frame ptr and try by stack ptr.
215     It is known that on the Convex the stack ptr isn't a valid index.
216     With luck, one or the other is valid on any machine.  */
217  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219
220  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
221  pat = PATTERN (insn);
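  /* A single dummy SET insn is reused for every test below: its source
     and destination are overwritten with each (mem, reg) pair, and recog
     is asked whether the resulting move matches some insn pattern.  */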
222
223  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
224       mode = (enum machine_mode) ((int) mode + 1))
225    {
226      int regno;
227      rtx reg;
228
229      direct_load[(int) mode] = direct_store[(int) mode] = 0;
230      PUT_MODE (mem, mode);
231      PUT_MODE (mem1, mode);
232
233      /* See if there is some register that can be used in this mode and
234	 directly loaded or stored from memory.  */
235
236      if (mode != VOIDmode && mode != BLKmode)
237	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
238	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
239	     regno++)
240	  {
241	    if (! HARD_REGNO_MODE_OK (regno, mode))
242	      continue;
243
244	    reg = gen_rtx_REG (mode, regno);
245
246	    SET_SRC (pat) = mem;
247	    SET_DEST (pat) = reg;
248	    if (recog (pat, insn, &num_clobbers) >= 0)
249	      direct_load[(int) mode] = 1;
250
251	    SET_SRC (pat) = mem1;
252	    SET_DEST (pat) = reg;
253	    if (recog (pat, insn, &num_clobbers) >= 0)
254	      direct_load[(int) mode] = 1;
255
256	    SET_SRC (pat) = reg;
257	    SET_DEST (pat) = mem;
258	    if (recog (pat, insn, &num_clobbers) >= 0)
259	      direct_store[(int) mode] = 1;
260
261	    SET_SRC (pat) = reg;
262	    SET_DEST (pat) = mem1;
263	    if (recog (pat, insn, &num_clobbers) >= 0)
264	      direct_store[(int) mode] = 1;
265	  }
266    }
267
268  end_sequence ();
269}
270
271/* This is run at the start of compiling a function.  */
272
273void
274init_expr ()
275{
276  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
277
278  pending_chain = 0;
279  pending_stack_adjust = 0;
280  stack_pointer_delta = 0;
281  inhibit_defer_pop = 0;
282  saveregs_value = 0;
283  apply_args_value = 0;
284  forced_labels = 0;
285}
286
287void
288mark_expr_status (p)
289     struct expr_status *p;
290{
291  if (p == NULL)
292    return;
293
294  ggc_mark_rtx (p->x_saveregs_value);
295  ggc_mark_rtx (p->x_apply_args_value);
296  ggc_mark_rtx (p->x_forced_labels);
297}
298
299void
300free_expr_status (f)
301     struct function *f;
302{
303  free (f->expr);
304  f->expr = NULL;
305}
306
307/* Small sanity check that the queue is empty at the end of a function.  */
308
309void
310finish_expr_for_function ()
311{
312  if (pending_chain)
313    abort ();
314}
315
316/* Manage the queue of increment instructions to be output
317   for POSTINCREMENT_EXPR expressions, etc.  */
318
319/* Queue up to increment (or change) VAR later.  BODY says how:
320   BODY should be the same thing you would pass to emit_insn
321   to increment right away.  It will go to emit_insn later on.
322
323   The value is a QUEUED expression to be used in place of VAR
324   where you want to guarantee the pre-incrementation value of VAR.  */
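/* A QUEUED rtx carries five operands: the variable to be incremented,
   the insn that performed the increment (filled in by emit_queue), a
   copy of the variable's old value (made by protect_from_queue when one
   is needed), the increment body itself, and the link to the next entry
   in the queue.  */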
325
326static rtx
327enqueue_insn (var, body)
328     rtx var, body;
329{
330  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
331				  body, pending_chain);
332  return pending_chain;
333}
334
335/* Use protect_from_queue to convert a QUEUED expression
336   into something that you can put immediately into an instruction.
337   If the queued incrementation has not happened yet,
338   protect_from_queue returns the variable itself.
339   If the incrementation has happened, protect_from_queue returns a temp
340   that contains a copy of the old value of the variable.
341
342   Any time an rtx which might possibly be a QUEUED is to be put
343   into an instruction, it must be passed through protect_from_queue first.
344   QUEUED expressions are not meaningful in instructions.
345
346   Do not pass a value through protect_from_queue and then hold
347   on to it for a while before putting it in an instruction!
348   If the queue is flushed in between, incorrect code will result.  */
349
350rtx
351protect_from_queue (x, modify)
352     rtx x;
353     int modify;
354{
355  RTX_CODE code = GET_CODE (x);
356
357#if 0  /* A QUEUED can hang around after the queue is forced out.  */
358  /* Shortcut for most common case.  */
359  if (pending_chain == 0)
360    return x;
361#endif
362
363  if (code != QUEUED)
364    {
365      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
366	 use of autoincrement.  Make a copy of the contents of the memory
367	 location rather than a copy of the address, but not if the value is
368	 of mode BLKmode.  Don't modify X in place since it might be
369	 shared.  */
370      if (code == MEM && GET_MODE (x) != BLKmode
371	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
372	{
373	  rtx y = XEXP (x, 0);
374	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
375
376	  if (QUEUED_INSN (y))
377	    {
378	      rtx temp = gen_reg_rtx (GET_MODE (x));
379
380	      emit_insn_before (gen_move_insn (temp, new),
381				QUEUED_INSN (y));
382	      return temp;
383	    }
384
385	  /* Copy the address into a pseudo, so that the returned value
386	     remains correct across calls to emit_queue.  */
387	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388	}
389
390      /* Otherwise, recursively protect the subexpressions of all
391	 the kinds of rtx's that can contain a QUEUED.  */
392      if (code == MEM)
393	{
394	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
395	  if (tem != XEXP (x, 0))
396	    {
397	      x = copy_rtx (x);
398	      XEXP (x, 0) = tem;
399	    }
400	}
401      else if (code == PLUS || code == MULT)
402	{
403	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
404	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
405	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
406	    {
407	      x = copy_rtx (x);
408	      XEXP (x, 0) = new0;
409	      XEXP (x, 1) = new1;
410	    }
411	}
412      return x;
413    }
414  /* If the increment has not happened, use the variable itself.  Copy it
415     into a new pseudo so that the value remains correct across calls to
416     emit_queue.  */
417  if (QUEUED_INSN (x) == 0)
418    return copy_to_reg (QUEUED_VAR (x));
419  /* If the increment has happened and a pre-increment copy exists,
420     use that copy.  */
421  if (QUEUED_COPY (x) != 0)
422    return QUEUED_COPY (x);
423  /* The increment has happened but we haven't set up a pre-increment copy.
424     Set one up now, and use it.  */
425  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
426  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
427		    QUEUED_INSN (x));
428  return QUEUED_COPY (x);
429}
430
431/* Return nonzero if X contains a QUEUED expression:
432   if it contains anything that will be altered by a queued increment.
433   We handle only combinations of MEM, PLUS, MINUS and MULT operators
434   since memory addresses generally contain only those.  */
435
436int
437queued_subexp_p (x)
438     rtx x;
439{
440  enum rtx_code code = GET_CODE (x);
441  switch (code)
442    {
443    case QUEUED:
444      return 1;
445    case MEM:
446      return queued_subexp_p (XEXP (x, 0));
447    case MULT:
448    case PLUS:
449    case MINUS:
450      return (queued_subexp_p (XEXP (x, 0))
451	      || queued_subexp_p (XEXP (x, 1)));
452    default:
453      return 0;
454    }
455}
456
457/* Perform all the pending incrementations.  */
458
459void
460emit_queue ()
461{
462  rtx p;
463  while ((p = pending_chain))
464    {
465      rtx body = QUEUED_BODY (p);
466
467      if (GET_CODE (body) == SEQUENCE)
468	{
469	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
470	  emit_insn (QUEUED_BODY (p));
471	}
472      else
473	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
474      pending_chain = QUEUED_NEXT (p);
475    }
476}
477
478/* Copy data from FROM to TO, where the machine modes are not the same.
479   Both modes may be integer, or both may be floating.
480   UNSIGNEDP should be nonzero if FROM is an unsigned type.
481   This causes zero-extension instead of sign-extension.  */
482
483void
484convert_move (to, from, unsignedp)
485     rtx to, from;
486     int unsignedp;
487{
488  enum machine_mode to_mode = GET_MODE (to);
489  enum machine_mode from_mode = GET_MODE (from);
490  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
491  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
492  enum insn_code code;
493  rtx libcall;
494
495  /* rtx code for making an equivalent value.  */
496  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
497
498  to = protect_from_queue (to, 1);
499  from = protect_from_queue (from, 0);
500
501  if (to_real != from_real)
502    abort ();
503
504  /* If FROM is a SUBREG that indicates that we have already done at least
505     the required extension, strip it.  We don't handle such SUBREGs as
506     TO here.  */
507
508  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
509      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
510	  >= GET_MODE_SIZE (to_mode))
511      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
512    from = gen_lowpart (to_mode, from), from_mode = to_mode;
513
514  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
515    abort ();
516
517  if (to_mode == from_mode
518      || (from_mode == VOIDmode && CONSTANT_P (from)))
519    {
520      emit_move_insn (to, from);
521      return;
522    }
523
524  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
525    {
526      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
527	abort ();
528
529      if (VECTOR_MODE_P (to_mode))
530	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
531      else
532	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
533
534      emit_move_insn (to, from);
535      return;
536    }
537
538  if (to_real != from_real)
539    abort ();
540
541  if (to_real)
542    {
543      rtx value, insns;
544
545      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
546	{
547	  /* Try converting directly if the insn is supported.  */
548	  if ((code = can_extend_p (to_mode, from_mode, 0))
549	      != CODE_FOR_nothing)
550	    {
551	      emit_unop_insn (code, to, from, UNKNOWN);
552	      return;
553	    }
554	}
555
556#ifdef HAVE_trunchfqf2
557      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
558	{
559	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
560	  return;
561	}
562#endif
563#ifdef HAVE_trunctqfqf2
564      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
565	{
566	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
567	  return;
568	}
569#endif
570#ifdef HAVE_truncsfqf2
571      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
572	{
573	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
574	  return;
575	}
576#endif
577#ifdef HAVE_truncdfqf2
578      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
579	{
580	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
581	  return;
582	}
583#endif
584#ifdef HAVE_truncxfqf2
585      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
586	{
587	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
588	  return;
589	}
590#endif
591#ifdef HAVE_trunctfqf2
592      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
593	{
594	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
595	  return;
596	}
597#endif
598
599#ifdef HAVE_trunctqfhf2
600      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
601	{
602	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
603	  return;
604	}
605#endif
606#ifdef HAVE_truncsfhf2
607      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
608	{
609	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
610	  return;
611	}
612#endif
613#ifdef HAVE_truncdfhf2
614      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
615	{
616	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
617	  return;
618	}
619#endif
620#ifdef HAVE_truncxfhf2
621      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
622	{
623	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
624	  return;
625	}
626#endif
627#ifdef HAVE_trunctfhf2
628      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
629	{
630	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
631	  return;
632	}
633#endif
634
635#ifdef HAVE_truncsftqf2
636      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
637	{
638	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
639	  return;
640	}
641#endif
642#ifdef HAVE_truncdftqf2
643      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
644	{
645	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
646	  return;
647	}
648#endif
649#ifdef HAVE_truncxftqf2
650      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
651	{
652	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
653	  return;
654	}
655#endif
656#ifdef HAVE_trunctftqf2
657      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
658	{
659	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
660	  return;
661	}
662#endif
663
664#ifdef HAVE_truncdfsf2
665      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
666	{
667	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668	  return;
669	}
670#endif
671#ifdef HAVE_truncxfsf2
672      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
673	{
674	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675	  return;
676	}
677#endif
678#ifdef HAVE_trunctfsf2
679      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
680	{
681	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682	  return;
683	}
684#endif
685#ifdef HAVE_truncxfdf2
686      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
687	{
688	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689	  return;
690	}
691#endif
692#ifdef HAVE_trunctfdf2
693      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
694	{
695	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
696	  return;
697	}
698#endif
699
700      libcall = (rtx) 0;
701      switch (from_mode)
702	{
703	case SFmode:
704	  switch (to_mode)
705	    {
706	    case DFmode:
707	      libcall = extendsfdf2_libfunc;
708	      break;
709
710	    case XFmode:
711	      libcall = extendsfxf2_libfunc;
712	      break;
713
714	    case TFmode:
715	      libcall = extendsftf2_libfunc;
716	      break;
717
718	    default:
719	      break;
720	    }
721	  break;
722
723	case DFmode:
724	  switch (to_mode)
725	    {
726	    case SFmode:
727	      libcall = truncdfsf2_libfunc;
728	      break;
729
730	    case XFmode:
731	      libcall = extenddfxf2_libfunc;
732	      break;
733
734	    case TFmode:
735	      libcall = extenddftf2_libfunc;
736	      break;
737
738	    default:
739	      break;
740	    }
741	  break;
742
743	case XFmode:
744	  switch (to_mode)
745	    {
746	    case SFmode:
747	      libcall = truncxfsf2_libfunc;
748	      break;
749
750	    case DFmode:
751	      libcall = truncxfdf2_libfunc;
752	      break;
753
754	    default:
755	      break;
756	    }
757	  break;
758
759	case TFmode:
760	  switch (to_mode)
761	    {
762	    case SFmode:
763	      libcall = trunctfsf2_libfunc;
764	      break;
765
766	    case DFmode:
767	      libcall = trunctfdf2_libfunc;
768	      break;
769
770	    default:
771	      break;
772	    }
773	  break;
774
775	default:
776	  break;
777	}
778
779      if (libcall == (rtx) 0)
780	/* This conversion is not implemented yet.  */
781	abort ();
782
783      start_sequence ();
784      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
785				       1, from, from_mode);
786      insns = get_insns ();
787      end_sequence ();
788      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
789								    from));
790      return;
791    }
792
793  /* Now both modes are integers.  */
794
795  /* Handle expanding beyond a word.  */
796  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
797      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
798    {
799      rtx insns;
800      rtx lowpart;
801      rtx fill_value;
802      rtx lowfrom;
803      int i;
804      enum machine_mode lowpart_mode;
805      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
806
807      /* Try converting directly if the insn is supported.  */
808      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
809	  != CODE_FOR_nothing)
810	{
811	  /* If FROM is a SUBREG, put it into a register.  Do this
812	     so that we always generate the same set of insns for
813	     better cse'ing; if an intermediate assignment occurred,
814	     we won't be doing the operation directly on the SUBREG.  */
815	  if (optimize > 0 && GET_CODE (from) == SUBREG)
816	    from = force_reg (from_mode, from);
817	  emit_unop_insn (code, to, from, equiv_code);
818	  return;
819	}
820      /* Next, try converting via full word.  */
821      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
822	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
823		   != CODE_FOR_nothing))
824	{
825	  if (GET_CODE (to) == REG)
826	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
827	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
828	  emit_unop_insn (code, to,
829			  gen_lowpart (word_mode, to), equiv_code);
830	  return;
831	}
832
833      /* No special multiword conversion insn; do it by hand.  */
834      start_sequence ();
835
836      /* Since we will turn this into a no conflict block, we must ensure
837	 that the source does not overlap the target.  */
838
839      if (reg_overlap_mentioned_p (to, from))
840	from = force_reg (from_mode, from);
841
842      /* Get a copy of FROM widened to a word, if necessary.  */
843      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
844	lowpart_mode = word_mode;
845      else
846	lowpart_mode = from_mode;
847
848      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
849
850      lowpart = gen_lowpart (lowpart_mode, to);
851      emit_move_insn (lowpart, lowfrom);
852
853      /* Compute the value to put in each remaining word.  */
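      /* For an unsigned source the remaining words are simply zero; for a
         signed source they must be copies of the sign bit, produced either
         with an slt instruction (when STORE_FLAG_VALUE is -1) or by
         arithmetically shifting the low part right by one less than its
         width.  */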
854      if (unsignedp)
855	fill_value = const0_rtx;
856      else
857	{
858#ifdef HAVE_slt
859	  if (HAVE_slt
860	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
861	      && STORE_FLAG_VALUE == -1)
862	    {
863	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
864			     lowpart_mode, 0);
865	      fill_value = gen_reg_rtx (word_mode);
866	      emit_insn (gen_slt (fill_value));
867	    }
868	  else
869#endif
870	    {
871	      fill_value
872		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
873				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
874				NULL_RTX, 0);
875	      fill_value = convert_to_mode (word_mode, fill_value, 1);
876	    }
877	}
878
879      /* Fill the remaining words.  */
880      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
881	{
882	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
883	  rtx subword = operand_subword (to, index, 1, to_mode);
884
885	  if (subword == 0)
886	    abort ();
887
888	  if (fill_value != subword)
889	    emit_move_insn (subword, fill_value);
890	}
891
892      insns = get_insns ();
893      end_sequence ();
894
895      emit_no_conflict_block (insns, to, from, NULL_RTX,
896			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
897      return;
898    }
899
900  /* Truncating multi-word to a word or less.  */
901  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
902      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
903    {
904      if (!((GET_CODE (from) == MEM
905	     && ! MEM_VOLATILE_P (from)
906	     && direct_load[(int) to_mode]
907	     && ! mode_dependent_address_p (XEXP (from, 0)))
908	    || GET_CODE (from) == REG
909	    || GET_CODE (from) == SUBREG))
910	from = force_reg (from_mode, from);
911      convert_move (to, gen_lowpart (word_mode, from), 0);
912      return;
913    }
914
915  /* Handle pointer conversion.  */			/* SPEE 900220.  */
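  /* PQImode, PSImode and PDImode are partial-integer modes that some
     targets use for pointers; conversions to and from them are funneled
     through the corresponding full-width integer mode.  */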
916  if (to_mode == PQImode)
917    {
918      if (from_mode != QImode)
919	from = convert_to_mode (QImode, from, unsignedp);
920
921#ifdef HAVE_truncqipqi2
922      if (HAVE_truncqipqi2)
923	{
924	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
925	  return;
926	}
927#endif /* HAVE_truncqipqi2 */
928      abort ();
929    }
930
931  if (from_mode == PQImode)
932    {
933      if (to_mode != QImode)
934	{
935	  from = convert_to_mode (QImode, from, unsignedp);
936	  from_mode = QImode;
937	}
938      else
939	{
940#ifdef HAVE_extendpqiqi2
941	  if (HAVE_extendpqiqi2)
942	    {
943	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
944	      return;
945	    }
946#endif /* HAVE_extendpqiqi2 */
947	  abort ();
948	}
949    }
950
951  if (to_mode == PSImode)
952    {
953      if (from_mode != SImode)
954	from = convert_to_mode (SImode, from, unsignedp);
955
956#ifdef HAVE_truncsipsi2
957      if (HAVE_truncsipsi2)
958	{
959	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
960	  return;
961	}
962#endif /* HAVE_truncsipsi2 */
963      abort ();
964    }
965
966  if (from_mode == PSImode)
967    {
968      if (to_mode != SImode)
969	{
970	  from = convert_to_mode (SImode, from, unsignedp);
971	  from_mode = SImode;
972	}
973      else
974	{
975#ifdef HAVE_extendpsisi2
976	  if (! unsignedp && HAVE_extendpsisi2)
977	    {
978	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
979	      return;
980	    }
981#endif /* HAVE_extendpsisi2 */
982#ifdef HAVE_zero_extendpsisi2
983	  if (unsignedp && HAVE_zero_extendpsisi2)
984	    {
985	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
986	      return;
987	    }
988#endif /* HAVE_zero_extendpsisi2 */
989	  abort ();
990	}
991    }
992
993  if (to_mode == PDImode)
994    {
995      if (from_mode != DImode)
996	from = convert_to_mode (DImode, from, unsignedp);
997
998#ifdef HAVE_truncdipdi2
999      if (HAVE_truncdipdi2)
1000	{
1001	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002	  return;
1003	}
1004#endif /* HAVE_truncdipdi2 */
1005      abort ();
1006    }
1007
1008  if (from_mode == PDImode)
1009    {
1010      if (to_mode != DImode)
1011	{
1012	  from = convert_to_mode (DImode, from, unsignedp);
1013	  from_mode = DImode;
1014	}
1015      else
1016	{
1017#ifdef HAVE_extendpdidi2
1018	  if (HAVE_extendpdidi2)
1019	    {
1020	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021	      return;
1022	    }
1023#endif /* HAVE_extendpdidi2 */
1024	  abort ();
1025	}
1026    }
1027
1028  /* Now follow all the conversions between integers
1029     no more than a word long.  */
1030
1031  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1032  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1033      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1034				GET_MODE_BITSIZE (from_mode)))
1035    {
1036      if (!((GET_CODE (from) == MEM
1037	     && ! MEM_VOLATILE_P (from)
1038	     && direct_load[(int) to_mode]
1039	     && ! mode_dependent_address_p (XEXP (from, 0)))
1040	    || GET_CODE (from) == REG
1041	    || GET_CODE (from) == SUBREG))
1042	from = force_reg (from_mode, from);
1043      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1044	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1045	from = copy_to_reg (from);
1046      emit_move_insn (to, gen_lowpart (to_mode, from));
1047      return;
1048    }
1049
1050  /* Handle extension.  */
1051  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1052    {
1053      /* Convert directly if that works.  */
1054      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1055	  != CODE_FOR_nothing)
1056	{
1057	  if (flag_force_mem)
1058	    from = force_not_mem (from);
1059
1060	  emit_unop_insn (code, to, from, equiv_code);
1061	  return;
1062	}
1063      else
1064	{
1065	  enum machine_mode intermediate;
1066	  rtx tmp;
1067	  tree shift_amount;
1068
1069	  /* Search for a mode to convert via.  */
1070	  for (intermediate = from_mode; intermediate != VOIDmode;
1071	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1072	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1073		  != CODE_FOR_nothing)
1074		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1075		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1076					       GET_MODE_BITSIZE (intermediate))))
1077		&& (can_extend_p (intermediate, from_mode, unsignedp)
1078		    != CODE_FOR_nothing))
1079	      {
1080		convert_move (to, convert_to_mode (intermediate, from,
1081						   unsignedp), unsignedp);
1082		return;
1083	      }
1084
1085	  /* No suitable intermediate mode.
1086	     Generate what we need with	shifts.  */
1087	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1088				      - GET_MODE_BITSIZE (from_mode), 0);
1089	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1090	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1091			      to, unsignedp);
1092	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1093			      to, unsignedp);
1094	  if (tmp != to)
1095	    emit_move_insn (to, tmp);
1096	  return;
1097	}
1098    }
1099
1100  /* Support special truncate insns for certain modes.  */
1101
1102  if (from_mode == DImode && to_mode == SImode)
1103    {
1104#ifdef HAVE_truncdisi2
1105      if (HAVE_truncdisi2)
1106	{
1107	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1108	  return;
1109	}
1110#endif
1111      convert_move (to, force_reg (from_mode, from), unsignedp);
1112      return;
1113    }
1114
1115  if (from_mode == DImode && to_mode == HImode)
1116    {
1117#ifdef HAVE_truncdihi2
1118      if (HAVE_truncdihi2)
1119	{
1120	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1121	  return;
1122	}
1123#endif
1124      convert_move (to, force_reg (from_mode, from), unsignedp);
1125      return;
1126    }
1127
1128  if (from_mode == DImode && to_mode == QImode)
1129    {
1130#ifdef HAVE_truncdiqi2
1131      if (HAVE_truncdiqi2)
1132	{
1133	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1134	  return;
1135	}
1136#endif
1137      convert_move (to, force_reg (from_mode, from), unsignedp);
1138      return;
1139    }
1140
1141  if (from_mode == SImode && to_mode == HImode)
1142    {
1143#ifdef HAVE_truncsihi2
1144      if (HAVE_truncsihi2)
1145	{
1146	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1147	  return;
1148	}
1149#endif
1150      convert_move (to, force_reg (from_mode, from), unsignedp);
1151      return;
1152    }
1153
1154  if (from_mode == SImode && to_mode == QImode)
1155    {
1156#ifdef HAVE_truncsiqi2
1157      if (HAVE_truncsiqi2)
1158	{
1159	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1160	  return;
1161	}
1162#endif
1163      convert_move (to, force_reg (from_mode, from), unsignedp);
1164      return;
1165    }
1166
1167  if (from_mode == HImode && to_mode == QImode)
1168    {
1169#ifdef HAVE_trunchiqi2
1170      if (HAVE_trunchiqi2)
1171	{
1172	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1173	  return;
1174	}
1175#endif
1176      convert_move (to, force_reg (from_mode, from), unsignedp);
1177      return;
1178    }
1179
1180  if (from_mode == TImode && to_mode == DImode)
1181    {
1182#ifdef HAVE_trunctidi2
1183      if (HAVE_trunctidi2)
1184	{
1185	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1186	  return;
1187	}
1188#endif
1189      convert_move (to, force_reg (from_mode, from), unsignedp);
1190      return;
1191    }
1192
1193  if (from_mode == TImode && to_mode == SImode)
1194    {
1195#ifdef HAVE_trunctisi2
1196      if (HAVE_trunctisi2)
1197	{
1198	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1199	  return;
1200	}
1201#endif
1202      convert_move (to, force_reg (from_mode, from), unsignedp);
1203      return;
1204    }
1205
1206  if (from_mode == TImode && to_mode == HImode)
1207    {
1208#ifdef HAVE_trunctihi2
1209      if (HAVE_trunctihi2)
1210	{
1211	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1212	  return;
1213	}
1214#endif
1215      convert_move (to, force_reg (from_mode, from), unsignedp);
1216      return;
1217    }
1218
1219  if (from_mode == TImode && to_mode == QImode)
1220    {
1221#ifdef HAVE_trunctiqi2
1222      if (HAVE_trunctiqi2)
1223	{
1224	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1225	  return;
1226	}
1227#endif
1228      convert_move (to, force_reg (from_mode, from), unsignedp);
1229      return;
1230    }
1231
1232  /* Handle truncation of volatile memrefs, and so on;
1233     the things that couldn't be truncated directly,
1234     and for which there was no special instruction.  */
1235  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1236    {
1237      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1238      emit_move_insn (to, temp);
1239      return;
1240    }
1241
1242  /* Mode combination is not recognized.  */
1243  abort ();
1244}
1245
1246/* Return an rtx for a value that would result
1247   from converting X to mode MODE.
1248   Both X and MODE may be floating, or both integer.
1249   UNSIGNEDP is nonzero if X is an unsigned value.
1250   This can be done by referring to a part of X in place
1251   or by copying to a new temporary with conversion.
1252
1253   This function *must not* call protect_from_queue
1254   except when putting X into an insn (in which case convert_move does it).  */
1255
1256rtx
1257convert_to_mode (mode, x, unsignedp)
1258     enum machine_mode mode;
1259     rtx x;
1260     int unsignedp;
1261{
1262  return convert_modes (mode, VOIDmode, x, unsignedp);
1263}
1264
1265/* Return an rtx for a value that would result
1266   from converting X from mode OLDMODE to mode MODE.
1267   Both modes may be floating, or both integer.
1268   UNSIGNEDP is nonzero if X is an unsigned value.
1269
1270   This can be done by referring to a part of X in place
1271   or by copying to a new temporary with conversion.
1272
1273   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1274
1275   This function *must not* call protect_from_queue
1276   except when putting X into an insn (in which case convert_move does it).  */
1277
1278rtx
1279convert_modes (mode, oldmode, x, unsignedp)
1280     enum machine_mode mode, oldmode;
1281     rtx x;
1282     int unsignedp;
1283{
1284  rtx temp;
1285
1286  /* If FROM is a SUBREG that indicates that we have already done at least
1287     the required extension, strip it.  */
1288
1289  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1290      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1291      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1292    x = gen_lowpart (mode, x);
1293
1294  if (GET_MODE (x) != VOIDmode)
1295    oldmode = GET_MODE (x);
1296
1297  if (mode == oldmode)
1298    return x;
1299
1300  /* There is one case that we must handle specially: If we are converting
1301     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1302     we are to interpret the constant as unsigned, gen_lowpart will do
1303     the wrong thing if the constant appears negative.  What we want to do is
1304     make the high-order word of the constant zero, not all ones.  */
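  /* For example, with a 32-bit HOST_WIDE_INT, converting the CONST_INT -1
     unsigned into a 64-bit mode must produce 0x00000000ffffffff (zero high
     word), which is what the immed_double_const call below yields.  */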
1305
1306  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1307      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1308      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1309    {
1310      HOST_WIDE_INT val = INTVAL (x);
1311
1312      if (oldmode != VOIDmode
1313	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1314	{
1315	  int width = GET_MODE_BITSIZE (oldmode);
1316
1317	  /* We need to zero extend VAL.  */
1318	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319	}
1320
1321      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322    }
1323
1324  /* We can do this with a gen_lowpart if both desired and current modes
1325     are integer, and this is either a constant integer, a register, or a
1326     non-volatile MEM.  Except for the constant case where MODE is no
1327     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1328
1329  if ((GET_CODE (x) == CONST_INT
1330       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1331      || (GET_MODE_CLASS (mode) == MODE_INT
1332	  && GET_MODE_CLASS (oldmode) == MODE_INT
1333	  && (GET_CODE (x) == CONST_DOUBLE
1334	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1335		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1336		       && direct_load[(int) mode])
1337		      || (GET_CODE (x) == REG
1338			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1339						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1340    {
1341      /* ?? If we don't know OLDMODE, we have to assume here that
1342	 X does not need sign- or zero-extension.   This may not be
1343	 the case, but it's the best we can do.  */
1344      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1345	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1346	{
1347	  HOST_WIDE_INT val = INTVAL (x);
1348	  int width = GET_MODE_BITSIZE (oldmode);
1349
1350	  /* We must sign or zero-extend in this case.  Start by
1351	     zero-extending, then sign extend if we need to.  */
1352	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1353	  if (! unsignedp
1354	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1355	    val |= (HOST_WIDE_INT) (-1) << width;
1356
1357	  return GEN_INT (trunc_int_for_mode (val, mode));
1358	}
1359
1360      return gen_lowpart (mode, x);
1361    }
1362
1363  temp = gen_reg_rtx (mode);
1364  convert_move (temp, x, unsignedp);
1365  return temp;
1366}
1367
1368/* This macro is used to determine the largest unit size that
1369   move_by_pieces can use.  */
1370
1371/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1372   move efficiently, as opposed to  MOVE_MAX which is the maximum
1373   number of bytes we can move with a single instruction.  */
1374
1375#ifndef MOVE_MAX_PIECES
1376#define MOVE_MAX_PIECES   MOVE_MAX
1377#endif
1378
1379/* Generate several move instructions to copy LEN bytes from block FROM to
1380   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1381   and TO through protect_from_queue before calling.
1382
1383   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1384   used to push FROM to the stack.
1385
1386   ALIGN is maximum alignment we can assume.  */
1387
1388void
1389move_by_pieces (to, from, len, align)
1390     rtx to, from;
1391     unsigned HOST_WIDE_INT len;
1392     unsigned int align;
1393{
1394  struct move_by_pieces data;
1395  rtx to_addr, from_addr = XEXP (from, 0);
1396  unsigned int max_size = MOVE_MAX_PIECES + 1;
1397  enum machine_mode mode = VOIDmode, tmode;
1398  enum insn_code icode;
1399
1400  data.offset = 0;
1401  data.from_addr = from_addr;
1402  if (to)
1403    {
1404      to_addr = XEXP (to, 0);
1405      data.to = to;
1406      data.autinc_to
1407	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1408	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1409      data.reverse
1410	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1411    }
1412  else
1413    {
1414      to_addr = NULL_RTX;
1415      data.to = NULL_RTX;
1416      data.autinc_to = 1;
1417#ifdef STACK_GROWS_DOWNWARD
1418      data.reverse = 1;
1419#else
1420      data.reverse = 0;
1421#endif
1422    }
1423  data.to_addr = to_addr;
1424  data.from = from;
1425  data.autinc_from
1426    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1427       || GET_CODE (from_addr) == POST_INC
1428       || GET_CODE (from_addr) == POST_DEC);
1429
1430  data.explicit_inc_from = 0;
1431  data.explicit_inc_to = 0;
1432  if (data.reverse) data.offset = len;
1433  data.len = len;
1434
1435  /* If copying requires more than two move insns,
1436     copy addresses to registers (to make displacements shorter)
1437     and use post-increment if available.  */
1438  if (!(data.autinc_from && data.autinc_to)
1439      && move_by_pieces_ninsns (len, align) > 2)
1440    {
1441      /* Find the mode of the largest move...  */
1442      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1443	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1444	if (GET_MODE_SIZE (tmode) < max_size)
1445	  mode = tmode;
1446
1447      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1448	{
1449	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1450	  data.autinc_from = 1;
1451	  data.explicit_inc_from = -1;
1452	}
1453      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1454	{
1455	  data.from_addr = copy_addr_to_reg (from_addr);
1456	  data.autinc_from = 1;
1457	  data.explicit_inc_from = 1;
1458	}
1459      if (!data.autinc_from && CONSTANT_P (from_addr))
1460	data.from_addr = copy_addr_to_reg (from_addr);
1461      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1462	{
1463	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1464	  data.autinc_to = 1;
1465	  data.explicit_inc_to = -1;
1466	}
1467      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1468	{
1469	  data.to_addr = copy_addr_to_reg (to_addr);
1470	  data.autinc_to = 1;
1471	  data.explicit_inc_to = 1;
1472	}
1473      if (!data.autinc_to && CONSTANT_P (to_addr))
1474	data.to_addr = copy_addr_to_reg (to_addr);
1475    }
1476
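  /* If unaligned accesses are not slow, or the alignment already covers
     the widest move we will attempt, pretend the block is aligned to
     MOVE_MAX bytes so that the widest modes pass the alignment test in
     the loop below.  */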
1477  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1478      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1479    align = MOVE_MAX * BITS_PER_UNIT;
1480
1481  /* First move what we can in the largest integer mode, then go to
1482     successively smaller modes.  */
1483
1484  while (max_size > 1)
1485    {
1486      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1487	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1488	if (GET_MODE_SIZE (tmode) < max_size)
1489	  mode = tmode;
1490
1491      if (mode == VOIDmode)
1492	break;
1493
1494      icode = mov_optab->handlers[(int) mode].insn_code;
1495      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1496	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1497
1498      max_size = GET_MODE_SIZE (mode);
1499    }
1500
1501  /* The code above should have handled everything.  */
1502  if (data.len > 0)
1503    abort ();
1504}
1505
1506/* Return number of insns required to move L bytes by pieces.
1507   ALIGN (in bits) is maximum alignment we can assume.  */
1508
1509static unsigned HOST_WIDE_INT
1510move_by_pieces_ninsns (l, align)
1511     unsigned HOST_WIDE_INT l;
1512     unsigned int align;
1513{
1514  unsigned HOST_WIDE_INT n_insns = 0;
1515  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1516
1517  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1518      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1519    align = MOVE_MAX * BITS_PER_UNIT;
1520
1521  while (max_size > 1)
1522    {
1523      enum machine_mode mode = VOIDmode, tmode;
1524      enum insn_code icode;
1525
1526      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1527	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1528	if (GET_MODE_SIZE (tmode) < max_size)
1529	  mode = tmode;
1530
1531      if (mode == VOIDmode)
1532	break;
1533
1534      icode = mov_optab->handlers[(int) mode].insn_code;
1535      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1536	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1537
1538      max_size = GET_MODE_SIZE (mode);
1539    }
1540
1541  if (l)
1542    abort ();
1543  return n_insns;
1544}
1545
1546/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1547   with move instructions for mode MODE.  GENFUN is the gen_... function
1548   to make a move insn for that mode.  DATA has all the other info.  */
1549
1550static void
1551move_by_pieces_1 (genfun, mode, data)
1552     rtx (*genfun) PARAMS ((rtx, ...));
1553     enum machine_mode mode;
1554     struct move_by_pieces *data;
1555{
1556  unsigned int size = GET_MODE_SIZE (mode);
1557  rtx to1 = NULL_RTX, from1;
1558
1559  while (data->len >= size)
1560    {
1561      if (data->reverse)
1562	data->offset -= size;
1563
1564      if (data->to)
1565	{
1566	  if (data->autinc_to)
1567	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1568					     data->offset);
1569	  else
1570	    to1 = adjust_address (data->to, mode, data->offset);
1571	}
1572
1573      if (data->autinc_from)
1574	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1575					   data->offset);
1576      else
1577	from1 = adjust_address (data->from, mode, data->offset);
1578
1579      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1580	emit_insn (gen_add2_insn (data->to_addr,
1581				  GEN_INT (-(HOST_WIDE_INT)size)));
1582      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1583	emit_insn (gen_add2_insn (data->from_addr,
1584				  GEN_INT (-(HOST_WIDE_INT)size)));
1585
1586      if (data->to)
1587	emit_insn ((*genfun) (to1, from1));
1588      else
1589	{
1590#ifdef PUSH_ROUNDING
1591	  emit_single_push_insn (mode, from1, NULL);
1592#else
1593	  abort ();
1594#endif
1595	}
1596
1597      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1598	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1599      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1600	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1601
1602      if (! data->reverse)
1603	data->offset += size;
1604
1605      data->len -= size;
1606    }
1607}
1608
1609/* Emit code to move a block Y to a block X.
1610   This may be done with string-move instructions,
1611   with multiple scalar move instructions, or with a library call.
1612
1613   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1614   with mode BLKmode.
1615   SIZE is an rtx that says how long they are.
1616   ALIGN is the maximum alignment we can assume they have.
1617
1618   Return the address of the new block, if memcpy is called and returns it,
1619   0 otherwise.  */
1620
1621rtx
1622emit_block_move (x, y, size)
1623     rtx x, y;
1624     rtx size;
1625{
1626  rtx retval = 0;
1627#ifdef TARGET_MEM_FUNCTIONS
1628  static tree fn;
1629  tree call_expr, arg_list;
1630#endif
1631  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1632
1633  if (GET_MODE (x) != BLKmode)
1634    abort ();
1635
1636  if (GET_MODE (y) != BLKmode)
1637    abort ();
1638
1639  x = protect_from_queue (x, 1);
1640  y = protect_from_queue (y, 0);
1641  size = protect_from_queue (size, 0);
1642
1643  if (GET_CODE (x) != MEM)
1644    abort ();
1645  if (GET_CODE (y) != MEM)
1646    abort ();
1647  if (size == 0)
1648    abort ();
1649
1650  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1651    move_by_pieces (x, y, INTVAL (size), align);
1652  else
1653    {
1654      /* Try the most limited insn first, because there's no point
1655	 including more than one in the machine description unless
1656	 the more limited one has some advantage.  */
1657
1658      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1659      enum machine_mode mode;
1660
1661      /* Since this is a move insn, we don't care about volatility.  */
1662      volatile_ok = 1;
1663
1664      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1665	   mode = GET_MODE_WIDER_MODE (mode))
1666	{
1667	  enum insn_code code = movstr_optab[(int) mode];
1668	  insn_operand_predicate_fn pred;
1669
1670	  if (code != CODE_FOR_nothing
1671	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1672		 here because if SIZE is less than the mode mask, as it is
1673		 returned by the macro, it will definitely be less than the
1674		 actual mode mask.  */
1675	      && ((GET_CODE (size) == CONST_INT
1676		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1677		       <= (GET_MODE_MASK (mode) >> 1)))
1678		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1679	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1680		  || (*pred) (x, BLKmode))
1681	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1682		  || (*pred) (y, BLKmode))
1683	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1684		  || (*pred) (opalign, VOIDmode)))
1685	    {
1686	      rtx op2;
1687	      rtx last = get_last_insn ();
1688	      rtx pat;
1689
1690	      op2 = convert_to_mode (mode, size, 1);
1691	      pred = insn_data[(int) code].operand[2].predicate;
1692	      if (pred != 0 && ! (*pred) (op2, mode))
1693		op2 = copy_to_mode_reg (mode, op2);
1694
1695	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1696	      if (pat)
1697		{
1698		  emit_insn (pat);
1699		  volatile_ok = 0;
1700		  return 0;
1701		}
1702	      else
1703		delete_insns_since (last);
1704	    }
1705	}
1706
1707      volatile_ok = 0;
1708
1709      /* X, Y, or SIZE may have been passed through protect_from_queue.
1710
1711	 It is unsafe to save the value generated by protect_from_queue
1712	 and reuse it later.  Consider what happens if emit_queue is
1713	 called before the return value from protect_from_queue is used.
1714
1715	 Expansion of the CALL_EXPR below will call emit_queue before
1716	 we are finished emitting RTL for argument setup.  So if we are
1717	 not careful we could get the wrong value for an argument.
1718
1719	 To avoid this problem we go ahead and emit code to copy X, Y &
1720	 SIZE into new pseudos.  We can then place those new pseudos
1721	 into an RTL_EXPR and use them later, even after a call to
1722	 emit_queue.
1723
1724	 Note this is not strictly needed for library calls since they
1725	 do not call emit_queue before loading their arguments.  However,
1726	 we may need to have library calls call emit_queue in the future
1727	 since failing to do so could cause problems for targets which
1728	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1729      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1730      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1731
1732#ifdef TARGET_MEM_FUNCTIONS
1733      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1734#else
1735      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1736			      TREE_UNSIGNED (integer_type_node));
1737      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1738#endif
1739
1740#ifdef TARGET_MEM_FUNCTIONS
1741      /* It is incorrect to use the libcall calling conventions to call
1742	 memcpy in this context.
1743
1744	 This could be a user call to memcpy and the user may wish to
1745	 examine the return value from memcpy.
1746
1747	 For targets where libcalls and normal calls have different conventions
1748	 for returning pointers, we could end up generating incorrect code.
1749
1750	 So instead of using a libcall sequence we build up a suitable
1751	 CALL_EXPR and expand the call in the normal fashion.  */
1752      if (fn == NULL_TREE)
1753	{
1754	  tree fntype;
1755
1756	  /* This was copied from except.c, I don't know if all this is
1757	     necessary in this context or not.  */
1758	  fn = get_identifier ("memcpy");
1759	  fntype = build_pointer_type (void_type_node);
1760	  fntype = build_function_type (fntype, NULL_TREE);
1761	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1762	  ggc_add_tree_root (&fn, 1);
1763	  DECL_EXTERNAL (fn) = 1;
1764	  TREE_PUBLIC (fn) = 1;
1765	  DECL_ARTIFICIAL (fn) = 1;
1766	  TREE_NOTHROW (fn) = 1;
1767	  make_decl_rtl (fn, NULL);
1768	  assemble_external (fn);
1769	}
1770
1771      /* We need to make an argument list for the function call.
1772
1773	 memcpy has three arguments, the first two are void * addresses and
1774	 the last is a size_t byte count for the copy.  */
1775      arg_list
1776	= build_tree_list (NULL_TREE,
1777			   make_tree (build_pointer_type (void_type_node), x));
1778      TREE_CHAIN (arg_list)
1779	= build_tree_list (NULL_TREE,
1780			   make_tree (build_pointer_type (void_type_node), y));
1781      TREE_CHAIN (TREE_CHAIN (arg_list))
1782	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1783      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1784
1785      /* Now we have to build up the CALL_EXPR itself.  */
1786      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1787      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1788			 call_expr, arg_list, NULL_TREE);
1789      TREE_SIDE_EFFECTS (call_expr) = 1;
1790
1791      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1792#else
1793      emit_library_call (bcopy_libfunc, LCT_NORMAL,
1794			 VOIDmode, 3, y, Pmode, x, Pmode,
1795			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1796					  TREE_UNSIGNED (integer_type_node)),
1797			 TYPE_MODE (integer_type_node));
1798#endif
1799
1800      /* If we are initializing a readonly value, show the above call
1801	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
1802	 from a loop.  */
1803      if (RTX_UNCHANGING_P (x))
1804	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1805    }
1806
1807  return retval;
1808}
1809
1810/* Copy all or part of a value X into registers starting at REGNO.
1811   The number of registers to be filled is NREGS.  */
1812
1813void
1814move_block_to_reg (regno, x, nregs, mode)
1815     int regno;
1816     rtx x;
1817     int nregs;
1818     enum machine_mode mode;
1819{
1820  int i;
1821#ifdef HAVE_load_multiple
1822  rtx pat;
1823  rtx last;
1824#endif
1825
1826  if (nregs == 0)
1827    return;
1828
1829  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1830    x = validize_mem (force_const_mem (mode, x));
1831
1832  /* See if the machine can do this with a load multiple insn.  */
1833#ifdef HAVE_load_multiple
1834  if (HAVE_load_multiple)
1835    {
1836      last = get_last_insn ();
1837      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1838			       GEN_INT (nregs));
1839      if (pat)
1840	{
1841	  emit_insn (pat);
1842	  return;
1843	}
1844      else
1845	delete_insns_since (last);
1846    }
1847#endif
1848
1849  for (i = 0; i < nregs; i++)
1850    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1851		    operand_subword_force (x, i, mode));
1852}
1853
1854/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1855   The number of registers to be filled is NREGS.  SIZE indicates the number
1856   of bytes in the object X.  */
1857
1858void
1859move_block_from_reg (regno, x, nregs, size)
1860     int regno;
1861     rtx x;
1862     int nregs;
1863     int size;
1864{
1865  int i;
1866#ifdef HAVE_store_multiple
1867  rtx pat;
1868  rtx last;
1869#endif
1870  enum machine_mode mode;
1871
1872  if (nregs == 0)
1873    return;
1874
1875  /* If SIZE is that of a mode no bigger than a word, just use that
1876     mode's store operation.  */
1877  if (size <= UNITS_PER_WORD
1878      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1879      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1880    {
1881      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1882      return;
1883    }
1884
1885  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1886     to the left before storing to memory.  Note that the previous test
1887     doesn't handle all cases (e.g. SIZE == 3).  */
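  /* For example, with UNITS_PER_WORD == 4 and SIZE == 3, the register value
     is shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits below, so that the
     three meaningful bytes end up leftmost in the word before the store.  */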
1888  if (size < UNITS_PER_WORD
1889      && BYTES_BIG_ENDIAN
1890      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1891    {
1892      rtx tem = operand_subword (x, 0, 1, BLKmode);
1893      rtx shift;
1894
1895      if (tem == 0)
1896	abort ();
1897
1898      shift = expand_shift (LSHIFT_EXPR, word_mode,
1899			    gen_rtx_REG (word_mode, regno),
1900			    build_int_2 ((UNITS_PER_WORD - size)
1901					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1902      emit_move_insn (tem, shift);
1903      return;
1904    }
1905
1906  /* See if the machine can do this with a store multiple insn.  */
1907#ifdef HAVE_store_multiple
1908  if (HAVE_store_multiple)
1909    {
1910      last = get_last_insn ();
1911      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1912				GEN_INT (nregs));
1913      if (pat)
1914	{
1915	  emit_insn (pat);
1916	  return;
1917	}
1918      else
1919	delete_insns_since (last);
1920    }
1921#endif
1922
1923  for (i = 0; i < nregs; i++)
1924    {
1925      rtx tem = operand_subword (x, i, 1, BLKmode);
1926
1927      if (tem == 0)
1928	abort ();
1929
1930      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1931    }
1932}
1933
1934/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1935   registers represented by a PARALLEL.  SSIZE represents the total size of
1936   block SRC in bytes, or -1 if not known.  */
1937/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1938   the balance will be in what would be the low-order memory addresses, i.e.
1939   left justified for big endian, right justified for little endian.  This
1940   happens to be true for the targets currently using this support.  If this
1941   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942   would be needed.  */
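/* As an illustrative (hypothetical) example, a DST for a 16-byte value
   passed in two 8-byte registers might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a destination register with the byte offset of
   the piece of SRC it is to receive.  */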
1943
1944void
1945emit_group_load (dst, orig_src, ssize)
1946     rtx dst, orig_src;
1947     int ssize;
1948{
1949  rtx *tmps, src;
1950  int start, i;
1951
1952  if (GET_CODE (dst) != PARALLEL)
1953    abort ();
1954
1955  /* Check for a NULL entry, used to indicate that the parameter goes
1956     both on the stack and in registers.  */
1957  if (XEXP (XVECEXP (dst, 0, 0), 0))
1958    start = 0;
1959  else
1960    start = 1;
1961
1962  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1963
1964  /* Process the pieces.  */
1965  for (i = start; i < XVECLEN (dst, 0); i++)
1966    {
1967      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1968      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1969      unsigned int bytelen = GET_MODE_SIZE (mode);
1970      int shift = 0;
1971
1972      /* Handle trailing fragments that run over the size of the struct.  */
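      /* For instance, with SSIZE == 6, BYTEPOS == 4 and a 4-byte MODE, only
	 2 bytes remain: SHIFT becomes 16 so that, on a big-endian target,
	 the fragment loaded below can be left-justified in its register.  */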
1973      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1974	{
1975	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1976	  bytelen = ssize - bytepos;
1977	  if (bytelen <= 0)
1978	    abort ();
1979	}
1980
1981      /* If we won't be loading directly from memory, protect the real source
1982	 from strange tricks we might play; but make sure that the source can
1983	 be loaded directly into the destination.  */
1984      src = orig_src;
1985      if (GET_CODE (orig_src) != MEM
1986	  && (!CONSTANT_P (orig_src)
1987	      || (GET_MODE (orig_src) != mode
1988		  && GET_MODE (orig_src) != VOIDmode)))
1989	{
1990	  if (GET_MODE (orig_src) == VOIDmode)
1991	    src = gen_reg_rtx (mode);
1992	  else
1993	    src = gen_reg_rtx (GET_MODE (orig_src));
1994
1995	  emit_move_insn (src, orig_src);
1996	}
1997
1998      /* Optimize the access just a bit.  */
1999      if (GET_CODE (src) == MEM
2000	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2001	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2002	  && bytelen == GET_MODE_SIZE (mode))
2003	{
2004	  tmps[i] = gen_reg_rtx (mode);
2005	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2006	}
2007      else if (GET_CODE (src) == CONCAT)
2008	{
2009	  if ((bytepos == 0
2010	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2012		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2013	    {
2014	      tmps[i] = XEXP (src, bytepos != 0);
2015	      if (! CONSTANT_P (tmps[i])
2016		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2017		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2018					     0, 1, NULL_RTX, mode, mode, ssize);
2019	    }
2020	  else if (bytepos == 0)
2021	    {
2022	      rtx mem = assign_stack_temp (GET_MODE (src),
2023					   GET_MODE_SIZE (GET_MODE (src)), 0);
2024	      emit_move_insn (mem, src);
2025	      tmps[i] = adjust_address (mem, mode, 0);
2026	    }
2027	  else
2028	    abort ();
2029	}
2030      else if (CONSTANT_P (src)
2031	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2032	tmps[i] = src;
2033      else
2034	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2035				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2036				     mode, mode, ssize);
2037
2038      if (BYTES_BIG_ENDIAN && shift)
2039	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2040		      tmps[i], 0, OPTAB_WIDEN);
2041    }
2042
2043  emit_queue ();
2044
2045  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2046  for (i = start; i < XVECLEN (dst, 0); i++)
2047    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2048}
2049
2050/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2051   registers represented by a PARALLEL.  SSIZE represents the total size of
2052   block DST, or -1 if not known.  */
2053
2054void
2055emit_group_store (orig_dst, src, ssize)
2056     rtx orig_dst, src;
2057     int ssize;
2058{
2059  rtx *tmps, dst;
2060  int start, i;
2061
2062  if (GET_CODE (src) != PARALLEL)
2063    abort ();
2064
2065  /* Check for a NULL entry, used to indicate that the parameter goes
2066     both on the stack and in registers.  */
2067  if (XEXP (XVECEXP (src, 0, 0), 0))
2068    start = 0;
2069  else
2070    start = 1;
2071
2072  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2073
2074  /* Copy the (probable) hard regs into pseudos.  */
2075  for (i = start; i < XVECLEN (src, 0); i++)
2076    {
2077      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2078      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2079      emit_move_insn (tmps[i], reg);
2080    }
2081  emit_queue ();
2082
2083  /* If we won't be storing directly into memory, protect the real destination
2084     from strange tricks we might play.  */
2085  dst = orig_dst;
2086  if (GET_CODE (dst) == PARALLEL)
2087    {
2088      rtx temp;
2089
2090      /* We can get a PARALLEL dst if there is a conditional expression in
2091	 a return statement.  In that case, the dst and src are the same,
2092	 so no action is necessary.  */
2093      if (rtx_equal_p (dst, src))
2094	return;
2095
2096      /* It is unclear if we can ever reach here, but we may as well handle
2097	 it.  Allocate a temporary, and split this into a store/load to/from
2098	 the temporary.  */
2099
2100      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2101      emit_group_store (temp, src, ssize);
2102      emit_group_load (dst, temp, ssize);
2103      return;
2104    }
2105  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2106    {
2107      dst = gen_reg_rtx (GET_MODE (orig_dst));
2108      /* Make life a bit easier for combine.  */
2109      emit_move_insn (dst, const0_rtx);
2110    }
2111
2112  /* Process the pieces.  */
2113  for (i = start; i < XVECLEN (src, 0); i++)
2114    {
2115      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2116      enum machine_mode mode = GET_MODE (tmps[i]);
2117      unsigned int bytelen = GET_MODE_SIZE (mode);
2118      rtx dest = dst;
2119
2120      /* Handle trailing fragments that run over the size of the struct.  */
2121      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2122	{
2123	  if (BYTES_BIG_ENDIAN)
2124	    {
2125	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2126	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2127			    tmps[i], 0, OPTAB_WIDEN);
2128	    }
2129	  bytelen = ssize - bytepos;
2130	}
2131
2132      if (GET_CODE (dst) == CONCAT)
2133	{
2134	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2135	    dest = XEXP (dst, 0);
2136	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2137	    {
2138	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2139	      dest = XEXP (dst, 1);
2140	    }
2141	  else
2142	    abort ();
2143	}
2144
2145      /* Optimize the access just a bit.  */
2146      if (GET_CODE (dest) == MEM
2147	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2148	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2149	  && bytelen == GET_MODE_SIZE (mode))
2150	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2151      else
2152	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2153			 mode, tmps[i], ssize);
2154    }
2155
2156  emit_queue ();
2157
2158  /* Copy from the pseudo into the (probable) hard reg.  */
2159  if (GET_CODE (dst) == REG)
2160    emit_move_insn (orig_dst, dst);
2161}
2162
2163/* Generate code to copy a BLKmode object of TYPE out of a
2164   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2165   is null, a stack temporary is created.  TGTBLK is returned.
2166
2167   The primary purpose of this routine is to handle functions
2168   that return BLKmode structures in registers.  Some machines
2169   (the PA for example) want to return all small structures
2170   in registers regardless of the structure's alignment.  */
2171
2172rtx
2173copy_blkmode_from_reg (tgtblk, srcreg, type)
2174     rtx tgtblk;
2175     rtx srcreg;
2176     tree type;
2177{
2178  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2179  rtx src = NULL, dst = NULL;
2180  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2181  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2182
2183  if (tgtblk == 0)
2184    {
2185      tgtblk = assign_temp (build_qualified_type (type,
2186						  (TYPE_QUALS (type)
2187						   | TYPE_QUAL_CONST)),
2188			    0, 1, 1);
2189      preserve_temp_slots (tgtblk);
2190    }
2191
2192  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2193     into a new pseudo which is a full word.
2194
2195     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2196     the wrong part of the register gets copied so we fake a type conversion
2197     in place.  */
2198  if (GET_MODE (srcreg) != BLKmode
2199      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2200    {
2201      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2202	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2203      else
2204	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2205    }
2206
2207  /* Structures whose size is not a multiple of a word are aligned
2208     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2209     machine, this means we must skip the empty high order bytes when
2210     calculating the bit offset.  */
2211  if (BYTES_BIG_ENDIAN
2212      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2213      && bytes % UNITS_PER_WORD)
2214    big_endian_correction
2215      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
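  /* E.g. a 3-byte structure on a 32-bit big-endian target gets a correction
     of 32 - 3 * 8 == 8 bits, skipping the one empty high-order byte when
     extracting bits below.  */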
2216
2217	  /* Copy the structure BITSIZE bits at a time.
2218
2219     We could probably emit more efficient code for machines which do not use
2220     strict alignment, but it doesn't seem worth the effort at the current
2221     time.  */
2222  for (bitpos = 0, xbitpos = big_endian_correction;
2223       bitpos < bytes * BITS_PER_UNIT;
2224       bitpos += bitsize, xbitpos += bitsize)
2225    {
2226      /* We need a new source operand each time xbitpos is on a
2227	 word boundary and when xbitpos == big_endian_correction
2228	 (the first time through).  */
2229      if (xbitpos % BITS_PER_WORD == 0
2230	  || xbitpos == big_endian_correction)
2231	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2232				     GET_MODE (srcreg));
2233
2234      /* We need a new destination operand each time bitpos is on
2235	 a word boundary.  */
2236      if (bitpos % BITS_PER_WORD == 0)
2237	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2238
2239      /* Use xbitpos for the source extraction (right justified) and
2240	 bitpos for the destination store (left justified).  */
2241      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2242		       extract_bit_field (src, bitsize,
2243					  xbitpos % BITS_PER_WORD, 1,
2244					  NULL_RTX, word_mode, word_mode,
2245					  BITS_PER_WORD),
2246		       BITS_PER_WORD);
2247    }
2248
2249  return tgtblk;
2250}
2251
2252/* Add a USE expression for REG to the (possibly empty) list pointed
2253   to by CALL_FUSAGE.  REG must denote a hard register.  */
2254
2255void
2256use_reg (call_fusage, reg)
2257     rtx *call_fusage, reg;
2258{
2259  if (GET_CODE (reg) != REG
2260      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2261    abort ();
2262
2263  *call_fusage
2264    = gen_rtx_EXPR_LIST (VOIDmode,
2265			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2266}
2267
2268/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2269   starting at REGNO.  All of these registers must be hard registers.  */
2270
2271void
2272use_regs (call_fusage, regno, nregs)
2273     rtx *call_fusage;
2274     int regno;
2275     int nregs;
2276{
2277  int i;
2278
2279  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2280    abort ();
2281
2282  for (i = 0; i < nregs; i++)
2283    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2284}
2285
2286/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2287   PARALLEL REGS.  This is for calls that pass values in multiple
2288   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2289
2290void
2291use_group_regs (call_fusage, regs)
2292     rtx *call_fusage;
2293     rtx regs;
2294{
2295  int i;
2296
2297  for (i = 0; i < XVECLEN (regs, 0); i++)
2298    {
2299      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2300
2301      /* A NULL entry means the parameter goes both on the stack and in
2302	 registers.  This can also be a MEM for targets that pass values
2303	 partially on the stack and partially in registers.  */
2304      if (reg != 0 && GET_CODE (reg) == REG)
2305	use_reg (call_fusage, reg);
2306    }
2307}
2308
2309
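/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a pointer
   which will be passed as argument in every CONSTFUN call; ALIGN is the
   maximum alignment we can assume.  Return nonzero if a call to
   store_by_pieces with the same arguments is expected to succeed.  */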
2310int
2311can_store_by_pieces (len, constfun, constfundata, align)
2312     unsigned HOST_WIDE_INT len;
2313     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2314     PTR constfundata;
2315     unsigned int align;
2316{
2317  unsigned HOST_WIDE_INT max_size, l;
2318  HOST_WIDE_INT offset = 0;
2319  enum machine_mode mode, tmode;
2320  enum insn_code icode;
2321  int reverse;
2322  rtx cst;
2323
2324  if (! MOVE_BY_PIECES_P (len, align))
2325    return 0;
2326
2327  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2328      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2329    align = MOVE_MAX * BITS_PER_UNIT;
2330
2331  /* We would first store what we can in the largest integer mode, then go to
2332     successively smaller modes.  */
2333
2334  for (reverse = 0;
2335       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2336       reverse++)
2337    {
2338      l = len;
2339      mode = VOIDmode;
2340      max_size = MOVE_MAX_PIECES + 1;
2341      while (max_size > 1)
2342	{
2343	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2344	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2345	    if (GET_MODE_SIZE (tmode) < max_size)
2346	      mode = tmode;
2347
2348	  if (mode == VOIDmode)
2349	    break;
2350
2351	  icode = mov_optab->handlers[(int) mode].insn_code;
2352	  if (icode != CODE_FOR_nothing
2353	      && align >= GET_MODE_ALIGNMENT (mode))
2354	    {
2355	      unsigned int size = GET_MODE_SIZE (mode);
2356
2357	      while (l >= size)
2358		{
2359		  if (reverse)
2360		    offset -= size;
2361
2362		  cst = (*constfun) (constfundata, offset, mode);
2363		  if (!LEGITIMATE_CONSTANT_P (cst))
2364		    return 0;
2365
2366		  if (!reverse)
2367		    offset += size;
2368
2369		  l -= size;
2370		}
2371	    }
2372
2373	  max_size = GET_MODE_SIZE (mode);
2374	}
2375
2376      /* The code above should have handled everything.  */
2377      if (l != 0)
2378	abort ();
2379    }
2380
2381  return 1;
2382}
2383
2384/* Generate several move instructions to store LEN bytes generated by
2385   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2386   pointer which will be passed as argument in every CONSTFUN call.
2387   ALIGN is maximum alignment we can assume.  */
2388
2389void
2390store_by_pieces (to, len, constfun, constfundata, align)
2391     rtx to;
2392     unsigned HOST_WIDE_INT len;
2393     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2394     PTR constfundata;
2395     unsigned int align;
2396{
2397  struct store_by_pieces data;
2398
2399  if (! MOVE_BY_PIECES_P (len, align))
2400    abort ();
2401  to = protect_from_queue (to, 1);
2402  data.constfun = constfun;
2403  data.constfundata = constfundata;
2404  data.len = len;
2405  data.to = to;
2406  store_by_pieces_1 (&data, align);
2407}
2408
2409/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2410   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2411   before calling.  ALIGN is maximum alignment we can assume.  */
2412
2413static void
2414clear_by_pieces (to, len, align)
2415     rtx to;
2416     unsigned HOST_WIDE_INT len;
2417     unsigned int align;
2418{
2419  struct store_by_pieces data;
2420
2421  data.constfun = clear_by_pieces_1;
2422  data.constfundata = NULL;
2423  data.len = len;
2424  data.to = to;
2425  store_by_pieces_1 (&data, align);
2426}
2427
2428/* Callback routine for clear_by_pieces.
2429   Return const0_rtx unconditionally.  */
2430
2431static rtx
2432clear_by_pieces_1 (data, offset, mode)
2433     PTR data ATTRIBUTE_UNUSED;
2434     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2435     enum machine_mode mode ATTRIBUTE_UNUSED;
2436{
2437  return const0_rtx;
2438}
2439
2440/* Subroutine of clear_by_pieces and store_by_pieces.
2441   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2442   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2443   before calling.  ALIGN is maximum alignment we can assume.  */
2444
2445static void
2446store_by_pieces_1 (data, align)
2447     struct store_by_pieces *data;
2448     unsigned int align;
2449{
2450  rtx to_addr = XEXP (data->to, 0);
2451  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2452  enum machine_mode mode = VOIDmode, tmode;
2453  enum insn_code icode;
2454
2455  data->offset = 0;
2456  data->to_addr = to_addr;
2457  data->autinc_to
2458    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2459       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2460
2461  data->explicit_inc_to = 0;
2462  data->reverse
2463    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2464  if (data->reverse)
2465    data->offset = data->len;
2466
2467  /* If storing requires more than two move insns,
2468     copy addresses to registers (to make displacements shorter)
2469     and use post-increment if available.  */
2470  if (!data->autinc_to
2471      && move_by_pieces_ninsns (data->len, align) > 2)
2472    {
2473      /* Determine the main mode we'll be using.  */
2474      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2475	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2476	if (GET_MODE_SIZE (tmode) < max_size)
2477	  mode = tmode;
2478
2479      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2480	{
2481	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2482	  data->autinc_to = 1;
2483	  data->explicit_inc_to = -1;
2484	}
2485
2486      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2487	  && ! data->autinc_to)
2488	{
2489	  data->to_addr = copy_addr_to_reg (to_addr);
2490	  data->autinc_to = 1;
2491	  data->explicit_inc_to = 1;
2492	}
2493
2494      if ( !data->autinc_to && CONSTANT_P (to_addr))
2495	data->to_addr = copy_addr_to_reg (to_addr);
2496    }
2497
2498  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2499      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2500    align = MOVE_MAX * BITS_PER_UNIT;
2501
2502  /* First store what we can in the largest integer mode, then go to
2503     successively smaller modes.  */
2504
2505  while (max_size > 1)
2506    {
2507      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2508	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2509	if (GET_MODE_SIZE (tmode) < max_size)
2510	  mode = tmode;
2511
2512      if (mode == VOIDmode)
2513	break;
2514
2515      icode = mov_optab->handlers[(int) mode].insn_code;
2516      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2517	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2518
2519      max_size = GET_MODE_SIZE (mode);
2520    }
2521
2522  /* The code above should have handled everything.  */
2523  if (data->len != 0)
2524    abort ();
2525}
2526
2527/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2528   with move instructions for mode MODE.  GENFUN is the gen_... function
2529   to make a move insn for that mode.  DATA has all the other info.  */
2530
2531static void
2532store_by_pieces_2 (genfun, mode, data)
2533     rtx (*genfun) PARAMS ((rtx, ...));
2534     enum machine_mode mode;
2535     struct store_by_pieces *data;
2536{
2537  unsigned int size = GET_MODE_SIZE (mode);
2538  rtx to1, cst;
2539
2540  while (data->len >= size)
2541    {
2542      if (data->reverse)
2543	data->offset -= size;
2544
2545      if (data->autinc_to)
2546	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2547					 data->offset);
2548      else
2549	to1 = adjust_address (data->to, mode, data->offset);
2550
2551      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2552	emit_insn (gen_add2_insn (data->to_addr,
2553				  GEN_INT (-(HOST_WIDE_INT) size)));
2554
2555      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2556      emit_insn ((*genfun) (to1, cst));
2557
2558      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2559	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2560
2561      if (! data->reverse)
2562	data->offset += size;
2563
2564      data->len -= size;
2565    }
2566}
2567
2568/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2569   its length in bytes.  */
2570
2571rtx
2572clear_storage (object, size)
2573     rtx object;
2574     rtx size;
2575{
2576#ifdef TARGET_MEM_FUNCTIONS
2577  static tree fn;
2578  tree call_expr, arg_list;
2579#endif
2580  rtx retval = 0;
2581  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2582			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2583
2584  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2585     just move a zero.  Otherwise, do this a piece at a time.  */
2586  if (GET_MODE (object) != BLKmode
2587      && GET_CODE (size) == CONST_INT
2588      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2589    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2590  else
2591    {
2592      object = protect_from_queue (object, 1);
2593      size = protect_from_queue (size, 0);
2594
2595      if (GET_CODE (size) == CONST_INT
2596	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2597	clear_by_pieces (object, INTVAL (size), align);
2598      else
2599	{
2600	  /* Try the most limited insn first, because there's no point
2601	     including more than one in the machine description unless
2602	     the more limited one has some advantage.  */
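	  /* That is, the loop below walks the integer modes from narrowest
	     to widest, so a clrstr pattern limited to small block sizes is
	     preferred whenever its size and operand predicates accept the
	     arguments at hand.  */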
2603
2604	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2605	  enum machine_mode mode;
2606
2607	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2608	       mode = GET_MODE_WIDER_MODE (mode))
2609	    {
2610	      enum insn_code code = clrstr_optab[(int) mode];
2611	      insn_operand_predicate_fn pred;
2612
2613	      if (code != CODE_FOR_nothing
2614		  /* We don't need MODE to be narrower than
2615		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2616		     the mode mask, as it is returned by the macro, it will
2617		     definitely be less than the actual mode mask.  */
2618		  && ((GET_CODE (size) == CONST_INT
2619		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2620			   <= (GET_MODE_MASK (mode) >> 1)))
2621		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2622		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2623		      || (*pred) (object, BLKmode))
2624		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2625		      || (*pred) (opalign, VOIDmode)))
2626		{
2627		  rtx op1;
2628		  rtx last = get_last_insn ();
2629		  rtx pat;
2630
2631		  op1 = convert_to_mode (mode, size, 1);
2632		  pred = insn_data[(int) code].operand[1].predicate;
2633		  if (pred != 0 && ! (*pred) (op1, mode))
2634		    op1 = copy_to_mode_reg (mode, op1);
2635
2636		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2637		  if (pat)
2638		    {
2639		      emit_insn (pat);
2640		      return 0;
2641		    }
2642		  else
2643		    delete_insns_since (last);
2644		}
2645	    }
2646
2647	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2648
2649	     It is unsafe to save the value generated by protect_from_queue
2650	     and reuse it later.  Consider what happens if emit_queue is
2651	     called before the return value from protect_from_queue is used.
2652
2653	     Expansion of the CALL_EXPR below will call emit_queue before
2654	     we are finished emitting RTL for argument setup.  So if we are
2655	     not careful we could get the wrong value for an argument.
2656
2657	     To avoid this problem we go ahead and emit code to copy OBJECT
2658	     and SIZE into new pseudos.  We can then place those new pseudos
2659	     into an RTL_EXPR and use them later, even after a call to
2660	     emit_queue.
2661
2662	     Note this is not strictly needed for library calls since they
2663	     do not call emit_queue before loading their arguments.  However,
2664	     we may need to have library calls call emit_queue in the future
2665	     since failing to do so could cause problems for targets which
2666	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2667	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2668
2669#ifdef TARGET_MEM_FUNCTIONS
2670	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2671#else
2672	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2673				  TREE_UNSIGNED (integer_type_node));
2674	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2675#endif
2676
2677#ifdef TARGET_MEM_FUNCTIONS
2678	  /* It is incorrect to use the libcall calling conventions to call
2679	     memset in this context.
2680
2681	     This could be a user call to memset and the user may wish to
2682	     examine the return value from memset.
2683
2684	     For targets where libcalls and normal calls have different
2685	     conventions for returning pointers, we could end up generating
2686	     incorrect code.
2687
2688	     So instead of using a libcall sequence we build up a suitable
2689	     CALL_EXPR and expand the call in the normal fashion.  */
2690	  if (fn == NULL_TREE)
2691	    {
2692	      tree fntype;
2693
2694	      /* This was copied from except.c; I don't know whether all of
2695		 this is necessary in this context.  */
2696	      fn = get_identifier ("memset");
2697	      fntype = build_pointer_type (void_type_node);
2698	      fntype = build_function_type (fntype, NULL_TREE);
2699	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2700	      ggc_add_tree_root (&fn, 1);
2701	      DECL_EXTERNAL (fn) = 1;
2702	      TREE_PUBLIC (fn) = 1;
2703	      DECL_ARTIFICIAL (fn) = 1;
2704	      TREE_NOTHROW (fn) = 1;
2705	      make_decl_rtl (fn, NULL);
2706	      assemble_external (fn);
2707	    }
2708
2709	  /* We need to make an argument list for the function call.
2710
2711	     memset has three arguments: the first is a void * address, the
2712	     second an integer with the initialization value, and the last a
2713	     size_t byte count for the fill.  */
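	  /* Illustrative only: the call built below corresponds roughly to

	         memset ((void *) object, 0, (size_t) size);

	     expanded as a normal call rather than as a libcall, for the
	     reasons given above.  */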
2714	  arg_list
2715	    = build_tree_list (NULL_TREE,
2716			       make_tree (build_pointer_type (void_type_node),
2717					  object));
2718	  TREE_CHAIN (arg_list)
2719	    = build_tree_list (NULL_TREE,
2720			       make_tree (integer_type_node, const0_rtx));
2721	  TREE_CHAIN (TREE_CHAIN (arg_list))
2722	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2723	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2724
2725	  /* Now we have to build up the CALL_EXPR itself.  */
2726	  call_expr = build1 (ADDR_EXPR,
2727			      build_pointer_type (TREE_TYPE (fn)), fn);
2728	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2729			     call_expr, arg_list, NULL_TREE);
2730	  TREE_SIDE_EFFECTS (call_expr) = 1;
2731
2732	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2733#else
2734	  emit_library_call (bzero_libfunc, LCT_NORMAL,
2735			     VOIDmode, 2, object, Pmode, size,
2736			     TYPE_MODE (integer_type_node));
2737#endif
2738
2739	  /* If we are initializing a readonly value, show the above call
2740	     clobbered it.  Otherwise, a load from it may erroneously be
2741	     hoisted from a loop.  */
2742	  if (RTX_UNCHANGING_P (object))
2743	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2744	}
2745    }
2746
2747  return retval;
2748}
2749
2750/* Generate code to copy Y into X.
2751   Both Y and X must have the same mode, except that
2752   Y can be a constant with VOIDmode.
2753   This mode cannot be BLKmode; use emit_block_move for that.
2754
2755   Return the last instruction emitted.  */
2756
2757rtx
2758emit_move_insn (x, y)
2759     rtx x, y;
2760{
2761  enum machine_mode mode = GET_MODE (x);
2762  rtx y_cst = NULL_RTX;
2763  rtx last_insn;
2764
2765  x = protect_from_queue (x, 1);
2766  y = protect_from_queue (y, 0);
2767
2768  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2769    abort ();
2770
2771  /* Never force constant_p_rtx to memory.  */
2772  if (GET_CODE (y) == CONSTANT_P_RTX)
2773    ;
2774  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2775    {
2776      y_cst = y;
2777      y = force_const_mem (mode, y);
2778    }
2779
2780  /* If X or Y are memory references, verify that their addresses are valid
2781     for the machine.  */
2782  if (GET_CODE (x) == MEM
2783      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2784	   && ! push_operand (x, GET_MODE (x)))
2785	  || (flag_force_addr
2786	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2787    x = validize_mem (x);
2788
2789  if (GET_CODE (y) == MEM
2790      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2791	  || (flag_force_addr
2792	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2793    y = validize_mem (y);
2794
2795  if (mode == BLKmode)
2796    abort ();
2797
2798  last_insn = emit_move_insn_1 (x, y);
2799
2800  if (y_cst && GET_CODE (x) == REG)
2801    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2802
2803  return last_insn;
2804}
2805
2806/* Low level part of emit_move_insn.
2807   Called just like emit_move_insn, but assumes X and Y
2808   are basically valid.  */
2809
2810rtx
2811emit_move_insn_1 (x, y)
2812     rtx x, y;
2813{
2814  enum machine_mode mode = GET_MODE (x);
2815  enum machine_mode submode;
2816  enum mode_class class = GET_MODE_CLASS (mode);
2817
2818  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2819    abort ();
2820
2821  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2822    return
2823      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2824
2825  /* Expand complex moves by moving real part and imag part, if possible.  */
2826  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2827	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2828						    * BITS_PER_UNIT),
2829						   (class == MODE_COMPLEX_INT
2830						    ? MODE_INT : MODE_FLOAT),
2831						   0))
2832	   && (mov_optab->handlers[(int) submode].insn_code
2833	       != CODE_FOR_nothing))
2834    {
2835      /* Don't split destination if it is a stack push.  */
2836      int stack = push_operand (x, GET_MODE (x));
2837
2838#ifdef PUSH_ROUNDING
2839      /* In case we output to the stack, but the size is smaller than the
2840	 machine can push exactly, we need to use move instructions.  */
2841      if (stack
2842	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2843	      != GET_MODE_SIZE (submode)))
2844	{
2845	  rtx temp;
2846	  HOST_WIDE_INT offset1, offset2;
2847
2848	  /* Do not use anti_adjust_stack, since we don't want to update
2849	     stack_pointer_delta.  */
2850	  temp = expand_binop (Pmode,
2851#ifdef STACK_GROWS_DOWNWARD
2852			       sub_optab,
2853#else
2854			       add_optab,
2855#endif
2856			       stack_pointer_rtx,
2857			       GEN_INT
2858				 (PUSH_ROUNDING
2859				  (GET_MODE_SIZE (GET_MODE (x)))),
2860			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2861
2862	  if (temp != stack_pointer_rtx)
2863	    emit_move_insn (stack_pointer_rtx, temp);
2864
2865#ifdef STACK_GROWS_DOWNWARD
2866	  offset1 = 0;
2867	  offset2 = GET_MODE_SIZE (submode);
2868#else
2869	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2870	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2871		     + GET_MODE_SIZE (submode));
2872#endif
2873
2874	  emit_move_insn (change_address (x, submode,
2875					  gen_rtx_PLUS (Pmode,
2876						        stack_pointer_rtx,
2877							GEN_INT (offset1))),
2878			  gen_realpart (submode, y));
2879	  emit_move_insn (change_address (x, submode,
2880					  gen_rtx_PLUS (Pmode,
2881						        stack_pointer_rtx,
2882							GEN_INT (offset2))),
2883			  gen_imagpart (submode, y));
2884	}
2885      else
2886#endif
2887      /* If this is a stack push, push the highpart first, so it
2888	 will be in the argument order.
2889
2890	 In that case, change_address is used only to convert
2891	 the mode, not to change the address.  */
2892      if (stack)
2893	{
2894	  /* Note that the real part always precedes the imag part in memory
2895	     regardless of machine's endianness.  */
2896#ifdef STACK_GROWS_DOWNWARD
2897	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2898		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2899		      gen_imagpart (submode, y)));
2900	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2901		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2902		      gen_realpart (submode, y)));
2903#else
2904	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2905		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2906		      gen_realpart (submode, y)));
2907	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2908		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2909		      gen_imagpart (submode, y)));
2910#endif
2911	}
2912      else
2913	{
2914	  rtx realpart_x, realpart_y;
2915	  rtx imagpart_x, imagpart_y;
2916
2917	  /* If this is a complex value with each part being smaller than a
2918	     word, the usual calling sequence will likely pack the pieces into
2919	     a single register.  Unfortunately, SUBREG of hard registers only
2920	     deals in terms of words, so we have a problem converting input
2921	     arguments to the CONCAT of two registers that is used elsewhere
2922	     for complex values.  If this is before reload, we can copy it into
2923	     memory and reload.  FIXME, we should see about using extract and
2924	     insert on integer registers, but complex short and complex char
2925	     variables should be rarely used.  */
2926	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2927	      && (reload_in_progress | reload_completed) == 0)
2928	    {
2929	      int packed_dest_p
2930		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2931	      int packed_src_p
2932		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2933
2934	      if (packed_dest_p || packed_src_p)
2935		{
2936		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2937					       ? MODE_FLOAT : MODE_INT);
2938
2939		  enum machine_mode reg_mode
2940		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2941
2942		  if (reg_mode != BLKmode)
2943		    {
2944		      rtx mem = assign_stack_temp (reg_mode,
2945						   GET_MODE_SIZE (mode), 0);
2946		      rtx cmem = adjust_address (mem, mode, 0);
2947
2948		      cfun->cannot_inline
2949			= N_("function using short complex types cannot be inline");
2950
2951		      if (packed_dest_p)
2952			{
2953			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2954
2955			  emit_move_insn_1 (cmem, y);
2956			  return emit_move_insn_1 (sreg, mem);
2957			}
2958		      else
2959			{
2960			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2961
2962			  emit_move_insn_1 (mem, sreg);
2963			  return emit_move_insn_1 (x, cmem);
2964			}
2965		    }
2966		}
2967	    }
2968
2969	  realpart_x = gen_realpart (submode, x);
2970	  realpart_y = gen_realpart (submode, y);
2971	  imagpart_x = gen_imagpart (submode, x);
2972	  imagpart_y = gen_imagpart (submode, y);
2973
2974	  /* Show the output dies here.  This is necessary for SUBREGs
2975	     of pseudos since we cannot track their lifetimes correctly;
2976	     hard regs shouldn't appear here except as return values.
2977	     We never want to emit such a clobber after reload.  */
2978	  if (x != y
2979	      && ! (reload_in_progress || reload_completed)
2980	      && (GET_CODE (realpart_x) == SUBREG
2981		  || GET_CODE (imagpart_x) == SUBREG))
2982	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2983
2984	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2985		     (realpart_x, realpart_y));
2986	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2987		     (imagpart_x, imagpart_y));
2988	}
2989
2990      return get_last_insn ();
2991    }
2992
2993  /* This will handle any multi-word mode that lacks a move_insn pattern.
2994     However, you will get better code if you define such patterns,
2995     even if they must turn into multiple assembler instructions.  */
2996  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2997    {
2998      rtx last_insn = 0;
2999      rtx seq, inner;
3000      int need_clobber;
3001      int i;
3002
3003#ifdef PUSH_ROUNDING
3004
3005      /* If X is a push on the stack, do the push now and replace
3006	 X with a reference to the stack pointer.  */
3007      if (push_operand (x, GET_MODE (x)))
3008	{
3009	  rtx temp;
3010	  enum rtx_code code;
3011
3012	  /* Do not use anti_adjust_stack, since we don't want to update
3013	     stack_pointer_delta.  */
3014	  temp = expand_binop (Pmode,
3015#ifdef STACK_GROWS_DOWNWARD
3016			       sub_optab,
3017#else
3018			       add_optab,
3019#endif
3020			       stack_pointer_rtx,
3021			       GEN_INT
3022				 (PUSH_ROUNDING
3023				  (GET_MODE_SIZE (GET_MODE (x)))),
3024			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3025
3026          if (temp != stack_pointer_rtx)
3027            emit_move_insn (stack_pointer_rtx, temp);
3028
3029	  code = GET_CODE (XEXP (x, 0));
3030
3031	  /* Just hope that small offsets off SP are OK.  */
3032	  if (code == POST_INC)
3033	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3034				GEN_INT (-((HOST_WIDE_INT)
3035					   GET_MODE_SIZE (GET_MODE (x)))));
3036	  else if (code == POST_DEC)
3037	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3038				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3039	  else
3040	    temp = stack_pointer_rtx;
3041
3042	  x = change_address (x, VOIDmode, temp);
3043	}
3044#endif
3045
3046      /* If we are in reload, see if either operand is a MEM whose address
3047	 is scheduled for replacement.  */
3048      if (reload_in_progress && GET_CODE (x) == MEM
3049	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3050	x = replace_equiv_address_nv (x, inner);
3051      if (reload_in_progress && GET_CODE (y) == MEM
3052	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3053	y = replace_equiv_address_nv (y, inner);
3054
3055      start_sequence ();
3056
3057      need_clobber = 0;
3058      for (i = 0;
3059	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3060	   i++)
3061	{
3062	  rtx xpart = operand_subword (x, i, 1, mode);
3063	  rtx ypart = operand_subword (y, i, 1, mode);
3064
3065	  /* If we can't get a part of Y, put Y into memory if it is a
3066	     constant.  Otherwise, force it into a register.  If we still
3067	     can't get a part of Y, abort.  */
3068	  if (ypart == 0 && CONSTANT_P (y))
3069	    {
3070	      y = force_const_mem (mode, y);
3071	      ypart = operand_subword (y, i, 1, mode);
3072	    }
3073	  else if (ypart == 0)
3074	    ypart = operand_subword_force (y, i, mode);
3075
3076	  if (xpart == 0 || ypart == 0)
3077	    abort ();
3078
3079	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3080
3081	  last_insn = emit_move_insn (xpart, ypart);
3082	}
3083
3084      seq = gen_sequence ();
3085      end_sequence ();
3086
3087      /* Show the output dies here.  This is necessary for SUBREGs
3088	 of pseudos since we cannot track their lifetimes correctly;
3089	 hard regs shouldn't appear here except as return values.
3090	 We never want to emit such a clobber after reload.  */
3091      if (x != y
3092	  && ! (reload_in_progress || reload_completed)
3093	  && need_clobber != 0)
3094	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3095
3096      emit_insn (seq);
3097
3098      return last_insn;
3099    }
3100  else
3101    abort ();
3102}
3103
3104/* Pushing data onto the stack.  */
3105
3106/* Push a block of length SIZE (perhaps variable)
3107   and return an rtx to address the beginning of the block.
3108   Note that it is not possible for the value returned to be a QUEUED.
3109   The value may be virtual_outgoing_args_rtx.
3110
3111   EXTRA is the number of bytes of padding to push in addition to SIZE.
3112   BELOW nonzero means this padding comes at low addresses;
3113   otherwise, the padding comes at high addresses.  */
3114
3115rtx
3116push_block (size, extra, below)
3117     rtx size;
3118     int extra, below;
3119{
3120  rtx temp;
3121
3122  size = convert_modes (Pmode, ptr_mode, size, 1);
3123  if (CONSTANT_P (size))
3124    anti_adjust_stack (plus_constant (size, extra));
3125  else if (GET_CODE (size) == REG && extra == 0)
3126    anti_adjust_stack (size);
3127  else
3128    {
3129      temp = copy_to_mode_reg (Pmode, size);
3130      if (extra != 0)
3131	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3132			     temp, 0, OPTAB_LIB_WIDEN);
3133      anti_adjust_stack (temp);
3134    }
3135
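  /* Now compute the address of the block just allocated: when the stack
     grows downward this is the outgoing args area itself (skipping EXTRA
     if that padding lies below); when it grows upward, the block starts
     SIZE bytes (plus EXTRA, if that padding lies above) below
     virtual_outgoing_args_rtx.  */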
3136#ifndef STACK_GROWS_DOWNWARD
3137  if (0)
3138#else
3139  if (1)
3140#endif
3141    {
3142      temp = virtual_outgoing_args_rtx;
3143      if (extra != 0 && below)
3144	temp = plus_constant (temp, extra);
3145    }
3146  else
3147    {
3148      if (GET_CODE (size) == CONST_INT)
3149	temp = plus_constant (virtual_outgoing_args_rtx,
3150			      -INTVAL (size) - (below ? 0 : extra));
3151      else if (extra != 0 && !below)
3152	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3153			     negate_rtx (Pmode, plus_constant (size, extra)));
3154      else
3155	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3156			     negate_rtx (Pmode, size));
3157    }
3158
3159  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3160}
3161
3162#ifdef PUSH_ROUNDING
3163
3164/* Emit single push insn.  */
3165
3166static void
3167emit_single_push_insn (mode, x, type)
3168     rtx x;
3169     enum machine_mode mode;
3170     tree type;
3171{
3172  rtx dest_addr;
3173  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3174  rtx dest;
3175  enum insn_code icode;
3176  insn_operand_predicate_fn pred;
3177
3178  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3179  /* If there is a push pattern, use it.  Otherwise try the old way of
3180     throwing a MEM representing the push operation to the move expander.  */
3181  icode = push_optab->handlers[(int) mode].insn_code;
3182  if (icode != CODE_FOR_nothing)
3183    {
3184      if (((pred = insn_data[(int) icode].operand[0].predicate)
3185	   && !((*pred) (x, mode))))
3186	x = force_reg (mode, x);
3187      emit_insn (GEN_FCN (icode) (x));
3188      return;
3189    }
3190  if (GET_MODE_SIZE (mode) == rounded_size)
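  /* No push pattern: express the push as a MEM store.  When PUSH_ROUNDING
     pads the value, a PRE_MODIFY address is built below so that the stack
     pointer moves by the full rounded size even though the store itself
     covers only GET_MODE_SIZE (mode) bytes.  */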
3191    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3192  else
3193    {
3194#ifdef STACK_GROWS_DOWNWARD
3195      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3196				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3197#else
3198      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3199				GEN_INT (rounded_size));
3200#endif
3201      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3202    }
3203
3204  dest = gen_rtx_MEM (mode, dest_addr);
3205
3206  if (type != 0)
3207    {
3208      set_mem_attributes (dest, type, 1);
3209
3210      if (flag_optimize_sibling_calls)
3211	/* Function incoming arguments may overlap with sibling call
3212	   outgoing arguments and we cannot allow reordering of reads
3213	   from function arguments with stores to outgoing arguments
3214	   of sibling calls.  */
3215	set_mem_alias_set (dest, 0);
3216    }
3217  emit_move_insn (dest, x);
3218}
3219#endif
3220
3221/* Generate code to push X onto the stack, assuming it has mode MODE and
3222   type TYPE.
3223   MODE is redundant except when X is a CONST_INT (since they don't
3224   carry mode info).
3225   SIZE is an rtx for the size of data to be copied (in bytes),
3226   needed only if X is BLKmode.
3227
3228   ALIGN (in bits) is maximum alignment we can assume.
3229
3230   If PARTIAL and REG are both nonzero, then copy that many of the first
3231   words of X into registers starting with REG, and push the rest of X.
3232   The amount of space pushed is decreased by PARTIAL words,
3233   rounded *down* to a multiple of PARM_BOUNDARY.
3234   REG must be a hard register in this case.
3235   If REG is zero but PARTIAL is not, take all other actions for an
3236   argument partially in registers, but do not actually load any
3237   registers.
3238
3239   EXTRA is the amount in bytes of extra space to leave next to this arg.
3240   This is ignored if an argument block has already been allocated.
3241
3242   On a machine that lacks real push insns, ARGS_ADDR is the address of
3243   the bottom of the argument block for this call.  We use indexing off there
3244   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3245   argument block has not been preallocated.
3246
3247   ARGS_SO_FAR is the size of args previously pushed for this call.
3248
3249   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3250   for arguments passed in registers.  If nonzero, it will be the number
3251   of bytes required.  */
3252
3253void
3254emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3255		args_addr, args_so_far, reg_parm_stack_space,
3256                alignment_pad)
3257     rtx x;
3258     enum machine_mode mode;
3259     tree type;
3260     rtx size;
3261     unsigned int align;
3262     int partial;
3263     rtx reg;
3264     int extra;
3265     rtx args_addr;
3266     rtx args_so_far;
3267     int reg_parm_stack_space;
3268     rtx alignment_pad;
3269{
3270  rtx xinner;
3271  enum direction stack_direction
3272#ifdef STACK_GROWS_DOWNWARD
3273    = downward;
3274#else
3275    = upward;
3276#endif
3277
3278  /* Decide where to pad the argument: `downward' for below,
3279     `upward' for above, or `none' for don't pad it.
3280     Default is below for small data on big-endian machines; else above.  */
3281  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3282
3283  /* Invert direction if stack is post-decrement.
3284     FIXME: why?  */
3285  if (STACK_PUSH_CODE == POST_DEC)
3286    if (where_pad != none)
3287      where_pad = (where_pad == downward ? upward : downward);
3288
3289  xinner = x = protect_from_queue (x, 0);
3290
3291  if (mode == BLKmode)
3292    {
3293      /* Copy a block into the stack, entirely or partially.  */
3294
3295      rtx temp;
3296      int used = partial * UNITS_PER_WORD;
3297      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3298      int skip;
3299
3300      if (size == 0)
3301	abort ();
3302
3303      used -= offset;
3304
3305      /* USED is now the # of bytes we need not copy to the stack
3306	 because registers will take care of them.  */
3307
3308      if (partial != 0)
3309	xinner = adjust_address (xinner, BLKmode, used);
3310
3311      /* If the partial register-part of the arg counts in its stack size,
3312	 skip the part of stack space corresponding to the registers.
3313	 Otherwise, start copying to the beginning of the stack space,
3314	 by setting SKIP to 0.  */
3315      skip = (reg_parm_stack_space == 0) ? 0 : used;
3316
3317#ifdef PUSH_ROUNDING
3318      /* Do it with several push insns if that doesn't take lots of insns
3319	 and if there is no difficulty with push insns that skip bytes
3320	 on the stack for alignment purposes.  */
3321      if (args_addr == 0
3322	  && PUSH_ARGS
3323	  && GET_CODE (size) == CONST_INT
3324	  && skip == 0
3325	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3326	  /* Here we avoid the case of a structure whose weak alignment
3327	     forces many pushes of a small amount of data,
3328	     and such small pushes do rounding that causes trouble.  */
3329	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3330	      || align >= BIGGEST_ALIGNMENT
3331	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3332		  == (align / BITS_PER_UNIT)))
3333	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3334	{
3335	  /* Push padding now if padding above and stack grows down,
3336	     or if padding below and stack grows up.
3337	     But if space is already allocated, this has already been done.  */
3338	  if (extra && args_addr == 0
3339	      && where_pad != none && where_pad != stack_direction)
3340	    anti_adjust_stack (GEN_INT (extra));
3341
3342	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3343	}
3344      else
3345#endif /* PUSH_ROUNDING  */
3346	{
3347	  rtx target;
3348
3349	  /* Otherwise make space on the stack and copy the data
3350	     to the address of that space.  */
3351
3352	  /* Deduct words put into registers from the size we must copy.  */
3353	  if (partial != 0)
3354	    {
3355	      if (GET_CODE (size) == CONST_INT)
3356		size = GEN_INT (INTVAL (size) - used);
3357	      else
3358		size = expand_binop (GET_MODE (size), sub_optab, size,
3359				     GEN_INT (used), NULL_RTX, 0,
3360				     OPTAB_LIB_WIDEN);
3361	    }
3362
3363	  /* Get the address of the stack space.
3364	     In this case, we do not deal with EXTRA separately.
3365	     A single stack adjust will do.  */
3366	  if (! args_addr)
3367	    {
3368	      temp = push_block (size, extra, where_pad == downward);
3369	      extra = 0;
3370	    }
3371	  else if (GET_CODE (args_so_far) == CONST_INT)
3372	    temp = memory_address (BLKmode,
3373				   plus_constant (args_addr,
3374						  skip + INTVAL (args_so_far)));
3375	  else
3376	    temp = memory_address (BLKmode,
3377				   plus_constant (gen_rtx_PLUS (Pmode,
3378								args_addr,
3379								args_so_far),
3380						  skip));
3381	  target = gen_rtx_MEM (BLKmode, temp);
3382
3383	  if (type != 0)
3384	    {
3385	      set_mem_attributes (target, type, 1);
3386	      /* Function incoming arguments may overlap with sibling call
3387		 outgoing arguments and we cannot allow reordering of reads
3388		 from function arguments with stores to outgoing arguments
3389		 of sibling calls.  */
3390	      set_mem_alias_set (target, 0);
3391	    }
3392	  else
3393	    set_mem_align (target, align);
3394
3395	  /* TEMP is the address of the block.  Copy the data there.  */
3396	  if (GET_CODE (size) == CONST_INT
3397	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3398	    {
3399	      move_by_pieces (target, xinner, INTVAL (size), align);
3400	      goto ret;
3401	    }
3402	  else
3403	    {
3404	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3405	      enum machine_mode mode;
3406
3407	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3408		   mode != VOIDmode;
3409		   mode = GET_MODE_WIDER_MODE (mode))
3410		{
3411		  enum insn_code code = movstr_optab[(int) mode];
3412		  insn_operand_predicate_fn pred;
3413
3414		  if (code != CODE_FOR_nothing
3415		      && ((GET_CODE (size) == CONST_INT
3416			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3417			       <= (GET_MODE_MASK (mode) >> 1)))
3418			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3419		      && (!(pred = insn_data[(int) code].operand[0].predicate)
3420			  || ((*pred) (target, BLKmode)))
3421		      && (!(pred = insn_data[(int) code].operand[1].predicate)
3422			  || ((*pred) (xinner, BLKmode)))
3423		      && (!(pred = insn_data[(int) code].operand[3].predicate)
3424			  || ((*pred) (opalign, VOIDmode))))
3425		    {
3426		      rtx op2 = convert_to_mode (mode, size, 1);
3427		      rtx last = get_last_insn ();
3428		      rtx pat;
3429
3430		      pred = insn_data[(int) code].operand[2].predicate;
3431		      if (pred != 0 && ! (*pred) (op2, mode))
3432			op2 = copy_to_mode_reg (mode, op2);
3433
3434		      pat = GEN_FCN ((int) code) (target, xinner,
3435						  op2, opalign);
3436		      if (pat)
3437			{
3438			  emit_insn (pat);
3439			  goto ret;
3440			}
3441		      else
3442			delete_insns_since (last);
3443		    }
3444		}
3445	    }
3446
3447	  if (!ACCUMULATE_OUTGOING_ARGS)
3448	    {
3449	      /* If the source is referenced relative to the stack pointer,
3450		 copy it to another register to stabilize it.  We do not need
3451		 to do this if we know that we won't be changing sp.  */
3452
3453	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3454		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3455		temp = copy_to_reg (temp);
3456	    }
3457
3458	  /* Make inhibit_defer_pop nonzero around the library call
3459	     to force it to pop the bcopy-arguments right away.  */
3460	  NO_DEFER_POP;
3461#ifdef TARGET_MEM_FUNCTIONS
3462	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
3463			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3464			     convert_to_mode (TYPE_MODE (sizetype),
3465					      size, TREE_UNSIGNED (sizetype)),
3466			     TYPE_MODE (sizetype));
3467#else
3468	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
3469			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3470			     convert_to_mode (TYPE_MODE (integer_type_node),
3471					      size,
3472					      TREE_UNSIGNED (integer_type_node)),
3473			     TYPE_MODE (integer_type_node));
3474#endif
3475	  OK_DEFER_POP;
3476	}
3477    }
3478  else if (partial > 0)
3479    {
3480      /* Scalar partly in registers.  */
3481
3482      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3483      int i;
3484      int not_stack;
3485      /* # words of start of argument
3486	 that we must make space for but need not store.  */
3487      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3488      int args_offset = INTVAL (args_so_far);
3489      int skip;
3490
3491      /* Push padding now if padding above and stack grows down,
3492	 or if padding below and stack grows up.
3493	 But if space already allocated, this has already been done.  */
3494      if (extra && args_addr == 0
3495	  && where_pad != none && where_pad != stack_direction)
3496	anti_adjust_stack (GEN_INT (extra));
3497
3498      /* If we make space by pushing it, we might as well push
3499	 the real data.  Otherwise, we can leave OFFSET nonzero
3500	 and leave the space uninitialized.  */
3501      if (args_addr == 0)
3502	offset = 0;
3503
3504      /* Now NOT_STACK gets the number of words that we don't need to
3505	 allocate on the stack.  */
3506      not_stack = partial - offset;
3507
3508      /* If the partial register-part of the arg counts in its stack size,
3509	 skip the part of stack space corresponding to the registers.
3510	 Otherwise, start copying to the beginning of the stack space,
3511	 by setting SKIP to 0.  */
3512      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
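      /* Illustrative numbers: if PARM_BOUNDARY is twice BITS_PER_WORD and
	 PARTIAL is 3, then OFFSET is 3 % 2 == 1 when the space was
	 preallocated (ARGS_ADDR nonzero) and 0 otherwise, NOT_STACK is the
	 remaining 2 or 3 words, and SKIP equals NOT_STACK only when the
	 register part also counts in the argument's stack space.  */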
3513
3514      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3515	x = validize_mem (force_const_mem (mode, x));
3516
3517      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3518	 SUBREGs of such registers are not allowed.  */
3519      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3520	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3521	x = copy_to_reg (x);
3522
3523      /* Loop over all the words allocated on the stack for this arg.  */
3524      /* We can do it by words, because any scalar bigger than a word
3525	 has a size that is a multiple of a word.  */
3526#ifndef PUSH_ARGS_REVERSED
3527      for (i = not_stack; i < size; i++)
3528#else
3529      for (i = size - 1; i >= not_stack; i--)
3530#endif
3531	if (i >= not_stack + offset)
3532	  emit_push_insn (operand_subword_force (x, i, mode),
3533			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3534			  0, args_addr,
3535			  GEN_INT (args_offset + ((i - not_stack + skip)
3536						  * UNITS_PER_WORD)),
3537			  reg_parm_stack_space, alignment_pad);
3538    }
3539  else
3540    {
3541      rtx addr;
3542      rtx target = NULL_RTX;
3543      rtx dest;
3544
3545      /* Push padding now if padding above and stack grows down,
3546	 or if padding below and stack grows up.
3547	 But if space already allocated, this has already been done.  */
3548      if (extra && args_addr == 0
3549	  && where_pad != none && where_pad != stack_direction)
3550	anti_adjust_stack (GEN_INT (extra));
3551
3552#ifdef PUSH_ROUNDING
3553      if (args_addr == 0 && PUSH_ARGS)
3554	emit_single_push_insn (mode, x, type);
3555      else
3556#endif
3557	{
3558	  if (GET_CODE (args_so_far) == CONST_INT)
3559	    addr
3560	      = memory_address (mode,
3561				plus_constant (args_addr,
3562					       INTVAL (args_so_far)));
3563	  else
3564	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3565						       args_so_far));
3566	  target = addr;
3567	  dest = gen_rtx_MEM (mode, addr);
3568	  if (type != 0)
3569	    {
3570	      set_mem_attributes (dest, type, 1);
3571	      /* Function incoming arguments may overlap with sibling call
3572		 outgoing arguments and we cannot allow reordering of reads
3573		 from function arguments with stores to outgoing arguments
3574		 of sibling calls.  */
3575	      set_mem_alias_set (dest, 0);
3576	    }
3577
3578	  emit_move_insn (dest, x);
3579	}
3580
3581    }
3582
3583 ret:
3584  /* If part should go in registers, copy that part
3585     into the appropriate registers.  Do this now, at the end,
3586     since mem-to-mem copies above may do function calls.  */
3587  if (partial > 0 && reg != 0)
3588    {
3589      /* Handle calls that pass values in multiple non-contiguous locations.
3590	 The Irix 6 ABI has examples of this.  */
3591      if (GET_CODE (reg) == PARALLEL)
3592	emit_group_load (reg, x, -1);  /* ??? size? */
3593      else
3594	move_block_to_reg (REGNO (reg), x, partial, mode);
3595    }
3596
3597  if (extra && args_addr == 0 && where_pad == stack_direction)
3598    anti_adjust_stack (GEN_INT (extra));
3599
3600  if (alignment_pad && args_addr == 0)
3601    anti_adjust_stack (alignment_pad);
3602}
3603
3604/* Return X if X can be used as a subtarget in a sequence of arithmetic
3605   operations.  */
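/* (A subtarget is typically the caller's TARGET rtx, reused so that an
   intermediate result can be computed directly into it; the tests below
   reject null and non-register rtxes, unchanging registers, hard
   registers, and any reuse inside a loop where it could hide an
   invariant expression.)  */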
3606
3607static rtx
3608get_subtarget (x)
3609     rtx x;
3610{
3611  return ((x == 0
3612	   /* Only registers can be subtargets.  */
3613	   || GET_CODE (x) != REG
3614	   /* If the register is readonly, it can't be set more than once.  */
3615	   || RTX_UNCHANGING_P (x)
3616	   /* Don't use hard regs to avoid extending their life.  */
3617	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3618	   /* Avoid subtargets inside loops,
3619	      since they hide some invariant expressions.  */
3620	   || preserve_subexpressions_p ())
3621	  ? 0 : x);
3622}
3623
3624/* Expand an assignment that stores the value of FROM into TO.
3625   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3626   (This may contain a QUEUED rtx;
3627   if the value is constant, this rtx is a constant.)
3628   Otherwise, the returned value is NULL_RTX.
3629
3630   SUGGEST_REG is no longer actually used.
3631   It used to mean, copy the value through a register
3632   and return that register, if that is possible.
3633   We now use WANT_VALUE to decide whether to do this.  */
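/* For example (illustrative only): an assignment such as `s.f = g ()',
   where S.F is a bit-field, takes the COMPONENT_REF branch below and is
   completed by store_field, while `*p = g ()' (a non-aggregate call on
   the right, no register variable on the left) takes the CALL_EXPR
   shortcut that expands the call before computing the left-hand side.  */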
3634
3635rtx
3636expand_assignment (to, from, want_value, suggest_reg)
3637     tree to, from;
3638     int want_value;
3639     int suggest_reg ATTRIBUTE_UNUSED;
3640{
3641  rtx to_rtx = 0;
3642  rtx result;
3643
3644  /* Don't crash if the lhs of the assignment was erroneous.  */
3645
3646  if (TREE_CODE (to) == ERROR_MARK)
3647    {
3648      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3649      return want_value ? result : NULL_RTX;
3650    }
3651
3652  /* Assignment of a structure component needs special treatment
3653     if the structure component's rtx is not simply a MEM.
3654     Assignment of an array element at a constant index, and assignment of
3655     an array element in an unaligned packed structure field, has the same
3656     problem.  */
3657
3658  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3659      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3660    {
3661      enum machine_mode mode1;
3662      HOST_WIDE_INT bitsize, bitpos;
3663      rtx orig_to_rtx;
3664      tree offset;
3665      int unsignedp;
3666      int volatilep = 0;
3667      tree tem;
3668
3669      push_temp_slots ();
3670      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3671				 &unsignedp, &volatilep);
3672
3673      /* If we are going to use store_bit_field and extract_bit_field,
3674	 make sure to_rtx will be safe for multiple use.  */
3675
3676      if (mode1 == VOIDmode && want_value)
3677	tem = stabilize_reference (tem);
3678
3679      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3680
3681      if (offset != 0)
3682	{
3683	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3684
3685	  if (GET_CODE (to_rtx) != MEM)
3686	    abort ();
3687
3688#ifdef POINTERS_EXTEND_UNSIGNED
3689	  if (GET_MODE (offset_rtx) != Pmode)
3690	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
3691#else
3692	  if (GET_MODE (offset_rtx) != ptr_mode)
3693	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3694#endif
3695
3696	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3697	     to call force_reg for that case.  Avoid that case.  */
3698	  if (GET_CODE (to_rtx) == MEM
3699	      && GET_MODE (to_rtx) == BLKmode
3700	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3701	      && bitsize > 0
3702	      && (bitpos % bitsize) == 0
3703	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3704	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3705	    {
3706	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3707	      bitpos = 0;
3708	    }
3709
3710	  to_rtx = offset_address (to_rtx, offset_rtx,
3711				   highest_pow2_factor_for_type (TREE_TYPE (to),
3712								 offset));
3713	}
3714
3715      if (GET_CODE (to_rtx) == MEM)
3716	{
3717	  tree old_expr = MEM_EXPR (to_rtx);
3718
3719	  /* If the field is at offset zero, we could have been given the
3720	     DECL_RTL of the parent struct.  Don't munge it.  */
3721	  to_rtx = shallow_copy_rtx (to_rtx);
3722
3723	  set_mem_attributes (to_rtx, to, 0);
3724
3725	  /* If we changed MEM_EXPR, that means we're now referencing
3726	     the COMPONENT_REF, which means that MEM_OFFSET must be
3727	     relative to that field.  But we've not yet reflected BITPOS
3728	     in TO_RTX.  This will be done in store_field.  Adjust for
3729	     that by biasing MEM_OFFSET by -bitpos.  */
3730	  if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3731	      && (bitpos / BITS_PER_UNIT) != 0)
3732	    set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3733					     - (bitpos / BITS_PER_UNIT)));
3734	}
3735
3736      /* Deal with volatile and readonly fields.  The former is only done
3737	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
3738      if (volatilep && GET_CODE (to_rtx) == MEM)
3739	{
3740	  if (to_rtx == orig_to_rtx)
3741	    to_rtx = copy_rtx (to_rtx);
3742	  MEM_VOLATILE_P (to_rtx) = 1;
3743	}
3744
3745      if (TREE_CODE (to) == COMPONENT_REF
3746	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3747	{
3748	  if (to_rtx == orig_to_rtx)
3749	    to_rtx = copy_rtx (to_rtx);
3750	  RTX_UNCHANGING_P (to_rtx) = 1;
3751	}
3752
3753      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3754	{
3755	  if (to_rtx == orig_to_rtx)
3756	    to_rtx = copy_rtx (to_rtx);
3757	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3758	}
3759
3760      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3761			    (want_value
3762			     /* Spurious cast for HPUX compiler.  */
3763			     ? ((enum machine_mode)
3764				TYPE_MODE (TREE_TYPE (to)))
3765			     : VOIDmode),
3766			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3767
3768      preserve_temp_slots (result);
3769      free_temp_slots ();
3770      pop_temp_slots ();
3771
3772      /* If the value is meaningful, convert RESULT to the proper mode.
3773	 Otherwise, return nothing.  */
3774      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3775					  TYPE_MODE (TREE_TYPE (from)),
3776					  result,
3777					  TREE_UNSIGNED (TREE_TYPE (to)))
3778	      : NULL_RTX);
3779    }
3780
3781  /* If the rhs is a function call and its value is not an aggregate,
3782     call the function before we start to compute the lhs.
3783     This is needed for correct code for cases such as
3784     val = setjmp (buf) on machines where reference to val
3785     requires loading up part of an address in a separate insn.
3786
3787     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3788     since it might be a promoted variable where the zero- or sign- extension
3789     needs to be done.  Handling this in the normal way is safe because no
3790     computation is done before the call.  */
3791  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3792      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3793      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3794	    && GET_CODE (DECL_RTL (to)) == REG))
3795    {
3796      rtx value;
3797
3798      push_temp_slots ();
3799      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3800      if (to_rtx == 0)
3801	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3802
3803      /* Handle calls that return values in multiple non-contiguous locations.
3804	 The Irix 6 ABI has examples of this.  */
3805      if (GET_CODE (to_rtx) == PARALLEL)
3806	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3807      else if (GET_MODE (to_rtx) == BLKmode)
3808	emit_block_move (to_rtx, value, expr_size (from));
3809      else
3810	{
3811#ifdef POINTERS_EXTEND_UNSIGNED
3812	  if (POINTER_TYPE_P (TREE_TYPE (to))
3813	      && GET_MODE (to_rtx) != GET_MODE (value))
3814	    value = convert_memory_address (GET_MODE (to_rtx), value);
3815#endif
3816	  emit_move_insn (to_rtx, value);
3817	}
3818      preserve_temp_slots (to_rtx);
3819      free_temp_slots ();
3820      pop_temp_slots ();
3821      return want_value ? to_rtx : NULL_RTX;
3822    }
3823
3824  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3825     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3826
3827  if (to_rtx == 0)
3828    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3829
3830  /* Don't move directly into a return register.  */
3831  if (TREE_CODE (to) == RESULT_DECL
3832      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3833    {
3834      rtx temp;
3835
3836      push_temp_slots ();
3837      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3838
3839      if (GET_CODE (to_rtx) == PARALLEL)
3840	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3841      else
3842	emit_move_insn (to_rtx, temp);
3843
3844      preserve_temp_slots (to_rtx);
3845      free_temp_slots ();
3846      pop_temp_slots ();
3847      return want_value ? to_rtx : NULL_RTX;
3848    }
3849
3850  /* In case we are returning the contents of an object which overlaps
3851     the place the value is being stored, use a safe function when copying
3852     a value through a pointer into a structure value return block.  */
3853  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3854      && current_function_returns_struct
3855      && !current_function_returns_pcc_struct)
3856    {
3857      rtx from_rtx, size;
3858
3859      push_temp_slots ();
3860      size = expr_size (from);
3861      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3862
3863#ifdef TARGET_MEM_FUNCTIONS
3864      emit_library_call (memmove_libfunc, LCT_NORMAL,
3865			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3866			 XEXP (from_rtx, 0), Pmode,
3867			 convert_to_mode (TYPE_MODE (sizetype),
3868					  size, TREE_UNSIGNED (sizetype)),
3869			 TYPE_MODE (sizetype));
3870#else
3871      emit_library_call (bcopy_libfunc, LCT_NORMAL,
3872			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3873			 XEXP (to_rtx, 0), Pmode,
3874			 convert_to_mode (TYPE_MODE (integer_type_node),
3875					  size, TREE_UNSIGNED (integer_type_node)),
3876			 TYPE_MODE (integer_type_node));
3877#endif
3878
3879      preserve_temp_slots (to_rtx);
3880      free_temp_slots ();
3881      pop_temp_slots ();
3882      return want_value ? to_rtx : NULL_RTX;
3883    }
3884
3885  /* Compute FROM and store the value in the rtx we got.  */
3886
3887  push_temp_slots ();
3888  result = store_expr (from, to_rtx, want_value);
3889  preserve_temp_slots (result);
3890  free_temp_slots ();
3891  pop_temp_slots ();
3892  return want_value ? result : NULL_RTX;
3893}
3894
3895/* Generate code for computing expression EXP,
3896   and storing the value into TARGET.
3897   TARGET may contain a QUEUED rtx.
3898
3899   If WANT_VALUE is nonzero, return a copy of the value
3900   not in TARGET, so that we can be sure to use the proper
3901   value in a containing expression even if TARGET has something
3902   else stored in it.  If possible, we copy the value through a pseudo
3903   and return that pseudo.  Or, if the value is constant, we try to
3904   return the constant.  In some cases, we return a pseudo
3905   copied *from* TARGET.
3906
3907   If the mode is BLKmode then we may return TARGET itself.
3908   It turns out that in BLKmode it doesn't cause a problem,
3909   because C has no operators that could combine two different
3910   assignments into the same BLKmode object with different values
3911   with no sequence point.  Will other languages need this to
3912   be more thorough?
3913
3914   If WANT_VALUE is 0, we return NULL, to make sure
3915   to catch quickly any cases where the caller uses the value
3916   and fails to set WANT_VALUE.  */
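/* A typical caller is expand_assignment above, whose ordinary case ends
   in store_expr (from, to_rtx, want_value): with WANT_VALUE zero only
   the store is emitted, and with WANT_VALUE nonzero an rtx (usually a
   pseudo or a constant, per the rules above) is returned for use as the
   value of the enclosing expression.  */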
3917
3918rtx
3919store_expr (exp, target, want_value)
3920     tree exp;
3921     rtx target;
3922     int want_value;
3923{
3924  rtx temp;
3925  int dont_return_target = 0;
3926  int dont_store_target = 0;
3927
3928  if (TREE_CODE (exp) == COMPOUND_EXPR)
3929    {
3930      /* Perform first part of compound expression, then assign from second
3931	 part.  */
3932      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3933      emit_queue ();
3934      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3935    }
3936  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3937    {
3938      /* For conditional expression, get safe form of the target.  Then
3939	 test the condition, doing the appropriate assignment on either
3940	 side.  This avoids the creation of unnecessary temporaries.
3941	 For non-BLKmode, it is more efficient not to do this.  */
3942
3943      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3944
3945      emit_queue ();
3946      target = protect_from_queue (target, 1);
3947
3948      do_pending_stack_adjust ();
3949      NO_DEFER_POP;
3950      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3951      start_cleanup_deferral ();
3952      store_expr (TREE_OPERAND (exp, 1), target, 0);
3953      end_cleanup_deferral ();
3954      emit_queue ();
3955      emit_jump_insn (gen_jump (lab2));
3956      emit_barrier ();
3957      emit_label (lab1);
3958      start_cleanup_deferral ();
3959      store_expr (TREE_OPERAND (exp, 2), target, 0);
3960      end_cleanup_deferral ();
3961      emit_queue ();
3962      emit_label (lab2);
3963      OK_DEFER_POP;
3964
3965      return want_value ? target : NULL_RTX;
3966    }
3967  else if (queued_subexp_p (target))
3968    /* If target contains a postincrement, let's not risk
3969       using it as the place to generate the rhs.  */
3970    {
3971      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3972	{
3973	  /* Expand EXP into a new pseudo.  */
3974	  temp = gen_reg_rtx (GET_MODE (target));
3975	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3976	}
3977      else
3978	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3979
3980      /* If target is volatile, ANSI requires accessing the value
3981	 *from* the target, if it is accessed.  So make that happen.
3982	 In no case return the target itself.  */
3983      if (! MEM_VOLATILE_P (target) && want_value)
3984	dont_return_target = 1;
3985    }
3986  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3987	   && GET_MODE (target) != BLKmode)
3988    /* If target is in memory and caller wants value in a register instead,
3989       arrange that.  Pass TARGET as target for expand_expr so that,
3990       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3991       We know expand_expr will not use the target in that case.
3992       Don't do this if TARGET is volatile because we are supposed
3993       to write it and then read it.  */
3994    {
3995      temp = expand_expr (exp, target, GET_MODE (target), 0);
3996      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3997	{
3998	  /* If TEMP is already in the desired TARGET, only copy it from
3999	     memory and don't store it there again.  */
4000	  if (temp == target
4001	      || (rtx_equal_p (temp, target)
4002		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4003	    dont_store_target = 1;
4004	  temp = copy_to_reg (temp);
4005	}
4006      dont_return_target = 1;
4007    }
4008  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4009    /* If this is a scalar in a register that is stored in a wider mode
4010       than the declared mode, compute the result into its declared mode
4011       and then convert to the wider mode.  Our value is the computed
4012       expression.  */
4013    {
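      /* This case arises, e.g., for a `short' variable that PROMOTE_MODE
	 keeps in a full-word pseudo: TARGET is then a promoted narrow-mode
	 SUBREG of that wider pseudo, the value is computed in the declared
	 mode, and convert_move below widens it into SUBREG_REG (target).  */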
4014      rtx inner_target = 0;
4015
4016      /* If we don't want a value, we can do the conversion inside EXP,
4017	 which will often result in some optimizations.  Do the conversion
4018	 in two steps: first change the signedness, if needed, then
4019	 the extend.  But don't do this if the type of EXP is a subtype
4020	 of something else since then the conversion might involve
4021	 more than just converting modes.  */
4022      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4023	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4024	{
4025	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4026	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4027	    exp
4028	      = convert
4029		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4030					  TREE_TYPE (exp)),
4031		 exp);
4032
4033	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4034					SUBREG_PROMOTED_UNSIGNED_P (target)),
4035			 exp);
4036
4037	  inner_target = SUBREG_REG (target);
4038	}
4039
4040      temp = expand_expr (exp, inner_target, VOIDmode, 0);
4041
4042      /* If TEMP is a volatile MEM and we want a result value, make
4043	 the access now so it gets done only once.  Likewise if
4044	 it contains TARGET.  */
4045      if (GET_CODE (temp) == MEM && want_value
4046	  && (MEM_VOLATILE_P (temp)
4047	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4048	temp = copy_to_reg (temp);
4049
4050      /* If TEMP is a VOIDmode constant, use convert_modes to make
4051	 sure that we properly convert it.  */
4052      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4053	{
4054	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4055				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4056	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4057			        GET_MODE (target), temp,
4058			        SUBREG_PROMOTED_UNSIGNED_P (target));
4059	}
4060
4061      convert_move (SUBREG_REG (target), temp,
4062		    SUBREG_PROMOTED_UNSIGNED_P (target));
4063
4064      /* If we promoted a constant, change the mode back down to match
4065	 target.  Otherwise, the caller might get confused by a result whose
4066	 mode is larger than expected.  */
4067
4068      if (want_value && GET_MODE (temp) != GET_MODE (target))
4069	{
4070	  if (GET_MODE (temp) != VOIDmode)
4071	    {
4072	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4073	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4074	      SUBREG_PROMOTED_UNSIGNED_P (temp)
4075		= SUBREG_PROMOTED_UNSIGNED_P (target);
4076	    }
4077	  else
4078	    temp = convert_modes (GET_MODE (target),
4079				  GET_MODE (SUBREG_REG (target)),
4080				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4081	}
4082
4083      return want_value ? temp : NULL_RTX;
4084    }
4085  else
4086    {
4087      temp = expand_expr (exp, target, GET_MODE (target), 0);
4088      /* Return TARGET if it's a specified hardware register.
4089	 If TARGET is a volatile mem ref, either return TARGET
4090	 or return a reg copied *from* TARGET; ANSI requires this.
4091
4092	 Otherwise, if TEMP is not TARGET, return TEMP
4093	 if it is constant (for efficiency),
4094	 or if we really want the correct value.  */
4095      if (!(target && GET_CODE (target) == REG
4096	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4097	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4098	  && ! rtx_equal_p (temp, target)
4099	  && (CONSTANT_P (temp) || want_value))
4100	dont_return_target = 1;
4101    }
4102
4103  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4104     the same as that of TARGET, adjust the constant.  This is needed, for
4105     example, in case it is a CONST_DOUBLE and we want only a word-sized
4106     value.  */
4107  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4108      && TREE_CODE (exp) != ERROR_MARK
4109      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4110    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4111			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4112
4113  /* If value was not generated in the target, store it there.
4114     Convert the value to TARGET's type first if necessary.
4115     If TEMP and TARGET compare equal according to rtx_equal_p, but
4116     one or both of them are volatile memory refs, we have to distinguish
4117     two cases:
4118     - expand_expr has used TARGET.  In this case, we must not generate
4119       another copy.  This can be detected by TARGET being equal according
4120       to == .
4121     - expand_expr has not used TARGET - that means that the source just
4122       happens to have the same RTX form.  Since temp will have been created
4123       by expand_expr, it will compare unequal according to == .
4124       We must generate a copy in this case, to reach the correct number
4125       of volatile memory references.  */
4126
4127  if ((! rtx_equal_p (temp, target)
4128       || (temp != target && (side_effects_p (temp)
4129			      || side_effects_p (target))))
4130      && TREE_CODE (exp) != ERROR_MARK
4131      && ! dont_store_target
4132	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4133	    but TARGET is not a valid memory reference, TEMP will differ
4134	    from TARGET although it is really the same location.  */
4135      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4136	  || target != DECL_RTL_IF_SET (exp)))
4137    {
4138      target = protect_from_queue (target, 1);
4139      if (GET_MODE (temp) != GET_MODE (target)
4140	  && GET_MODE (temp) != VOIDmode)
4141	{
4142	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4143	  if (dont_return_target)
4144	    {
4145	      /* In this case, we will return TEMP,
4146		 so make sure it has the proper mode.
4147		 But don't forget to store the value into TARGET.  */
4148	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4149	      emit_move_insn (target, temp);
4150	    }
4151	  else
4152	    convert_move (target, temp, unsignedp);
4153	}
4154
4155      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4156	{
4157	  /* Handle copying a string constant into an array.  The string
4158	     constant may be shorter than the array.  So copy just the string's
4159	     actual length, and clear the rest.  First get the size of the data
4160	     type of the string, which is actually the size of the target.  */
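	  /* For instance, for `char buf[16] = "hi";' the STRING_CST has
	     TREE_STRING_LENGTH == 3 (the two characters plus the
	     terminating nul), so three bytes are block-moved and the
	     remaining thirteen are cleared by clear_storage below.  */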
4161	  rtx size = expr_size (exp);
4162
4163	  if (GET_CODE (size) == CONST_INT
4164	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4165	    emit_block_move (target, temp, size);
4166	  else
4167	    {
4168	      /* Compute the size of the data to copy from the string.  */
4169	      tree copy_size
4170		= size_binop (MIN_EXPR,
4171			      make_tree (sizetype, size),
4172			      size_int (TREE_STRING_LENGTH (exp)));
4173	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4174					       VOIDmode, 0);
4175	      rtx label = 0;
4176
4177	      /* Copy that much.  */
4178	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4179	      emit_block_move (target, temp, copy_size_rtx);
4180
4181	      /* Figure out how much is left in TARGET that we have to clear.
4182		 Do all calculations in ptr_mode.  */
4183	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4184		{
4185		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4186		  target = adjust_address (target, BLKmode,
4187					   INTVAL (copy_size_rtx));
4188		}
4189	      else
4190		{
4191		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4192				       copy_size_rtx, NULL_RTX, 0,
4193				       OPTAB_LIB_WIDEN);
4194
4195#ifdef POINTERS_EXTEND_UNSIGNED
4196		  if (GET_MODE (copy_size_rtx) != Pmode)
4197		    copy_size_rtx = convert_memory_address (Pmode,
4198							    copy_size_rtx);
4199#endif
4200
4201		  target = offset_address (target, copy_size_rtx,
4202					   highest_pow2_factor (copy_size));
4203		  label = gen_label_rtx ();
4204		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4205					   GET_MODE (size), 0, label);
4206		}
4207
4208	      if (size != const0_rtx)
4209		clear_storage (target, size);
4210
4211	      if (label)
4212		emit_label (label);
4213	    }
4214	}
4215      /* Handle calls that return values in multiple non-contiguous locations.
4216	 The Irix 6 ABI has examples of this.  */
4217      else if (GET_CODE (target) == PARALLEL)
4218	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4219      else if (GET_MODE (temp) == BLKmode)
4220	emit_block_move (target, temp, expr_size (exp));
4221      else
4222	emit_move_insn (target, temp);
4223    }
4224
4225  /* If we don't want a value, return NULL_RTX.  */
4226  if (! want_value)
4227    return NULL_RTX;
4228
4229  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4230     ??? The latter test doesn't seem to make sense.  */
4231  else if (dont_return_target && GET_CODE (temp) != MEM)
4232    return temp;
4233
4234  /* Return TARGET itself if it is a hard register.  */
4235  else if (want_value && GET_MODE (target) != BLKmode
4236	   && ! (GET_CODE (target) == REG
4237		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4238    return copy_to_reg (target);
4239
4240  else
4241    return target;
4242}
4243
4244/* Return 1 if EXP just contains zeros.  */
4245
4246static int
4247is_zeros_p (exp)
4248     tree exp;
4249{
4250  tree elt;
4251
4252  switch (TREE_CODE (exp))
4253    {
4254    case CONVERT_EXPR:
4255    case NOP_EXPR:
4256    case NON_LVALUE_EXPR:
4257    case VIEW_CONVERT_EXPR:
4258      return is_zeros_p (TREE_OPERAND (exp, 0));
4259
4260    case INTEGER_CST:
4261      return integer_zerop (exp);
4262
4263    case COMPLEX_CST:
4264      return
4265	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4266
4267    case REAL_CST:
4268      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4269
4270    case VECTOR_CST:
4271      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4272	   elt = TREE_CHAIN (elt))
4273	if (!is_zeros_p (TREE_VALUE (elt)))
4274	  return 0;
4275
4276      return 1;
4277
4278    case CONSTRUCTOR:
4279      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4280	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4281      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4282	if (! is_zeros_p (TREE_VALUE (elt)))
4283	  return 0;
4284
4285      return 1;
4286
4287    default:
4288      return 0;
4289    }
4290}
4291
4292/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
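/* (For instance, an aggregate initializer like {0, 0, 0, 5} still counts
   as mostly zero, since 3 zero elements out of 4 satisfies the
   4 * zeros >= 3 * elts test below.)  */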
4293
4294static int
4295mostly_zeros_p (exp)
4296     tree exp;
4297{
4298  if (TREE_CODE (exp) == CONSTRUCTOR)
4299    {
4300      int elts = 0, zeros = 0;
4301      tree elt = CONSTRUCTOR_ELTS (exp);
4302      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4303	{
4304	  /* If there are no ranges of true bits, it is all zero.  */
4305	  return elt == NULL_TREE;
4306	}
4307      for (; elt; elt = TREE_CHAIN (elt))
4308	{
4309	  /* We do not handle the case where the index is a RANGE_EXPR,
4310	     so the statistic will be somewhat inaccurate.
4311	     We do make a more accurate count in store_constructor itself,
4312	     and since this function is only used for nested array elements,
4313	     this should be close enough.  */
4314	  if (mostly_zeros_p (TREE_VALUE (elt)))
4315	    zeros++;
4316	  elts++;
4317	}
4318
4319      return 4 * zeros >= 3 * elts;
4320    }
4321
4322  return is_zeros_p (exp);
4323}
4324
4325/* Helper function for store_constructor.
4326   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4327   TYPE is the type of the CONSTRUCTOR, not the element type.
4328   CLEARED is as for store_constructor.
4329   ALIAS_SET is the alias set to use for any stores.
4330
4331   This provides a recursive shortcut back to store_constructor when it isn't
4332   necessary to go through store_field.  This is so that we can pass through
4333   the cleared field to let store_constructor know that we may not have to
4334   clear a substructure if the outer structure has already been cleared.  */
4335
4336static void
4337store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4338			 alias_set)
4339     rtx target;
4340     unsigned HOST_WIDE_INT bitsize;
4341     HOST_WIDE_INT bitpos;
4342     enum machine_mode mode;
4343     tree exp, type;
4344     int cleared;
4345     int alias_set;
4346{
4347  if (TREE_CODE (exp) == CONSTRUCTOR
4348      && bitpos % BITS_PER_UNIT == 0
4349      /* If we have a non-zero bitpos for a register target, then we just
4350	 let store_field do the bitfield handling.  This is unlikely to
4351	 generate unnecessary clear instructions anyway.  */
4352      && (bitpos == 0 || GET_CODE (target) == MEM))
4353    {
4354      if (GET_CODE (target) == MEM)
4355	target
4356	  = adjust_address (target,
4357			    GET_MODE (target) == BLKmode
4358			    || 0 != (bitpos
4359				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4360			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4361
4362
4363      /* Update the alias set, if required.  */
4364      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4365	  && MEM_ALIAS_SET (target) != 0)
4366	{
4367	  target = copy_rtx (target);
4368	  set_mem_alias_set (target, alias_set);
4369	}
4370
4371      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4372    }
4373  else
4374    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4375		 alias_set);
4376}
4377
4378/* Store the value of constructor EXP into the rtx TARGET.
4379   TARGET is either a REG or a MEM; we know it cannot conflict, since
4380   safe_from_p has been called.
4381   CLEARED is true if TARGET is known to have been zero'd.
4382   SIZE is the number of bytes of TARGET we are allowed to modify: this
4383   may not be the same as the size of EXP if we are assigning to a field
4384   which has been packed to exclude padding bits.  */
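/* E.g., `struct { int a, b; } s = { 1, 2 };' is handled by the
   RECORD_TYPE arm below, storing each field in turn, while a GNU range
   designator such as `int v[100] = { [0 ... 99] = 7 };' reaches the
   ARRAY_TYPE arm as a RANGE_EXPR index and is expanded either as an
   unrolled sequence of stores or as a run-time loop, depending on the
   size heuristics there.  */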
4385
4386static void
4387store_constructor (exp, target, cleared, size)
4388     tree exp;
4389     rtx target;
4390     int cleared;
4391     HOST_WIDE_INT size;
4392{
4393  tree type = TREE_TYPE (exp);
4394#ifdef WORD_REGISTER_OPERATIONS
4395  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4396#endif
4397
4398  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4399      || TREE_CODE (type) == QUAL_UNION_TYPE)
4400    {
4401      tree elt;
4402
4403      /* We either clear the aggregate or indicate the value is dead.  */
4404      if ((TREE_CODE (type) == UNION_TYPE
4405	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4406	  && ! cleared
4407	  && ! CONSTRUCTOR_ELTS (exp))
4408	/* If the constructor is empty, clear the union.  */
4409	{
4410	  clear_storage (target, expr_size (exp));
4411	  cleared = 1;
4412	}
4413
4414      /* If we are building a static constructor into a register,
4415	 set the initial value as zero so we can fold the value into
4416	 a constant.  But if more than one register is involved,
4417	 this probably loses.  */
4418      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4419	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4420	{
4421	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4422	  cleared = 1;
4423	}
4424
4425      /* If the constructor has fewer fields than the structure
4426	 or if we are initializing the structure to mostly zeros,
4427	 clear the whole structure first.  Don't do this if TARGET is a
4428	 register whose mode size isn't equal to SIZE since clear_storage
4429	 can't handle this case.  */
4430      else if (! cleared && size > 0
4431	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4432		    != fields_length (type))
4433		   || mostly_zeros_p (exp))
4434	       && (GET_CODE (target) != REG
4435		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4436		       == size)))
4437	{
4438	  clear_storage (target, GEN_INT (size));
4439	  cleared = 1;
4440	}
4441
4442      if (! cleared)
4443	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4444
4445      /* Store each element of the constructor into
4446	 the corresponding field of TARGET.  */
4447
4448      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4449	{
4450	  tree field = TREE_PURPOSE (elt);
4451	  tree value = TREE_VALUE (elt);
4452	  enum machine_mode mode;
4453	  HOST_WIDE_INT bitsize;
4454	  HOST_WIDE_INT bitpos = 0;
4455	  int unsignedp;
4456	  tree offset;
4457	  rtx to_rtx = target;
4458
4459	  /* Just ignore missing fields.
4460	     We cleared the whole structure, above,
4461	     if any fields are missing.  */
4462	  if (field == 0)
4463	    continue;
4464
4465	  if (cleared && is_zeros_p (value))
4466	    continue;
4467
4468	  if (host_integerp (DECL_SIZE (field), 1))
4469	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4470	  else
4471	    bitsize = -1;
4472
4473	  unsignedp = TREE_UNSIGNED (field);
4474	  mode = DECL_MODE (field);
4475	  if (DECL_BIT_FIELD (field))
4476	    mode = VOIDmode;
4477
4478	  offset = DECL_FIELD_OFFSET (field);
4479	  if (host_integerp (offset, 0)
4480	      && host_integerp (bit_position (field), 0))
4481	    {
4482	      bitpos = int_bit_position (field);
4483	      offset = 0;
4484	    }
4485	  else
4486	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4487
4488	  if (offset)
4489	    {
4490	      rtx offset_rtx;
4491
4492	      if (contains_placeholder_p (offset))
4493		offset = build (WITH_RECORD_EXPR, sizetype,
4494				offset, make_tree (TREE_TYPE (exp), target));
4495
4496	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4497	      if (GET_CODE (to_rtx) != MEM)
4498		abort ();
4499
4500#ifdef POINTERS_EXTEND_UNSIGNED
4501	      if (GET_MODE (offset_rtx) != Pmode)
4502		offset_rtx = convert_memory_address (Pmode, offset_rtx);
4503#else
4504	      if (GET_MODE (offset_rtx) != ptr_mode)
4505		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4506#endif
4507
4508	      to_rtx = offset_address (to_rtx, offset_rtx,
4509				       highest_pow2_factor (offset));
4510	    }
4511
4512	  if (TREE_READONLY (field))
4513	    {
4514	      if (GET_CODE (to_rtx) == MEM)
4515		to_rtx = copy_rtx (to_rtx);
4516
4517	      RTX_UNCHANGING_P (to_rtx) = 1;
4518	    }
4519
4520#ifdef WORD_REGISTER_OPERATIONS
4521	  /* If this initializes a field that is smaller than a word, at the
4522	     start of a word, try to widen it to a full word.
4523	     This special case allows us to output C++ member function
4524	     initializations in a form that the optimizers can understand.  */
4525	  if (GET_CODE (target) == REG
4526	      && bitsize < BITS_PER_WORD
4527	      && bitpos % BITS_PER_WORD == 0
4528	      && GET_MODE_CLASS (mode) == MODE_INT
4529	      && TREE_CODE (value) == INTEGER_CST
4530	      && exp_size >= 0
4531	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4532	    {
4533	      tree type = TREE_TYPE (value);
4534
4535	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4536		{
4537		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4538		  value = convert (type, value);
4539		}
4540
4541	      if (BYTES_BIG_ENDIAN)
4542		value
4543		  = fold (build (LSHIFT_EXPR, type, value,
4544				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4545	      bitsize = BITS_PER_WORD;
4546	      mode = word_mode;
4547	    }
4548#endif
4549
4550	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4551	      && DECL_NONADDRESSABLE_P (field))
4552	    {
4553	      to_rtx = copy_rtx (to_rtx);
4554	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4555	    }
4556
4557	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4558				   value, type, cleared,
4559				   get_alias_set (TREE_TYPE (field)));
4560	}
4561    }
4562  else if (TREE_CODE (type) == ARRAY_TYPE
4563	   || TREE_CODE (type) == VECTOR_TYPE)
4564    {
4565      tree elt;
4566      int i;
4567      int need_to_clear;
4568      tree domain = TYPE_DOMAIN (type);
4569      tree elttype = TREE_TYPE (type);
4570      int const_bounds_p;
4571      HOST_WIDE_INT minelt = 0;
4572      HOST_WIDE_INT maxelt = 0;
4573
4574      /* Vectors are like arrays, but the domain is stored via an array
4575	 type indirectly.  */
4576      if (TREE_CODE (type) == VECTOR_TYPE)
4577	{
4578	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4579	     the same field as TYPE_DOMAIN, we are not guaranteed that
4580	     it always will.  */
4581	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4582	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4583	}
4584
4585      const_bounds_p = (TYPE_MIN_VALUE (domain)
4586			&& TYPE_MAX_VALUE (domain)
4587			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
4588			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
4589
4590      /* If we have constant bounds for the range of the type, get them.  */
4591      if (const_bounds_p)
4592	{
4593	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4594	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4595	}
4596
4597      /* If the constructor has fewer elements than the array,
4598         clear the whole array first.  Similarly if this is
4599         a static constructor of a non-BLKmode object.  */
4600      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4601	need_to_clear = 1;
4602      else
4603	{
4604	  HOST_WIDE_INT count = 0, zero_count = 0;
4605	  need_to_clear = ! const_bounds_p;
4606
4607	  /* This loop is a more accurate version of the loop in
4608	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4609	     It is also needed to check for missing elements.  */
4610	  for (elt = CONSTRUCTOR_ELTS (exp);
4611	       elt != NULL_TREE && ! need_to_clear;
4612	       elt = TREE_CHAIN (elt))
4613	    {
4614	      tree index = TREE_PURPOSE (elt);
4615	      HOST_WIDE_INT this_node_count;
4616
4617	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4618		{
4619		  tree lo_index = TREE_OPERAND (index, 0);
4620		  tree hi_index = TREE_OPERAND (index, 1);
4621
4622		  if (! host_integerp (lo_index, 1)
4623		      || ! host_integerp (hi_index, 1))
4624		    {
4625		      need_to_clear = 1;
4626		      break;
4627		    }
4628
4629		  this_node_count = (tree_low_cst (hi_index, 1)
4630				     - tree_low_cst (lo_index, 1) + 1);
4631		}
4632	      else
4633		this_node_count = 1;
4634
4635	      count += this_node_count;
4636	      if (mostly_zeros_p (TREE_VALUE (elt)))
4637		zero_count += this_node_count;
4638	    }
4639
4640	  /* Clear the entire array first if there are any missing elements,
4641	     or if the incidence of zero elements is >= 75%.  */
4642	  if (! need_to_clear
4643	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4644	    need_to_clear = 1;
4645	}
4646
4647      if (need_to_clear && size > 0)
4648	{
4649	  if (! cleared)
4650	    {
4651	      if (REG_P (target))
4652		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4653	      else
4654		clear_storage (target, GEN_INT (size));
4655	    }
4656	  cleared = 1;
4657	}
4658      else if (REG_P (target))
4659	/* Inform later passes that the old value is dead.  */
4660	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4661
4662      /* Store each element of the constructor into
4663	 the corresponding element of TARGET, determined
4664	 by counting the elements.  */
4665      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4666	   elt;
4667	   elt = TREE_CHAIN (elt), i++)
4668	{
4669	  enum machine_mode mode;
4670	  HOST_WIDE_INT bitsize;
4671	  HOST_WIDE_INT bitpos;
4672	  int unsignedp;
4673	  tree value = TREE_VALUE (elt);
4674	  tree index = TREE_PURPOSE (elt);
4675	  rtx xtarget = target;
4676
4677	  if (cleared && is_zeros_p (value))
4678	    continue;
4679
4680	  unsignedp = TREE_UNSIGNED (elttype);
4681	  mode = TYPE_MODE (elttype);
4682	  if (mode == BLKmode)
4683	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4684		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
4685		       : -1);
4686	  else
4687	    bitsize = GET_MODE_BITSIZE (mode);
4688
4689	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4690	    {
4691	      tree lo_index = TREE_OPERAND (index, 0);
4692	      tree hi_index = TREE_OPERAND (index, 1);
4693	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4694	      struct nesting *loop;
4695	      HOST_WIDE_INT lo, hi, count;
4696	      tree position;
4697
4698	      /* If the range is constant and "small", unroll the loop.  */
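	      /* "Small" means the target is not a MEM, the range has at
		 most two elements, or the unrolled data would total at
		 most 40 bytes (the 40 * 8 below is in bits, since
		 TYPE_SIZE is measured in bits).  */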
4699	      if (const_bounds_p
4700		  && host_integerp (lo_index, 0)
4701		  && host_integerp (hi_index, 0)
4702		  && (lo = tree_low_cst (lo_index, 0),
4703		      hi = tree_low_cst (hi_index, 0),
4704		      count = hi - lo + 1,
4705		      (GET_CODE (target) != MEM
4706		       || count <= 2
4707		       || (host_integerp (TYPE_SIZE (elttype), 1)
4708			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4709			       <= 40 * 8)))))
4710		{
4711		  lo -= minelt;  hi -= minelt;
4712		  for (; lo <= hi; lo++)
4713		    {
4714		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4715
4716		      if (GET_CODE (target) == MEM
4717			  && !MEM_KEEP_ALIAS_SET_P (target)
4718			  && TREE_CODE (type) == ARRAY_TYPE
4719			  && TYPE_NONALIASED_COMPONENT (type))
4720			{
4721			  target = copy_rtx (target);
4722			  MEM_KEEP_ALIAS_SET_P (target) = 1;
4723			}
4724
4725		      store_constructor_field
4726			(target, bitsize, bitpos, mode, value, type, cleared,
4727			 get_alias_set (elttype));
4728		    }
4729		}
4730	      else
4731		{
4732		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4733		  loop_top = gen_label_rtx ();
4734		  loop_end = gen_label_rtx ();
4735
4736		  unsignedp = TREE_UNSIGNED (domain);
4737
4738		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4739
4740		  index_r
4741		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4742						 &unsignedp, 0));
4743		  SET_DECL_RTL (index, index_r);
4744		  if (TREE_CODE (value) == SAVE_EXPR
4745		      && SAVE_EXPR_RTL (value) == 0)
4746		    {
4747		      /* Make sure value gets expanded once before the
4748                         loop.  */
4749		      expand_expr (value, const0_rtx, VOIDmode, 0);
4750		      emit_queue ();
4751		    }
4752		  store_expr (lo_index, index_r, 0);
4753		  loop = expand_start_loop (0);
4754
4755		  /* Assign value to element index.  */
4756		  position
4757		    = convert (ssizetype,
4758			       fold (build (MINUS_EXPR, TREE_TYPE (index),
4759					    index, TYPE_MIN_VALUE (domain))));
4760		  position = size_binop (MULT_EXPR, position,
4761					 convert (ssizetype,
4762						  TYPE_SIZE_UNIT (elttype)));
4763
4764		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4765		  xtarget = offset_address (target, pos_rtx,
4766					    highest_pow2_factor (position));
4767		  xtarget = adjust_address (xtarget, mode, 0);
4768		  if (TREE_CODE (value) == CONSTRUCTOR)
4769		    store_constructor (value, xtarget, cleared,
4770				       bitsize / BITS_PER_UNIT);
4771		  else
4772		    store_expr (value, xtarget, 0);
4773
4774		  expand_exit_loop_if_false (loop,
4775					     build (LT_EXPR, integer_type_node,
4776						    index, hi_index));
4777
4778		  expand_increment (build (PREINCREMENT_EXPR,
4779					   TREE_TYPE (index),
4780					   index, integer_one_node), 0, 0);
4781		  expand_end_loop ();
4782		  emit_label (loop_end);
4783		}
4784	    }
4785	  else if ((index != 0 && ! host_integerp (index, 0))
4786		   || ! host_integerp (TYPE_SIZE (elttype), 1))
4787	    {
4788	      tree position;
4789
4790	      if (index == 0)
4791		index = ssize_int (1);
4792
4793	      if (minelt)
4794		index = convert (ssizetype,
4795				 fold (build (MINUS_EXPR, TREE_TYPE (index),
4796					      index, TYPE_MIN_VALUE (domain))));
4797
4798	      position = size_binop (MULT_EXPR, index,
4799				     convert (ssizetype,
4800					      TYPE_SIZE_UNIT (elttype)));
4801	      xtarget = offset_address (target,
4802					expand_expr (position, 0, VOIDmode, 0),
4803					highest_pow2_factor (position));
4804	      xtarget = adjust_address (xtarget, mode, 0);
4805	      store_expr (value, xtarget, 0);
4806	    }
4807	  else
4808	    {
4809	      if (index != 0)
4810		bitpos = ((tree_low_cst (index, 0) - minelt)
4811			  * tree_low_cst (TYPE_SIZE (elttype), 1));
4812	      else
4813		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4814
4815	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4816		  && TREE_CODE (type) == ARRAY_TYPE
4817		  && TYPE_NONALIASED_COMPONENT (type))
4818		{
4819		  target = copy_rtx (target);
4820		  MEM_KEEP_ALIAS_SET_P (target) = 1;
4821		}
4822
4823	      store_constructor_field (target, bitsize, bitpos, mode, value,
4824				       type, cleared, get_alias_set (elttype));
4825
4826	    }
4827	}
4828    }
4829
4830  /* Set constructor assignments.  */
4831  else if (TREE_CODE (type) == SET_TYPE)
4832    {
4833      tree elt = CONSTRUCTOR_ELTS (exp);
4834      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4835      tree domain = TYPE_DOMAIN (type);
4836      tree domain_min, domain_max, bitlength;
4837
4838      /* The default implementation strategy is to extract the constant
4839	 parts of the constructor, use that to initialize the target,
4840	 and then "or" in whatever non-constant ranges we need in addition.
4841
4842	 If a large set is all zero or all ones, it is
4843	 probably better to set it using memset (if available) or bzero.
4844	 Also, if a large set has just a single range, it may also be
4845	 better to first clear the whole set (using bzero/memset),
4846	 and then set the bits we want.  */
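      /* Each element of CONSTRUCTOR_ELTS here is either a single bit
	 (TREE_PURPOSE null, TREE_VALUE the bit number) or an inclusive
	 [TREE_PURPOSE, TREE_VALUE] range of bits; whatever is not folded
	 into constant words above is set at run time below, by memset
	 (when available and the bounds are byte-aligned constants) or
	 else by the __setbits library call.  */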
4847
4848      /* Check for all zeros.  */
4849      if (elt == NULL_TREE && size > 0)
4850	{
4851	  if (!cleared)
4852	    clear_storage (target, GEN_INT (size));
4853	  return;
4854	}
4855
4856      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4857      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4858      bitlength = size_binop (PLUS_EXPR,
4859			      size_diffop (domain_max, domain_min),
4860			      ssize_int (1));
4861
4862      nbits = tree_low_cst (bitlength, 1);
4863
4864      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4865	 are "complicated" (more than one range), initialize (the
4866	 constant parts) by copying from a constant.  */
4867      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4868	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4869	{
4870	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4871	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4872	  char *bit_buffer = (char *) alloca (nbits);
4873	  HOST_WIDE_INT word = 0;
4874	  unsigned int bit_pos = 0;
4875	  unsigned int ibit = 0;
4876	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
4877
4878	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4879	  for (;;)
4880	    {
4881	      if (bit_buffer[ibit])
4882		{
4883		  if (BYTES_BIG_ENDIAN)
4884		    word |= (1 << (set_word_size - 1 - bit_pos));
4885		  else
4886		    word |= 1 << bit_pos;
4887		}
4888
4889	      bit_pos++;  ibit++;
4890	      if (bit_pos >= set_word_size || ibit == nbits)
4891		{
4892		  if (word != 0 || ! cleared)
4893		    {
4894		      rtx datum = GEN_INT (word);
4895		      rtx to_rtx;
4896
4897		      /* The assumption here is that it is safe to use
4898			 XEXP if the set is multi-word, but not if
4899			 it's single-word.  */
4900		      if (GET_CODE (target) == MEM)
4901			to_rtx = adjust_address (target, mode, offset);
4902		      else if (offset == 0)
4903			to_rtx = target;
4904		      else
4905			abort ();
4906		      emit_move_insn (to_rtx, datum);
4907		    }
4908
4909		  if (ibit == nbits)
4910		    break;
4911		  word = 0;
4912		  bit_pos = 0;
4913		  offset += set_word_size / BITS_PER_UNIT;
4914		}
4915	    }
4916	}
4917      else if (!cleared)
4918	/* Don't bother clearing storage if the set is all ones.  */
4919	if (TREE_CHAIN (elt) != NULL_TREE
4920	    || (TREE_PURPOSE (elt) == NULL_TREE
4921		? nbits != 1
4922		: ( ! host_integerp (TREE_VALUE (elt), 0)
4923		   || ! host_integerp (TREE_PURPOSE (elt), 0)
4924		   || (tree_low_cst (TREE_VALUE (elt), 0)
4925		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4926		       != (HOST_WIDE_INT) nbits))))
4927	  clear_storage (target, expr_size (exp));
4928
4929      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4930	{
4931	  /* Start of range of element or NULL.  */
4932	  tree startbit = TREE_PURPOSE (elt);
4933	  /* End of range of element, or element value.  */
4934	  tree endbit   = TREE_VALUE (elt);
4935#ifdef TARGET_MEM_FUNCTIONS
4936	  HOST_WIDE_INT startb, endb;
4937#endif
4938	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4939
4940	  bitlength_rtx = expand_expr (bitlength,
4941				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4942
4943	  /* Handle non-range tuple element like [ expr ].  */
4944	  if (startbit == NULL_TREE)
4945	    {
4946	      startbit = save_expr (endbit);
4947	      endbit = startbit;
4948	    }
4949
4950	  startbit = convert (sizetype, startbit);
4951	  endbit = convert (sizetype, endbit);
4952	  if (! integer_zerop (domain_min))
4953	    {
4954	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4955	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4956	    }
4957	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4958				      EXPAND_CONST_ADDRESS);
4959	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4960				    EXPAND_CONST_ADDRESS);
4961
4962	  if (REG_P (target))
4963	    {
4964	      targetx
4965		= assign_temp
4966		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4967					  TYPE_QUAL_CONST)),
4968		   0, 1, 1);
4969	      emit_move_insn (targetx, target);
4970	    }
4971
4972	  else if (GET_CODE (target) == MEM)
4973	    targetx = target;
4974	  else
4975	    abort ();
4976
4977#ifdef TARGET_MEM_FUNCTIONS
4978	  /* Optimization:  If startbit and endbit are
4979	     constants divisible by BITS_PER_UNIT,
4980	     call memset instead.  */
4981	  if (TREE_CODE (startbit) == INTEGER_CST
4982	      && TREE_CODE (endbit) == INTEGER_CST
4983	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4984	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4985	    {
4986	      emit_library_call (memset_libfunc, LCT_NORMAL,
4987				 VOIDmode, 3,
4988				 plus_constant (XEXP (targetx, 0),
4989						startb / BITS_PER_UNIT),
4990				 Pmode,
4991				 constm1_rtx, TYPE_MODE (integer_type_node),
4992				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4993				 TYPE_MODE (sizetype));
4994	    }
4995	  else
4996#endif
4997	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4998			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4999			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5000			       startbit_rtx, TYPE_MODE (sizetype),
5001			       endbit_rtx, TYPE_MODE (sizetype));
5002
5003	  if (REG_P (target))
5004	    emit_move_insn (target, targetx);
5005	}
5006    }
5007
5008  else
5009    abort ();
5010}
5011
5012/* Store the value of EXP (an expression tree)
5013   into a subfield of TARGET which has mode MODE and occupies
5014   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5015   If MODE is VOIDmode, it means that we are storing into a bit-field.
5016
5017   If VALUE_MODE is VOIDmode, return nothing in particular.
5018   UNSIGNEDP is not used in this case.
5019
5020   Otherwise, return an rtx for the value stored.  This rtx
5021   has mode VALUE_MODE if that is convenient to do.
5022   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5023
5024   TYPE is the type of the underlying object.
5025
5026   ALIAS_SET is the alias set for the destination.  This value will
5027   (in general) be different from that for TARGET, since TARGET is a
5028   reference to the containing structure.  */
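/* A hypothetical call, for illustration only: storing a 16-bit field that
   begins at bit 32 of the object TARGET refers to might look like

     store_field (target, 16, 32, HImode, exp, VOIDmode, 0, type, alias_set);

   where VALUE_MODE is VOIDmode because the caller does not need the value
   stored; the actual arguments depend entirely on the caller.  */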
5029
5030static rtx
5031store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5032	     alias_set)
5033     rtx target;
5034     HOST_WIDE_INT bitsize;
5035     HOST_WIDE_INT bitpos;
5036     enum machine_mode mode;
5037     tree exp;
5038     enum machine_mode value_mode;
5039     int unsignedp;
5040     tree type;
5041     int alias_set;
5042{
5043  HOST_WIDE_INT width_mask = 0;
5044
5045  if (TREE_CODE (exp) == ERROR_MARK)
5046    return const0_rtx;
5047
5048  /* If we have nothing to store, do nothing unless the expression has
5049     side-effects.  */
5050  if (bitsize == 0)
5051    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5052	  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5053    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
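	  /* For instance, a bitsize of 8 yields a width_mask of 0xff; the mask
	     is used further down to avoid refetching the value from the
	     bit-field once it has been stored.  */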
5054
5055  /* If we are storing into an unaligned field of an aligned union that is
5056     in a register, we may have the mode of TARGET being an integer mode but
5057     MODE == BLKmode.  In that case, get an aligned object whose size and
5058     alignment are the same as TARGET and store TARGET into it (we can avoid
5059     the store if the field being stored is the entire width of TARGET).  Then
5060     call ourselves recursively to store the field into a BLKmode version of
5061     that object.  Finally, load from the object into TARGET.  This is not
5062     very efficient in general, but should only be slightly more expensive
5063     than the otherwise-required unaligned accesses.  Perhaps this can be
5064     cleaned up later.  */
5065
5066  if (mode == BLKmode
5067      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5068    {
5069      rtx object
5070	= assign_temp
5071	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5072	   0, 1, 1);
5073      rtx blk_object = adjust_address (object, BLKmode, 0);
5074
5075      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5076	emit_move_insn (object, target);
5077
5078      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5079		   alias_set);
5080
5081      emit_move_insn (target, object);
5082
5083      /* We want to return the BLKmode version of the data.  */
5084      return blk_object;
5085    }
5086
5087  if (GET_CODE (target) == CONCAT)
5088    {
5089      /* We're storing into a struct containing a single __complex.  */
5090
5091      if (bitpos != 0)
5092	abort ();
5093      return store_expr (exp, target, 0);
5094    }
5095
5096  /* If the structure is in a register or if the component
5097     is a bit field, we cannot use addressing to access it.
5098     Use bit-field techniques or SUBREG to store in it.  */
5099
5100  if (mode == VOIDmode
5101      || (mode != BLKmode && ! direct_store[(int) mode]
5102	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5103	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5104      || GET_CODE (target) == REG
5105      || GET_CODE (target) == SUBREG
5106      /* If the field isn't aligned enough to store as an ordinary memref,
5107	 store it as a bit field.  */
5108      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5109	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5110	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5111      /* If the RHS and field are a constant size and the size of the
5112	 RHS isn't the same size as the bitfield, we must use bitfield
5113	 operations.  */
5114      || (bitsize >= 0
5115	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5116	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5117    {
5118      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5119
5120      /* If BITSIZE is narrower than the size of the type of EXP
5121	 we will be narrowing TEMP.  Normally, what's wanted are the
5122		 low-order bits.  However, if EXP's type is a record and this is a
5123	 big-endian machine, we want the upper BITSIZE bits.  */
5124      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5125	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5126	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5127	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5128			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5129				       - bitsize),
5130			     temp, 1);
5131
5132      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5133	 MODE.  */
5134      if (mode != VOIDmode && mode != BLKmode
5135	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5136	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5137
5138      /* If the modes of TARGET and TEMP are both BLKmode, both
5139	 must be in memory and BITPOS must be aligned on a byte
5140	 boundary.  If so, we simply do a block copy.  */
5141      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5142	{
5143	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5144	      || bitpos % BITS_PER_UNIT != 0)
5145	    abort ();
5146
5147	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5148	  emit_block_move (target, temp,
5149			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5150				    / BITS_PER_UNIT));
5151
5152	  return value_mode == VOIDmode ? const0_rtx : target;
5153	}
5154
5155      /* Store the value in the bitfield.  */
5156      store_bit_field (target, bitsize, bitpos, mode, temp,
5157		       int_size_in_bytes (type));
5158
5159      if (value_mode != VOIDmode)
5160	{
5161	  /* The caller wants an rtx for the value.
5162	     If possible, avoid refetching from the bitfield itself.  */
5163	  if (width_mask != 0
5164	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5165	    {
5166	      tree count;
5167	      enum machine_mode tmode;
5168
5169	      tmode = GET_MODE (temp);
5170	      if (tmode == VOIDmode)
5171		tmode = value_mode;
5172
5173	      if (unsignedp)
5174		return expand_and (tmode, temp,
5175				   GEN_INT (trunc_int_for_mode (width_mask,
5176								tmode)),
5177				   NULL_RTX);
5178
5179	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5180	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5181	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5182	    }
5183
5184	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5185				    NULL_RTX, value_mode, VOIDmode,
5186				    int_size_in_bytes (type));
5187	}
5188      return const0_rtx;
5189    }
5190  else
5191    {
5192      rtx addr = XEXP (target, 0);
5193      rtx to_rtx = target;
5194
5195      /* If a value is wanted, it must be the lhs;
5196	 so make the address stable for multiple use.  */
5197
5198      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5199	  && ! CONSTANT_ADDRESS_P (addr)
5200	  /* A frame-pointer reference is already stable.  */
5201	  && ! (GET_CODE (addr) == PLUS
5202		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5203		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5204		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5205	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5206
5207      /* Now build a reference to just the desired component.  */
5208
5209      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5210
5211      if (to_rtx == target)
5212	to_rtx = copy_rtx (to_rtx);
5213
5214      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5215      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5216	set_mem_alias_set (to_rtx, alias_set);
5217
5218      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5219    }
5220}
5221
5222/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5223   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5224   codes and find the ultimate containing object, which we return.
5225
5226   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5227   bit position, and *PUNSIGNEDP to the signedness of the field.
5228   If the position of the field is variable, we store a tree
5229   giving the variable offset (in units) in *POFFSET.
5230   This offset is in addition to the bit position.
5231   If the position is not variable, we store 0 in *POFFSET.
5232
5233   If any of the extraction expressions is volatile,
5234   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5235
5236   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5237   is a mode that can be used to access the field.  In that case, *PBITSIZE
5238   is redundant.
5239
5240   If the field describes a variable-sized object, *PMODE is set to
5241   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5242   this case, but the address of the object can be found.  */
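/* A hypothetical example: for a reference S.F, where F is a 3-bit bit-field
   placed 17 bits from the start of S, this returns the tree for S and sets
   *PBITSIZE = 3, *PBITPOS = 17, *POFFSET = 0 and *PMODE = VOIDmode.  */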
5243
5244tree
5245get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5246		     punsignedp, pvolatilep)
5247     tree exp;
5248     HOST_WIDE_INT *pbitsize;
5249     HOST_WIDE_INT *pbitpos;
5250     tree *poffset;
5251     enum machine_mode *pmode;
5252     int *punsignedp;
5253     int *pvolatilep;
5254{
5255  tree size_tree = 0;
5256  enum machine_mode mode = VOIDmode;
5257  tree offset = size_zero_node;
5258  tree bit_offset = bitsize_zero_node;
5259  tree placeholder_ptr = 0;
5260  tree tem;
5261
5262  /* First get the mode, signedness, and size.  We do this from just the
5263     outermost expression.  */
5264  if (TREE_CODE (exp) == COMPONENT_REF)
5265    {
5266      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5267      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5268	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5269
5270      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5271    }
5272  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5273    {
5274      size_tree = TREE_OPERAND (exp, 1);
5275      *punsignedp = TREE_UNSIGNED (exp);
5276    }
5277  else
5278    {
5279      mode = TYPE_MODE (TREE_TYPE (exp));
5280      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5281
5282      if (mode == BLKmode)
5283	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5284      else
5285	*pbitsize = GET_MODE_BITSIZE (mode);
5286    }
5287
5288  if (size_tree != 0)
5289    {
5290      if (! host_integerp (size_tree, 1))
5291	mode = BLKmode, *pbitsize = -1;
5292      else
5293	*pbitsize = tree_low_cst (size_tree, 1);
5294    }
5295
5296  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5297     and find the ultimate containing object.  */
5298  while (1)
5299    {
5300      if (TREE_CODE (exp) == BIT_FIELD_REF)
5301	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5302      else if (TREE_CODE (exp) == COMPONENT_REF)
5303	{
5304	  tree field = TREE_OPERAND (exp, 1);
5305	  tree this_offset = DECL_FIELD_OFFSET (field);
5306
5307	  /* If this field hasn't been filled in yet, don't go
5308	     past it.  This should only happen when folding expressions
5309	     made during type construction.  */
5310	  if (this_offset == 0)
5311	    break;
5312	  else if (! TREE_CONSTANT (this_offset)
5313		   && contains_placeholder_p (this_offset))
5314	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5315
5316	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5317	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5318				   DECL_FIELD_BIT_OFFSET (field));
5319
5320	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5321	}
5322
5323      else if (TREE_CODE (exp) == ARRAY_REF
5324	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5325	{
5326	  tree index = TREE_OPERAND (exp, 1);
5327	  tree array = TREE_OPERAND (exp, 0);
5328	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5329	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5330	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5331
5332	  /* We assume all arrays have sizes that are a multiple of a byte.
5333	     First subtract the lower bound, if any, in the type of the
5334	     index, then convert to sizetype and multiply by the size of the
5335	     array element.  */
5336	  if (low_bound != 0 && ! integer_zerop (low_bound))
5337	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5338				 index, low_bound));
5339
5340	  /* If the index has a self-referential type, pass it to a
5341	     WITH_RECORD_EXPR; if the component size does, pass our
5342	     component to one.  */
5343	  if (! TREE_CONSTANT (index)
5344	      && contains_placeholder_p (index))
5345	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5346	  if (! TREE_CONSTANT (unit_size)
5347	      && contains_placeholder_p (unit_size))
5348	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5349
5350	  offset = size_binop (PLUS_EXPR, offset,
5351			       size_binop (MULT_EXPR,
5352					   convert (sizetype, index),
5353					   unit_size));
5354	}
5355
5356      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5357	{
5358	  tree new = find_placeholder (exp, &placeholder_ptr);
5359
5360	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5361	     We might have been called from tree optimization where we
5362	     haven't set up an object yet.  */
5363	  if (new == 0)
5364	    break;
5365	  else
5366	    exp = new;
5367
5368	  continue;
5369	}
5370      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5371	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5372	       && ! ((TREE_CODE (exp) == NOP_EXPR
5373		      || TREE_CODE (exp) == CONVERT_EXPR)
5374		     && (TYPE_MODE (TREE_TYPE (exp))
5375			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5376	break;
5377
5378      /* If any reference in the chain is volatile, the effect is volatile.  */
5379      if (TREE_THIS_VOLATILE (exp))
5380	*pvolatilep = 1;
5381
5382      exp = TREE_OPERAND (exp, 0);
5383    }
5384
5385  /* If OFFSET is constant, see if we can return the whole thing as a
5386     constant bit position.  Otherwise, split it up.  */
5387  if (host_integerp (offset, 0)
5388      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5389				 bitsize_unit_node))
5390      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5391      && host_integerp (tem, 0))
5392    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5393  else
5394    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5395
5396  *pmode = mode;
5397  return exp;
5398}
5399
5400/* Return 1 if T is an expression that get_inner_reference handles.  */
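/* For example, a COMPONENT_REF or an ARRAY_REF is handled, while a plain
   INDIRECT_REF is not.  */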
5401
5402int
5403handled_component_p (t)
5404     tree t;
5405{
5406  switch (TREE_CODE (t))
5407    {
5408    case BIT_FIELD_REF:
5409    case COMPONENT_REF:
5410    case ARRAY_REF:
5411    case ARRAY_RANGE_REF:
5412    case NON_LVALUE_EXPR:
5413    case VIEW_CONVERT_EXPR:
5414      return 1;
5415
5416    case NOP_EXPR:
5417    case CONVERT_EXPR:
5418      return (TYPE_MODE (TREE_TYPE (t))
5419	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5420
5421    default:
5422      return 0;
5423    }
5424}
5425
5426/* Given an rtx VALUE that may contain additions and multiplications, return
5427   an equivalent value that just refers to a register, memory, or constant.
5428   This is done by generating instructions to perform the arithmetic and
5429   returning a pseudo-register containing the value.
5430
5431   The returned value may be a REG, SUBREG, MEM or constant.  */
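/* For example (hypothetical rtl), given (plus (reg 100) (mult (reg 101)
   (const_int 4))), we emit the multiply and the add and return a pseudo
   register holding the sum, unless TARGET can be used directly.  */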
5432
5433rtx
5434force_operand (value, target)
5435     rtx value, target;
5436{
5437  optab binoptab = 0;
5438  /* Use a temporary to force order of execution of calls to
5439     `force_operand'.  */
5440  rtx tmp;
5441  rtx op2;
5442  /* Use subtarget as the target for operand 0 of a binary operation.  */
5443  rtx subtarget = get_subtarget (target);
5444
5445  /* Check for a PIC address load.  */
5446  if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5447      && XEXP (value, 0) == pic_offset_table_rtx
5448      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5449	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5450	  || GET_CODE (XEXP (value, 1)) == CONST))
5451    {
5452      if (!subtarget)
5453	subtarget = gen_reg_rtx (GET_MODE (value));
5454      emit_move_insn (subtarget, value);
5455      return subtarget;
5456    }
5457
5458  if (GET_CODE (value) == PLUS)
5459    binoptab = add_optab;
5460  else if (GET_CODE (value) == MINUS)
5461    binoptab = sub_optab;
5462  else if (GET_CODE (value) == MULT)
5463    {
5464      op2 = XEXP (value, 1);
5465      if (!CONSTANT_P (op2)
5466	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5467	subtarget = 0;
5468      tmp = force_operand (XEXP (value, 0), subtarget);
5469      return expand_mult (GET_MODE (value), tmp,
5470			  force_operand (op2, NULL_RTX),
5471			  target, 1);
5472    }
5473
5474  if (binoptab)
5475    {
5476      op2 = XEXP (value, 1);
5477      if (!CONSTANT_P (op2)
5478	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5479	subtarget = 0;
5480      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5481	{
5482	  binoptab = add_optab;
5483	  op2 = negate_rtx (GET_MODE (value), op2);
5484	}
5485
5486      /* Check for an addition with OP2 a constant integer and our first
5487	 operand a PLUS of a virtual register and something else.  In that
5488	 case, we want to emit the sum of the virtual register and the
5489	 constant first and then add the other value.  This allows virtual
5490	 register instantiation to simply modify the constant rather than
5491	 creating another one around this addition.  */
5492      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5493	  && GET_CODE (XEXP (value, 0)) == PLUS
5494	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5495	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5496	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5497	{
5498	  rtx temp = expand_binop (GET_MODE (value), binoptab,
5499				   XEXP (XEXP (value, 0), 0), op2,
5500				   subtarget, 0, OPTAB_LIB_WIDEN);
5501	  return expand_binop (GET_MODE (value), binoptab, temp,
5502			       force_operand (XEXP (XEXP (value, 0), 1), 0),
5503			       target, 0, OPTAB_LIB_WIDEN);
5504	}
5505
5506      tmp = force_operand (XEXP (value, 0), subtarget);
5507      return expand_binop (GET_MODE (value), binoptab, tmp,
5508			   force_operand (op2, NULL_RTX),
5509			   target, 0, OPTAB_LIB_WIDEN);
5510      /* We give UNSIGNEDP = 0 to expand_binop
5511	 because the only operations we are expanding here are signed ones.  */
5512    }
5513
5514#ifdef INSN_SCHEDULING
5515	  /* On machines that have insn scheduling, we want all memory references to be
5516	     explicit, so we need to handle paradoxical SUBREGs of MEMs here.  */
5517  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5518      && (GET_MODE_SIZE (GET_MODE (value))
5519	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5520    value
5521      = simplify_gen_subreg (GET_MODE (value),
5522			     force_reg (GET_MODE (SUBREG_REG (value)),
5523					force_operand (SUBREG_REG (value),
5524						       NULL_RTX)),
5525			     GET_MODE (SUBREG_REG (value)),
5526			     SUBREG_BYTE (value));
5527#endif
5528
5529  return value;
5530}
5531
5532/* Subroutine of expand_expr: return nonzero iff there is no way that
5533   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5534   call is going to be used to determine whether we need a temporary
5535   for EXP, as opposed to a recursive call to this function.
5536
5537   It is always safe for this routine to return zero since it merely
5538   searches for optimization opportunities.  */
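/* A hypothetical use: before expanding the right-hand side of an assignment
   into a destination rtx X, a caller may test safe_from_p (x, rhs, 1); a zero
   result merely forces a temporary to be used, it never causes wrong code.  */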
5539
5540int
5541safe_from_p (x, exp, top_p)
5542     rtx x;
5543     tree exp;
5544     int top_p;
5545{
5546  rtx exp_rtl = 0;
5547  int i, nops;
5548  static tree save_expr_list;
5549
5550  if (x == 0
5551      /* If EXP has varying size, we MUST use a target since we currently
5552	 have no way of allocating temporaries of variable size
5553	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5554	 So we assume here that something at a higher level has prevented a
5555	 clash.  This is somewhat bogus, but the best we can do.  Only
5556	 do this when X is BLKmode and when we are at the top level.  */
5557      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5558	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5559	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5560	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5561	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5562	      != INTEGER_CST)
5563	  && GET_MODE (x) == BLKmode)
5564      /* If X is in the outgoing argument area, it is always safe.  */
5565      || (GET_CODE (x) == MEM
5566	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5567	      || (GET_CODE (XEXP (x, 0)) == PLUS
5568		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5569    return 1;
5570
5571  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5572     find the underlying pseudo.  */
5573  if (GET_CODE (x) == SUBREG)
5574    {
5575      x = SUBREG_REG (x);
5576      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5577	return 0;
5578    }
5579
5580  /* A SAVE_EXPR might appear many times in the expression passed to the
5581     top-level safe_from_p call, and if it has a complex subexpression,
5582     examining it multiple times could result in a combinatorial explosion.
5583     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5584     with optimization took about 28 minutes to compile -- even though it was
5585     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5586     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5587     we have processed.  Note that the only test of top_p was above.  */
5588
5589  if (top_p)
5590    {
5591      int rtn;
5592      tree t;
5593
5594      save_expr_list = 0;
5595
5596      rtn = safe_from_p (x, exp, 0);
5597
5598      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5599	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5600
5601      return rtn;
5602    }
5603
5604  /* Now look at our tree code and possibly recurse.  */
5605  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5606    {
5607    case 'd':
5608      exp_rtl = DECL_RTL_IF_SET (exp);
5609      break;
5610
5611    case 'c':
5612      return 1;
5613
5614    case 'x':
5615      if (TREE_CODE (exp) == TREE_LIST)
5616	return ((TREE_VALUE (exp) == 0
5617		 || safe_from_p (x, TREE_VALUE (exp), 0))
5618		&& (TREE_CHAIN (exp) == 0
5619		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5620      else if (TREE_CODE (exp) == ERROR_MARK)
5621	return 1;	/* An already-visited SAVE_EXPR? */
5622      else
5623	return 0;
5624
5625    case '1':
5626      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5627
5628    case '2':
5629    case '<':
5630      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5631	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5632
5633    case 'e':
5634    case 'r':
5635      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5636	 the expression.  If it is set, we conflict iff we are that rtx or
5637	 both are in memory.  Otherwise, we check all operands of the
5638	 expression recursively.  */
5639
5640      switch (TREE_CODE (exp))
5641	{
5642	case ADDR_EXPR:
5643	  /* If the operand is static or we are static, we can't conflict.
5644	     Likewise if we don't conflict with the operand at all.  */
5645	  if (staticp (TREE_OPERAND (exp, 0))
5646	      || TREE_STATIC (exp)
5647	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5648	    return 1;
5649
5650	  /* Otherwise, the only way this can conflict is if we are taking
5651	     the address of a DECL and that address is part of X, which is
5652	     very rare.  */
5653	  exp = TREE_OPERAND (exp, 0);
5654	  if (DECL_P (exp))
5655	    {
5656	      if (!DECL_RTL_SET_P (exp)
5657		  || GET_CODE (DECL_RTL (exp)) != MEM)
5658		return 0;
5659	      else
5660		exp_rtl = XEXP (DECL_RTL (exp), 0);
5661	    }
5662	  break;
5663
5664	case INDIRECT_REF:
5665	  if (GET_CODE (x) == MEM
5666	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5667					get_alias_set (exp)))
5668	    return 0;
5669	  break;
5670
5671	case CALL_EXPR:
5672	  /* Assume that the call will clobber all hard registers and
5673	     all of memory.  */
5674	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5675	      || GET_CODE (x) == MEM)
5676	    return 0;
5677	  break;
5678
5679	case RTL_EXPR:
5680	  /* If a sequence exists, we would have to scan every instruction
5681	     in the sequence to see if it was safe.  This is probably not
5682	     worthwhile.  */
5683	  if (RTL_EXPR_SEQUENCE (exp))
5684	    return 0;
5685
5686	  exp_rtl = RTL_EXPR_RTL (exp);
5687	  break;
5688
5689	case WITH_CLEANUP_EXPR:
5690	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5691	  break;
5692
5693	case CLEANUP_POINT_EXPR:
5694	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5695
5696	case SAVE_EXPR:
5697	  exp_rtl = SAVE_EXPR_RTL (exp);
5698	  if (exp_rtl)
5699	    break;
5700
5701	  /* If we've already scanned this, don't do it again.  Otherwise,
5702	     show we've scanned it and record for clearing the flag if we're
5703	     going on.  */
5704	  if (TREE_PRIVATE (exp))
5705	    return 1;
5706
5707	  TREE_PRIVATE (exp) = 1;
5708	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5709	    {
5710	      TREE_PRIVATE (exp) = 0;
5711	      return 0;
5712	    }
5713
5714	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5715	  return 1;
5716
5717	case BIND_EXPR:
5718	  /* The only operand we look at is operand 1.  The rest aren't
5719	     part of the expression.  */
5720	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5721
5722	case METHOD_CALL_EXPR:
5723	  /* This takes an rtx argument, but shouldn't appear here.  */
5724	  abort ();
5725
5726	default:
5727	  break;
5728	}
5729
5730      /* If we have an rtx, we do not need to scan our operands.  */
5731      if (exp_rtl)
5732	break;
5733
5734      nops = first_rtl_op (TREE_CODE (exp));
5735      for (i = 0; i < nops; i++)
5736	if (TREE_OPERAND (exp, i) != 0
5737	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5738	  return 0;
5739
5740      /* If this is a language-specific tree code, it may require
5741	 special handling.  */
5742      if ((unsigned int) TREE_CODE (exp)
5743	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5744	  && !(*lang_hooks.safe_from_p) (x, exp))
5745	return 0;
5746    }
5747
5748  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5749     with it.  */
5750  if (exp_rtl)
5751    {
5752      if (GET_CODE (exp_rtl) == SUBREG)
5753	{
5754	  exp_rtl = SUBREG_REG (exp_rtl);
5755	  if (GET_CODE (exp_rtl) == REG
5756	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5757	    return 0;
5758	}
5759
5760      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5761	 are memory and they conflict.  */
5762      return ! (rtx_equal_p (x, exp_rtl)
5763		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5764		    && true_dependence (exp_rtl, VOIDmode, x,
5765					rtx_addr_varies_p)));
5766    }
5767
5768  /* If we reach here, it is safe.  */
5769  return 1;
5770}
5771
5772/* Subroutine of expand_expr: return rtx if EXP is a
5773   variable or parameter; else return 0.  */
5774
5775static rtx
5776var_rtx (exp)
5777     tree exp;
5778{
5779  STRIP_NOPS (exp);
5780  switch (TREE_CODE (exp))
5781    {
5782    case PARM_DECL:
5783    case VAR_DECL:
5784      return DECL_RTL (exp);
5785    default:
5786      return 0;
5787    }
5788}
5789
5790#ifdef MAX_INTEGER_COMPUTATION_MODE
5791
5792void
5793check_max_integer_computation_mode (exp)
5794     tree exp;
5795{
5796  enum tree_code code;
5797  enum machine_mode mode;
5798
5799  /* Strip any NOPs that don't change the mode.  */
5800  STRIP_NOPS (exp);
5801  code = TREE_CODE (exp);
5802
5803  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5804  if (code == NOP_EXPR
5805      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5806    return;
5807
5808  /* First check the type of the overall operation.   We need only look at
5809     unary, binary and relational operations.  */
5810  if (TREE_CODE_CLASS (code) == '1'
5811      || TREE_CODE_CLASS (code) == '2'
5812      || TREE_CODE_CLASS (code) == '<')
5813    {
5814      mode = TYPE_MODE (TREE_TYPE (exp));
5815      if (GET_MODE_CLASS (mode) == MODE_INT
5816	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5817	internal_error ("unsupported wide integer operation");
5818    }
5819
5820  /* Check operand of a unary op.  */
5821  if (TREE_CODE_CLASS (code) == '1')
5822    {
5823      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5824      if (GET_MODE_CLASS (mode) == MODE_INT
5825	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5826	internal_error ("unsupported wide integer operation");
5827    }
5828
5829  /* Check operands of a binary/comparison op.  */
5830  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5831    {
5832      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5833      if (GET_MODE_CLASS (mode) == MODE_INT
5834	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5835	internal_error ("unsupported wide integer operation");
5836
5837      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5838      if (GET_MODE_CLASS (mode) == MODE_INT
5839	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5840	internal_error ("unsupported wide integer operation");
5841    }
5842}
5843#endif
5844
5845/* Return the highest power of two that EXP is known to be a multiple of.
5846   This is used in updating alignment of MEMs in array references.  */
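/* For example, an index expression such as i * 12 (hypothetical) has a
   highest known power-of-two factor of 4: the variable factor contributes 1
   and the constant 12 contributes 4.  */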
5847
5848static HOST_WIDE_INT
5849highest_pow2_factor (exp)
5850     tree exp;
5851{
5852  HOST_WIDE_INT c0, c1;
5853
5854  switch (TREE_CODE (exp))
5855    {
5856    case INTEGER_CST:
5857      /* We can find the lowest bit that's a one.  If the low
5858	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5859	 We need to handle this case since we can find it in a COND_EXPR,
5860	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5861	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5862	 later ICE.  */
5863      if (TREE_CONSTANT_OVERFLOW (exp))
5864	return BIGGEST_ALIGNMENT;
5865      else
5866	{
5867	  /* Note: tree_low_cst is intentionally not used here,
5868	     we don't care about the upper bits.  */
5869	  c0 = TREE_INT_CST_LOW (exp);
5870	  c0 &= -c0;
5871	  return c0 ? c0 : BIGGEST_ALIGNMENT;
5872	}
5873      break;
5874
5875    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
5876      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5877      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5878      return MIN (c0, c1);
5879
5880    case MULT_EXPR:
5881      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5882      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5883      return c0 * c1;
5884
5885    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
5886    case CEIL_DIV_EXPR:
5887      if (integer_pow2p (TREE_OPERAND (exp, 1))
5888	  && host_integerp (TREE_OPERAND (exp, 1), 1))
5889	{
5890	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5891	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5892	  return MAX (1, c0 / c1);
5893	}
5894      break;
5895
5896    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
5897    case SAVE_EXPR: case WITH_RECORD_EXPR:
5898      return highest_pow2_factor (TREE_OPERAND (exp, 0));
5899
5900    case COMPOUND_EXPR:
5901      return highest_pow2_factor (TREE_OPERAND (exp, 1));
5902
5903    case COND_EXPR:
5904      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5905      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5906      return MIN (c0, c1);
5907
5908    default:
5909      break;
5910    }
5911
5912  return 1;
5913}
5914
5915/* Similar, except that it is known that the expression must be a multiple
5916   of the alignment of TYPE.  */
5917
5918static HOST_WIDE_INT
5919highest_pow2_factor_for_type (type, exp)
5920     tree type;
5921     tree exp;
5922{
5923  HOST_WIDE_INT type_align, factor;
5924
5925  factor = highest_pow2_factor (exp);
5926  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
5927  return MAX (factor, type_align);
5928}
5929
5930/* Return an object on the placeholder list that matches EXP, a
5931   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
5932   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
5933   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
5934   is a location which initially points to a starting location in the
5935   placeholder list (zero means start of the list) and where a pointer into
5936   the placeholder list at which the object is found is placed.  */
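/* For instance (hypothetical), if EXP is a PLACEHOLDER_EXPR of some record
   type and the list contains an object of that type, that object is returned;
   if only a pointer to the type is found, an INDIRECT_REF of that pointer is
   built and returned instead.  */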
5937
5938tree
5939find_placeholder (exp, plist)
5940     tree exp;
5941     tree *plist;
5942{
5943  tree type = TREE_TYPE (exp);
5944  tree placeholder_expr;
5945
5946  for (placeholder_expr
5947       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5948       placeholder_expr != 0;
5949       placeholder_expr = TREE_CHAIN (placeholder_expr))
5950    {
5951      tree need_type = TYPE_MAIN_VARIANT (type);
5952      tree elt;
5953
5954      /* Find the outermost reference that is of the type we want.  If none,
5955	 see if any object has a type that is a pointer to the type we
5956	 want.  */
5957      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5958	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5959		   || TREE_CODE (elt) == COND_EXPR)
5960		  ? TREE_OPERAND (elt, 1)
5961		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5962		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5963		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5964		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5965		  ? TREE_OPERAND (elt, 0) : 0))
5966	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5967	  {
5968	    if (plist)
5969	      *plist = placeholder_expr;
5970	    return elt;
5971	  }
5972
5973      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5974	   elt
5975	   = ((TREE_CODE (elt) == COMPOUND_EXPR
5976	       || TREE_CODE (elt) == COND_EXPR)
5977	      ? TREE_OPERAND (elt, 1)
5978	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5979		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5980		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5981		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5982	      ? TREE_OPERAND (elt, 0) : 0))
5983	if (POINTER_TYPE_P (TREE_TYPE (elt))
5984	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5985		== need_type))
5986	  {
5987	    if (plist)
5988	      *plist = placeholder_expr;
5989	    return build1 (INDIRECT_REF, need_type, elt);
5990	  }
5991    }
5992
5993  return 0;
5994}
5995
5996/* expand_expr: generate code for computing expression EXP.
5997   An rtx for the computed value is returned.  The value is never null.
5998   In the case of a void EXP, const0_rtx is returned.
5999
6000   The value may be stored in TARGET if TARGET is nonzero.
6001   TARGET is just a suggestion; callers must assume that
6002   the rtx returned may not be the same as TARGET.
6003
6004   If TARGET is CONST0_RTX, it means that the value will be ignored.
6005
6006   If TMODE is not VOIDmode, it suggests generating the
6007   result in mode TMODE.  But this is done only when convenient.
6008   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6009   TMODE is just a suggestion; callers must assume that
6010   the rtx returned may not have mode TMODE.
6011
6012   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6013   probably will not be used.
6014
6015   If MODIFIER is EXPAND_SUM then when EXP is an addition
6016   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6017   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6018   products as above, or REG or MEM, or constant.
6019   Ordinarily in such cases we would output mul or add instructions
6020   and then return a pseudo reg containing the sum.
6021
6022   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6023   it also marks a label as absolutely required (it can't be dead).
6024   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6025   This is used for outputting expressions used in initializers.
6026
6027   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6028   with a constant address even if that address is not normally legitimate.
6029   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
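/* A minimal hypothetical call, expanding an expression purely for its value:

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   leaves both the target and the mode up to expand_expr.  */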
6030
6031rtx
6032expand_expr (exp, target, tmode, modifier)
6033     tree exp;
6034     rtx target;
6035     enum machine_mode tmode;
6036     enum expand_modifier modifier;
6037{
6038  rtx op0, op1, temp;
6039  tree type = TREE_TYPE (exp);
6040  int unsignedp = TREE_UNSIGNED (type);
6041  enum machine_mode mode;
6042  enum tree_code code = TREE_CODE (exp);
6043  optab this_optab;
6044  rtx subtarget, original_target;
6045  int ignore;
6046  tree context;
6047
6048  /* Handle ERROR_MARK before anybody tries to access its type.  */
6049  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6050    {
6051      op0 = CONST0_RTX (tmode);
6052      if (op0 != 0)
6053	return op0;
6054      return const0_rtx;
6055    }
6056
6057  mode = TYPE_MODE (type);
6058  /* Use subtarget as the target for operand 0 of a binary operation.  */
6059  subtarget = get_subtarget (target);
6060  original_target = target;
6061  ignore = (target == const0_rtx
6062	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6063		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6064		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6065		&& TREE_CODE (type) == VOID_TYPE));
6066
6067  /* If we are going to ignore this result, we need only do something
6068     if there is a side-effect somewhere in the expression.  If there
6069     is, short-circuit the most common cases here.  Note that we must
6070     not call expand_expr with anything but const0_rtx in case this
6071     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6072
6073  if (ignore)
6074    {
6075      if (! TREE_SIDE_EFFECTS (exp))
6076	return const0_rtx;
6077
6078      /* Ensure we reference a volatile object even if value is ignored, but
6079	 don't do this if all we are doing is taking its address.  */
6080      if (TREE_THIS_VOLATILE (exp)
6081	  && TREE_CODE (exp) != FUNCTION_DECL
6082	  && mode != VOIDmode && mode != BLKmode
6083	  && modifier != EXPAND_CONST_ADDRESS)
6084	{
6085	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6086	  if (GET_CODE (temp) == MEM)
6087	    temp = copy_to_reg (temp);
6088	  return const0_rtx;
6089	}
6090
6091      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6092	  || code == INDIRECT_REF || code == BUFFER_REF)
6093	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6094			    modifier);
6095
6096      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6097	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6098	{
6099	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6100	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6101	  return const0_rtx;
6102	}
6103      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6104	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6105	/* If the second operand has no side effects, just evaluate
6106	   the first.  */
6107	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6108			    modifier);
6109      else if (code == BIT_FIELD_REF)
6110	{
6111	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6112	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6113	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6114	  return const0_rtx;
6115	}
6116
6117      target = 0;
6118    }
6119
6120#ifdef MAX_INTEGER_COMPUTATION_MODE
6121  /* Only check stuff here if the mode we want is different from the mode
6122     of the expression; if it's the same, check_max_integer_computation_mode
6123     will handle it.  Do we really need to check this stuff at all?  */
6124
6125  if (target
6126      && GET_MODE (target) != mode
6127      && TREE_CODE (exp) != INTEGER_CST
6128      && TREE_CODE (exp) != PARM_DECL
6129      && TREE_CODE (exp) != ARRAY_REF
6130      && TREE_CODE (exp) != ARRAY_RANGE_REF
6131      && TREE_CODE (exp) != COMPONENT_REF
6132      && TREE_CODE (exp) != BIT_FIELD_REF
6133      && TREE_CODE (exp) != INDIRECT_REF
6134      && TREE_CODE (exp) != CALL_EXPR
6135      && TREE_CODE (exp) != VAR_DECL
6136      && TREE_CODE (exp) != RTL_EXPR)
6137    {
6138      enum machine_mode mode = GET_MODE (target);
6139
6140      if (GET_MODE_CLASS (mode) == MODE_INT
6141	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6142	internal_error ("unsupported wide integer operation");
6143    }
6144
6145  if (tmode != mode
6146      && TREE_CODE (exp) != INTEGER_CST
6147      && TREE_CODE (exp) != PARM_DECL
6148      && TREE_CODE (exp) != ARRAY_REF
6149      && TREE_CODE (exp) != ARRAY_RANGE_REF
6150      && TREE_CODE (exp) != COMPONENT_REF
6151      && TREE_CODE (exp) != BIT_FIELD_REF
6152      && TREE_CODE (exp) != INDIRECT_REF
6153      && TREE_CODE (exp) != VAR_DECL
6154      && TREE_CODE (exp) != CALL_EXPR
6155      && TREE_CODE (exp) != RTL_EXPR
6156      && GET_MODE_CLASS (tmode) == MODE_INT
6157      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6158    internal_error ("unsupported wide integer operation");
6159
6160  check_max_integer_computation_mode (exp);
6161#endif
6162
6163  /* If we will do cse, generate all results into pseudo registers
6164     since 1) that allows cse to find more things
6165     and 2) otherwise cse could produce an insn the machine
6166     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6167     MEM: that's much more likely to be most efficient into the MEM.  */
6168
6169  if (! cse_not_expected && mode != BLKmode && target
6170      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6171      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6172    target = subtarget;
6173
6174  switch (code)
6175    {
6176    case LABEL_DECL:
6177      {
6178	tree function = decl_function_context (exp);
6179	/* Handle using a label in a containing function.  */
6180	if (function != current_function_decl
6181	    && function != inline_function_decl && function != 0)
6182	  {
6183	    struct function *p = find_function_data (function);
6184	    p->expr->x_forced_labels
6185	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6186				   p->expr->x_forced_labels);
6187	  }
6188	else
6189	  {
6190	    if (modifier == EXPAND_INITIALIZER)
6191	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6192						 label_rtx (exp),
6193						 forced_labels);
6194	  }
6195
6196	temp = gen_rtx_MEM (FUNCTION_MODE,
6197			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6198	if (function != current_function_decl
6199	    && function != inline_function_decl && function != 0)
6200	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6201	return temp;
6202      }
6203
6204    case PARM_DECL:
6205      if (DECL_RTL (exp) == 0)
6206	{
6207	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6208	  return CONST0_RTX (mode);
6209	}
6210
6211      /* ... fall through ...  */
6212
6213    case VAR_DECL:
6214      /* If a static var's type was incomplete when the decl was written,
6215	 but the type is complete now, lay out the decl now.  */
6216      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6217	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6218	{
6219	  rtx value = DECL_RTL_IF_SET (exp);
6220
6221	  layout_decl (exp, 0);
6222
6223	  /* If the RTL was already set, update its mode and memory
6224	     attributes.  */
6225	  if (value != 0)
6226	    {
6227	      PUT_MODE (value, DECL_MODE (exp));
6228	      SET_DECL_RTL (exp, 0);
6229	      set_mem_attributes (value, exp, 1);
6230	      SET_DECL_RTL (exp, value);
6231	    }
6232	}
6233
6234      /* ... fall through ...  */
6235
6236    case FUNCTION_DECL:
6237    case RESULT_DECL:
6238      if (DECL_RTL (exp) == 0)
6239	abort ();
6240
6241      /* Ensure the variable is marked as used even if it doesn't go through
6242	 a parser.  If it hasn't been used yet, write out an external
6243	 definition.  */
6244      if (! TREE_USED (exp))
6245	{
6246	  assemble_external (exp);
6247	  TREE_USED (exp) = 1;
6248	}
6249
6250      /* Show we haven't gotten RTL for this yet.  */
6251      temp = 0;
6252
6253      /* Handle variables inherited from containing functions.  */
6254      context = decl_function_context (exp);
6255
6256      /* We treat inline_function_decl as an alias for the current function
6257	 because that is the inline function whose vars, types, etc.
6258	 are being merged into the current function.
6259	 See expand_inline_function.  */
6260
6261      if (context != 0 && context != current_function_decl
6262	  && context != inline_function_decl
6263	  /* If var is static, we don't need a static chain to access it.  */
6264	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6265		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6266	{
6267	  rtx addr;
6268
6269	  /* Mark as non-local and addressable.  */
6270	  DECL_NONLOCAL (exp) = 1;
6271	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6272	    abort ();
6273	  mark_addressable (exp);
6274	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6275	    abort ();
6276	  addr = XEXP (DECL_RTL (exp), 0);
6277	  if (GET_CODE (addr) == MEM)
6278	    addr
6279	      = replace_equiv_address (addr,
6280				       fix_lexical_addr (XEXP (addr, 0), exp));
6281	  else
6282	    addr = fix_lexical_addr (addr, exp);
6283
6284	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6285	}
6286
6287      /* This is the case of an array whose size is to be determined
6288	 from its initializer, while the initializer is still being parsed.
6289	 See expand_decl.  */
6290
6291      else if (GET_CODE (DECL_RTL (exp)) == MEM
6292	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6293	temp = validize_mem (DECL_RTL (exp));
6294
6295      /* If DECL_RTL is memory, we are in the normal case: if either
6296	 the address is not valid, or it is not a register and -fforce-addr
6297	 is specified, get the address into a register.  */
6298
6299      else if (GET_CODE (DECL_RTL (exp)) == MEM
6300	       && modifier != EXPAND_CONST_ADDRESS
6301	       && modifier != EXPAND_SUM
6302	       && modifier != EXPAND_INITIALIZER
6303	       && (! memory_address_p (DECL_MODE (exp),
6304				       XEXP (DECL_RTL (exp), 0))
6305		   || (flag_force_addr
6306		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6307	temp = replace_equiv_address (DECL_RTL (exp),
6308				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6309
6310      /* If we got something, return it.  But first, set the alignment
6311	 if the address is a register.  */
6312      if (temp != 0)
6313	{
6314	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6315	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6316
6317	  return temp;
6318	}
6319
6320      /* If the mode of DECL_RTL does not match that of the decl, it
6321	 must be a promoted value.  We return a SUBREG of the wanted mode,
6322	 but mark it so that we know that it was already extended.  */
6323
6324      if (GET_CODE (DECL_RTL (exp)) == REG
6325	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6326	{
6327	  /* Get the signedness used for this variable.  Ensure we get the
6328	     same mode we got when the variable was declared.  */
6329	  if (GET_MODE (DECL_RTL (exp))
6330	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6331			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6332	    abort ();
6333
6334	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6335	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6336	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6337	  return temp;
6338	}
6339
6340      return DECL_RTL (exp);
6341
6342    case INTEGER_CST:
6343      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6344				 TREE_INT_CST_HIGH (exp), mode);
6345
6346      /* ??? If overflow is set, fold will have done an incomplete job,
6347	 which can result in (plus xx (const_int 0)), which can get
6348	 simplified by validate_replace_rtx during virtual register
6349	 instantiation, which can result in unrecognizable insns.
6350	 Avoid this by forcing all overflows into registers.  */
6351      if (TREE_CONSTANT_OVERFLOW (exp)
6352	  && modifier != EXPAND_INITIALIZER)
6353	temp = force_reg (mode, temp);
6354
6355      return temp;
6356
6357    case CONST_DECL:
6358      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6359
6360    case REAL_CST:
6361      /* If optimized, generate immediate CONST_DOUBLE
6362	 which will be turned into memory by reload if necessary.
6363
6364	 We used to force a register so that loop.c could see it.  But
6365	 this does not allow gen_* patterns to perform optimizations with
6366	 the constants.  It also produces two insns in cases like "x = 1.0;".
6367	 On most machines, floating-point constants are not permitted in
6368	 many insns, so we'd end up copying it to a register in any case.
6369
6370	 Now, we do the copying in expand_binop, if appropriate.  */
6371      return immed_real_const (exp);
6372
6373    case COMPLEX_CST:
6374    case STRING_CST:
6375      if (! TREE_CST_RTL (exp))
6376	output_constant_def (exp, 1);
6377
6378      /* TREE_CST_RTL probably contains a constant address.
6379	 On RISC machines where a constant address isn't valid,
6380	 make some insns to get that address into a register.  */
6381      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6382	  && modifier != EXPAND_CONST_ADDRESS
6383	  && modifier != EXPAND_INITIALIZER
6384	  && modifier != EXPAND_SUM
6385	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6386	      || (flag_force_addr
6387		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6388	return replace_equiv_address (TREE_CST_RTL (exp),
6389				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6390      return TREE_CST_RTL (exp);
6391
6392    case EXPR_WITH_FILE_LOCATION:
6393      {
6394	rtx to_return;
6395	const char *saved_input_filename = input_filename;
6396	int saved_lineno = lineno;
6397	input_filename = EXPR_WFL_FILENAME (exp);
6398	lineno = EXPR_WFL_LINENO (exp);
6399	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6400	  emit_line_note (input_filename, lineno);
6401	/* Possibly avoid switching back and forth here.  */
6402	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6403	input_filename = saved_input_filename;
6404	lineno = saved_lineno;
6405	return to_return;
6406      }
6407
6408    case SAVE_EXPR:
6409      context = decl_function_context (exp);
6410
6411      /* If this SAVE_EXPR was at global context, assume we are an
6412	 initialization function and move it into our context.  */
6413      if (context == 0)
6414	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6415
6416      /* We treat inline_function_decl as an alias for the current function
6417	 because that is the inline function whose vars, types, etc.
6418	 are being merged into the current function.
6419	 See expand_inline_function.  */
6420      if (context == current_function_decl || context == inline_function_decl)
6421	context = 0;
6422
6423      /* If this is non-local, handle it.  */
6424      if (context)
6425	{
6426	  /* The following call just exists to abort if the context is
6427	     not of a containing function.  */
6428	  find_function_data (context);
6429
6430	  temp = SAVE_EXPR_RTL (exp);
6431	  if (temp && GET_CODE (temp) == REG)
6432	    {
6433	      put_var_into_stack (exp);
6434	      temp = SAVE_EXPR_RTL (exp);
6435	    }
6436	  if (temp == 0 || GET_CODE (temp) != MEM)
6437	    abort ();
6438	  return
6439	    replace_equiv_address (temp,
6440				   fix_lexical_addr (XEXP (temp, 0), exp));
6441	}
6442      if (SAVE_EXPR_RTL (exp) == 0)
6443	{
6444	  if (mode == VOIDmode)
6445	    temp = const0_rtx;
6446	  else
6447	    temp = assign_temp (build_qualified_type (type,
6448						      (TYPE_QUALS (type)
6449						       | TYPE_QUAL_CONST)),
6450				3, 0, 0);
6451
6452	  SAVE_EXPR_RTL (exp) = temp;
6453	  if (!optimize && GET_CODE (temp) == REG)
6454	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6455						save_expr_regs);
6456
6457	  /* If the mode of TEMP does not match that of the expression, it
6458	     must be a promoted value.  We pass store_expr a SUBREG of the
6459	     wanted mode but mark it so that we know that it was already
6460	     extended.  Note that `unsignedp' was modified above in
6461	     this case.  */
6462
6463	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6464	    {
6465	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6466	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6467	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6468	    }
6469
6470	  if (temp == const0_rtx)
6471	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6472	  else
6473	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6474
6475	  TREE_USED (exp) = 1;
6476	}
6477
6478      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6479	 must be a promoted value.  We return a SUBREG of the wanted mode,
6480	 but mark it so that we know that it was already extended.  */
6481
6482      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6483	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6484	{
6485	  /* Compute the signedness and make the proper SUBREG.  */
6486	  promote_mode (type, mode, &unsignedp, 0);
6487	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6488	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6489	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6490	  return temp;
6491	}
6492
6493      return SAVE_EXPR_RTL (exp);
6494
6495    case UNSAVE_EXPR:
6496      {
6497	rtx temp;
6498	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6499	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6500	return temp;
6501      }
6502
6503    case PLACEHOLDER_EXPR:
6504      {
6505	tree old_list = placeholder_list;
6506	tree placeholder_expr = 0;
6507
6508	exp = find_placeholder (exp, &placeholder_expr);
6509	if (exp == 0)
6510	  abort ();
6511
6512	placeholder_list = TREE_CHAIN (placeholder_expr);
6513	temp = expand_expr (exp, original_target, tmode, modifier);
6514	placeholder_list = old_list;
6515	return temp;
6516      }
6517
6518      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6519      abort ();
6520
6521    case WITH_RECORD_EXPR:
6522      /* Put the object on the placeholder list, expand our first operand,
6523	 and pop the list.  */
6524      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6525				    placeholder_list);
6526      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6527			    modifier);
6528      placeholder_list = TREE_CHAIN (placeholder_list);
6529      return target;
6530
6531    case GOTO_EXPR:
6532      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6533	expand_goto (TREE_OPERAND (exp, 0));
6534      else
6535	expand_computed_goto (TREE_OPERAND (exp, 0));
6536      return const0_rtx;
6537
6538    case EXIT_EXPR:
6539      expand_exit_loop_if_false (NULL,
6540				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6541      return const0_rtx;
6542
6543    case LABELED_BLOCK_EXPR:
6544      if (LABELED_BLOCK_BODY (exp))
6545	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6546      /* Should perhaps use expand_label, but this is simpler and safer.  */
6547      do_pending_stack_adjust ();
6548      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6549      return const0_rtx;
6550
6551    case EXIT_BLOCK_EXPR:
6552      if (EXIT_BLOCK_RETURN (exp))
6553	sorry ("returned value in block_exit_expr");
6554      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6555      return const0_rtx;
6556
6557    case LOOP_EXPR:
6558      push_temp_slots ();
6559      expand_start_loop (1);
6560      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6561      expand_end_loop ();
6562      pop_temp_slots ();
6563
6564      return const0_rtx;
6565
6566    case BIND_EXPR:
6567      {
6568	tree vars = TREE_OPERAND (exp, 0);
6569	int vars_need_expansion = 0;
6570
6571	/* Need to open a binding contour here because
6572	   if there are any cleanups they must be contained here.  */
6573	expand_start_bindings (2);
6574
6575	/* Mark the corresponding BLOCK for output in its proper place.  */
6576	if (TREE_OPERAND (exp, 2) != 0
6577	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6578	  insert_block (TREE_OPERAND (exp, 2));
6579
6580	/* If VARS have not yet been expanded, expand them now.  */
6581	while (vars)
6582	  {
6583	    if (!DECL_RTL_SET_P (vars))
6584	      {
6585		vars_need_expansion = 1;
6586		expand_decl (vars);
6587	      }
6588	    expand_decl_init (vars);
6589	    vars = TREE_CHAIN (vars);
6590	  }
6591
6592	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6593
6594	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6595
6596	return temp;
6597      }
6598
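      /* An RTL_EXPR carries an already-generated insn sequence together with
	 a result rtx.  Emit the sequence at most once; it is replaced by
	 const0_rtx afterwards so that a second expansion aborts.  */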
6599    case RTL_EXPR:
6600      if (RTL_EXPR_SEQUENCE (exp))
6601	{
6602	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6603	    abort ();
6604	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6605	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6606	}
6607      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6608      free_temps_for_rtl_expr (exp);
6609      return RTL_EXPR_RTL (exp);
6610
6611    case CONSTRUCTOR:
6612      /* If we don't need the result, just ensure we evaluate any
6613	 subexpressions.  */
6614      if (ignore)
6615	{
6616	  tree elt;
6617
6618	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6619	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6620
6621	  return const0_rtx;
6622	}
6623
6624      /* All elts simple constants => refer to a constant in memory.  But
6625	 if this is a non-BLKmode mode, let it store a field at a time
6626	 since that should make a CONST_INT or CONST_DOUBLE when we
6627	 fold.  Likewise, if we have a target we can use, it is best to
6628	 store directly into the target unless the type is large enough
6629	 that memcpy will be used.  If we are making an initializer and
6630	 all operands are constant, put it in memory as well.  */
6631      else if ((TREE_STATIC (exp)
6632		&& ((mode == BLKmode
6633		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6634		    || TREE_ADDRESSABLE (exp)
6635		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6636			&& (! MOVE_BY_PIECES_P
6637			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6638			     TYPE_ALIGN (type)))
6639			&& ! mostly_zeros_p (exp))))
6640	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6641	{
6642	  rtx constructor = output_constant_def (exp, 1);
6643
6644	  if (modifier != EXPAND_CONST_ADDRESS
6645	      && modifier != EXPAND_INITIALIZER
6646	      && modifier != EXPAND_SUM)
6647	    constructor = validize_mem (constructor);
6648
6649	  return constructor;
6650	}
6651      else
6652	{
6653	  /* Handle calls that pass values in multiple non-contiguous
6654	     locations.  The Irix 6 ABI has examples of this.  */
6655	  if (target == 0 || ! safe_from_p (target, exp, 1)
6656	      || GET_CODE (target) == PARALLEL)
6657	    target
6658	      = assign_temp (build_qualified_type (type,
6659						   (TYPE_QUALS (type)
6660						    | (TREE_READONLY (exp)
6661						       * TYPE_QUAL_CONST))),
6662			     0, TREE_ADDRESSABLE (exp), 1);
6663
6664	  store_constructor (exp, target, 0, int_expr_size (exp));
6665	  return target;
6666	}
6667
6668    case INDIRECT_REF:
6669      {
6670	tree exp1 = TREE_OPERAND (exp, 0);
6671	tree index;
6672	tree string = string_constant (exp1, &index);
6673
6674	/* Try to optimize reads from const strings.  */
6675 	if (string
6676 	    && TREE_CODE (string) == STRING_CST
6677 	    && TREE_CODE (index) == INTEGER_CST
6678	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6679 	    && GET_MODE_CLASS (mode) == MODE_INT
6680 	    && GET_MODE_SIZE (mode) == 1
6681	    && modifier != EXPAND_WRITE)
6682 	  return
6683	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (string)
6684					 [TREE_INT_CST_LOW (index)], mode));
6685
6686	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6687	op0 = memory_address (mode, op0);
6688	temp = gen_rtx_MEM (mode, op0);
6689	set_mem_attributes (temp, exp, 0);
6690
6691	/* If we are writing to this object and its type is a record with
6692	   readonly fields, we must mark it as readonly so it will
6693	   conflict with readonly references to those fields.  */
6694	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6695	  RTX_UNCHANGING_P (temp) = 1;
6696
6697	return temp;
6698      }
6699
6700    case ARRAY_REF:
6701      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6702	abort ();
6703
6704      {
6705	tree array = TREE_OPERAND (exp, 0);
6706	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6707	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6708	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6709	HOST_WIDE_INT i;
6710
6711	/* Optimize the special-case of a zero lower bound.
6712
6713	   We convert the low_bound to sizetype to avoid some problems
6714	   with constant folding.  (E.g. suppose the lower bound is 1,
6715	   and its mode is QI.  Without the conversion,  (ARRAY
6716	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6717	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6718
6719	if (! integer_zerop (low_bound))
6720	  index = size_diffop (index, convert (sizetype, low_bound));
6721
6722	/* Fold an expression like: "foo"[2].
6723	   This is not done in fold so it won't happen inside &.
6724	   Don't fold if this is for wide characters since it's too
6725	   difficult to do correctly and this is a very rare case.  */
6726
6727	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6728	    && TREE_CODE (array) == STRING_CST
6729	    && TREE_CODE (index) == INTEGER_CST
6730	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6731	    && GET_MODE_CLASS (mode) == MODE_INT
6732	    && GET_MODE_SIZE (mode) == 1)
6733	  return
6734	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (array)
6735					 [TREE_INT_CST_LOW (index)], mode));
6736
6737	/* If this is a constant index into a constant array,
6738	   just get the value from the array.  Handle both the cases when
6739	   we have an explicit constructor and when our operand is a variable
6740	   that was declared const.  */
6741
6742	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6743	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6744	    && TREE_CODE (index) == INTEGER_CST
6745	    && 0 > compare_tree_int (index,
6746				     list_length (CONSTRUCTOR_ELTS
6747						  (TREE_OPERAND (exp, 0)))))
6748	  {
6749	    tree elem;
6750
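	    /* Skip I elements down the constructor's value list; ELEM ends
	       up at the indexed element, or at 0 if the index is past the
	       last element given explicitly.  */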
6751	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6752		 i = TREE_INT_CST_LOW (index);
6753		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6754	      ;
6755
6756	    if (elem)
6757	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6758				  modifier);
6759	  }
6760
6761	else if (optimize >= 1
6762		 && modifier != EXPAND_CONST_ADDRESS
6763		 && modifier != EXPAND_INITIALIZER
6764		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6765		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6766		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6767	  {
6768	    if (TREE_CODE (index) == INTEGER_CST)
6769	      {
6770		tree init = DECL_INITIAL (array);
6771
6772		if (TREE_CODE (init) == CONSTRUCTOR)
6773		  {
6774		    tree elem;
6775
6776		    for (elem = CONSTRUCTOR_ELTS (init);
6777			 (elem
6778			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6779			 elem = TREE_CHAIN (elem))
6780		      ;
6781
6782		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6783		      return expand_expr (fold (TREE_VALUE (elem)), target,
6784					  tmode, modifier);
6785		  }
6786		else if (TREE_CODE (init) == STRING_CST
6787			 && 0 > compare_tree_int (index,
6788						  TREE_STRING_LENGTH (init)))
6789		  {
6790		    tree type = TREE_TYPE (TREE_TYPE (init));
6791		    enum machine_mode mode = TYPE_MODE (type);
6792
6793		    if (GET_MODE_CLASS (mode) == MODE_INT
6794			&& GET_MODE_SIZE (mode) == 1)
6795		      return GEN_INT (trunc_int_for_mode
6796				      (TREE_STRING_POINTER (init)
6797				       [TREE_INT_CST_LOW (index)], mode));
6798		  }
6799	      }
6800	  }
6801      }
6802      /* Fall through.  */
6803
6804    case COMPONENT_REF:
6805    case BIT_FIELD_REF:
6806    case ARRAY_RANGE_REF:
6807      /* If the operand is a CONSTRUCTOR, we can just extract the
6808	 appropriate field if it is present.  Don't do this if we have
6809	 already written the data since we want to refer to that copy
6810	 and varasm.c assumes that's what we'll do.  */
6811      if (code == COMPONENT_REF
6812	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6813	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6814	{
6815	  tree elt;
6816
6817	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6818	       elt = TREE_CHAIN (elt))
6819	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6820		/* We can normally use the value of the field in the
6821		   CONSTRUCTOR.  However, if this is a bitfield in
6822		   an integral mode that we can fit in a HOST_WIDE_INT,
6823		   we must mask only the number of bits in the bitfield,
6824		   since this is done implicitly by the constructor.  If
6825		   the bitfield does not meet either of those conditions,
6826		   we can't do this optimization.  */
6827		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6828		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6829			 == MODE_INT)
6830			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6831			    <= HOST_BITS_PER_WIDE_INT))))
6832	      {
6833		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6834		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6835		  {
6836		    HOST_WIDE_INT bitsize
6837		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6838		    enum machine_mode imode
6839		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6840
6841		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6842		      {
6843			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6844			op0 = expand_and (imode, op0, op1, target);
6845		      }
6846		    else
6847		      {
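			/* The field is signed: shift it to the top of IMODE
			   and arithmetic-shift it back down, sign-extending
			   the BITSIZE-bit value.  */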
6848			tree count
6849			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6850					 0);
6851
6852			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6853					    target, 0);
6854			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6855					    target, 0);
6856		      }
6857		  }
6858
6859		return op0;
6860	      }
6861	}
6862
6863      {
6864	enum machine_mode mode1;
6865	HOST_WIDE_INT bitsize, bitpos;
6866	tree offset;
6867	int volatilep = 0;
6868	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6869					&mode1, &unsignedp, &volatilep);
6870	rtx orig_op0;
6871
6872	/* If we got back the original object, something is wrong.  Perhaps
6873	   we are evaluating an expression too early.  In any event, don't
6874	   infinitely recurse.  */
6875	if (tem == exp)
6876	  abort ();
6877
6878	/* If TEM's type is a union of variable size, pass TARGET to the inner
6879	   computation, since it will need a temporary and TARGET is known
6880	   to suffice for that.  This occurs in unchecked conversion in Ada.  */
6881
6882	orig_op0 = op0
6883	  = expand_expr (tem,
6884			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6885			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6886			      != INTEGER_CST)
6887			  ? target : NULL_RTX),
6888			 VOIDmode,
6889			 (modifier == EXPAND_INITIALIZER
6890			  || modifier == EXPAND_CONST_ADDRESS)
6891			 ? modifier : EXPAND_NORMAL);
6892
6893	/* If this is a constant, put it into a register if it is a
6894	   legitimate constant and OFFSET is 0; otherwise put it into memory.  */
6895	if (CONSTANT_P (op0))
6896	  {
6897	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6898	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6899		&& offset == 0)
6900	      op0 = force_reg (mode, op0);
6901	    else
6902	      op0 = validize_mem (force_const_mem (mode, op0));
6903	  }
6904
6905	if (offset != 0)
6906	  {
6907	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6908
6909	    /* If this object is in a register, put it into memory.
6910	       This case can't occur in C, but can in Ada if we have
6911	       unchecked conversion of an expression from a scalar type to
6912	       an array or record type.  */
6913	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6914		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6915	      {
6916		/* If the operand is a SAVE_EXPR, we can deal with this by
6917		   forcing the SAVE_EXPR into memory.  */
6918		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6919		  {
6920		    put_var_into_stack (TREE_OPERAND (exp, 0));
6921		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6922		  }
6923		else
6924		  {
6925		    tree nt
6926		      = build_qualified_type (TREE_TYPE (tem),
6927					      (TYPE_QUALS (TREE_TYPE (tem))
6928					       | TYPE_QUAL_CONST));
6929		    rtx memloc = assign_temp (nt, 1, 1, 1);
6930
6931		    emit_move_insn (memloc, op0);
6932		    op0 = memloc;
6933		  }
6934	      }
6935
6936	    if (GET_CODE (op0) != MEM)
6937	      abort ();
6938
6939#ifdef POINTERS_EXTEND_UNSIGNED
6940	    if (GET_MODE (offset_rtx) != Pmode)
6941	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
6942#else
6943	    if (GET_MODE (offset_rtx) != ptr_mode)
6944	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6945#endif
6946
6947	    /* A constant address in OP0 can have VOIDmode; we must not try
6948	       to call force_reg in that case, so avoid it here.  */
6949	    if (GET_CODE (op0) == MEM
6950		&& GET_MODE (op0) == BLKmode
6951		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6952		&& bitsize != 0
6953		&& (bitpos % bitsize) == 0
6954		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6955		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6956	      {
6957		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6958		bitpos = 0;
6959	      }
6960
6961	    op0 = offset_address (op0, offset_rtx,
6962				  highest_pow2_factor (offset));
6963	  }
6964
6965	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6966	   record its alignment as BIGGEST_ALIGNMENT.  */
6967	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
6968	    && is_aligning_offset (offset, tem))
6969	  set_mem_align (op0, BIGGEST_ALIGNMENT);
6970
6971	/* Don't forget about volatility even if this is a bitfield.  */
6972	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6973	  {
6974	    if (op0 == orig_op0)
6975	      op0 = copy_rtx (op0);
6976
6977	    MEM_VOLATILE_P (op0) = 1;
6978	  }
6979
6980	/* The following code doesn't handle CONCAT.
6981	   Assume only bitpos == 0 can be used for CONCAT, due to
6982	   one-element arrays having the same mode as their element.  */
6983	if (GET_CODE (op0) == CONCAT)
6984	  {
6985	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6986	      abort ();
6987	    return op0;
6988	  }
6989
6990	/* In cases where an aligned union has an unaligned object
6991	   as a field, we might be extracting a BLKmode value from
6992	   an integer-mode (e.g., SImode) object.  Handle this case
6993	   by doing the extract into an object as wide as the field
6994	   (which we know to be the width of a basic mode), then
6995	   storing into memory, and changing the mode to BLKmode.  */
6996	if (mode1 == VOIDmode
6997	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6998	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
6999		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7000		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7001		&& modifier != EXPAND_CONST_ADDRESS
7002		&& modifier != EXPAND_INITIALIZER)
7003	    /* If the field isn't aligned enough to fetch as a memref,
7004	       fetch it as a bit field.  */
7005	    || (mode1 != BLKmode
7006		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7007		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7008		     < GET_MODE_ALIGNMENT (mode))
7009		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7010	    /* If the type and the field are a constant size and the
7011	       size of the type isn't the same size as the bitfield,
7012	       we must use bitfield operations.  */
7013	    || (bitsize >= 0
7014		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7015		    == INTEGER_CST)
7016		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7017					  bitsize)))
7018	  {
7019	    enum machine_mode ext_mode = mode;
7020
7021	    if (ext_mode == BLKmode
7022		&& ! (target != 0 && GET_CODE (op0) == MEM
7023		      && GET_CODE (target) == MEM
7024		      && bitpos % BITS_PER_UNIT == 0))
7025	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7026
7027	    if (ext_mode == BLKmode)
7028	      {
7029		/* In this case, BITPOS must start at a byte boundary and
7030		   TARGET, if specified, must be a MEM.  */
7031		if (GET_CODE (op0) != MEM
7032		    || (target != 0 && GET_CODE (target) != MEM)
7033		    || bitpos % BITS_PER_UNIT != 0)
7034		  abort ();
7035
7036		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7037		if (target == 0)
7038		  target = assign_temp (type, 0, 1, 1);
7039
7040		emit_block_move (target, op0,
7041				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7042					  / BITS_PER_UNIT));
7043
7044		return target;
7045	      }
7046
7047	    op0 = validize_mem (op0);
7048
7049	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7050	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7051
7052	    op0 = extract_bit_field (op0, bitsize, bitpos,
7053				     unsignedp, target, ext_mode, ext_mode,
7054				     int_size_in_bytes (TREE_TYPE (tem)));
7055
7056	    /* If the result is a record type and BITSIZE is narrower than
7057	       the mode of OP0, an integral mode, and this is a big endian
7058	       machine, we must put the field into the high-order bits.  */
7059	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7060		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7061		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7062	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7063				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7064					    - bitsize),
7065				  op0, 1);
7066
7067	    if (mode == BLKmode)
7068	      {
7069		rtx new = assign_temp (build_qualified_type
7070				       (type_for_mode (ext_mode, 0),
7071					TYPE_QUAL_CONST), 0, 1, 1);
7072
7073		emit_move_insn (new, op0);
7074		op0 = copy_rtx (new);
7075		PUT_MODE (op0, BLKmode);
7076		set_mem_attributes (op0, exp, 1);
7077	      }
7078
7079	    return op0;
7080	  }
7081
7082	/* If the result is BLKmode, use that to access the object
7083	   now as well.  */
7084	if (mode == BLKmode)
7085	  mode1 = BLKmode;
7086
7087	/* Get a reference to just this component.  */
7088	if (modifier == EXPAND_CONST_ADDRESS
7089	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7090	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7091	else
7092	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7093
7094	if (op0 == orig_op0)
7095	  op0 = copy_rtx (op0);
7096
7097	set_mem_attributes (op0, exp, 0);
7098	if (GET_CODE (XEXP (op0, 0)) == REG)
7099	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7100
7101	MEM_VOLATILE_P (op0) |= volatilep;
7102	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7103	    || modifier == EXPAND_CONST_ADDRESS
7104	    || modifier == EXPAND_INITIALIZER)
7105	  return op0;
7106	else if (target == 0)
7107	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7108
7109	convert_move (target, op0, unsignedp);
7110	return target;
7111      }
7112
7113    case VTABLE_REF:
7114      {
7115	rtx insn, before = get_last_insn (), vtbl_ref;
7116
7117	/* Evaluate the interior expression.  */
7118	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7119				 tmode, modifier);
7120
7121	/* Get or create an instruction off which to hang a note.  */
7122	if (REG_P (subtarget))
7123	  {
7124	    target = subtarget;
7125	    insn = get_last_insn ();
7126	    if (insn == before)
7127	      abort ();
7128	    if (! INSN_P (insn))
7129	      insn = prev_nonnote_insn (insn);
7130	  }
7131	else
7132	  {
7133	    target = gen_reg_rtx (GET_MODE (subtarget));
7134	    insn = emit_move_insn (target, subtarget);
7135	  }
7136
7137	/* Collect the data for the note.  */
7138	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7139	vtbl_ref = plus_constant (vtbl_ref,
7140				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7141	/* Discard the initial CONST that was added.  */
7142	vtbl_ref = XEXP (vtbl_ref, 0);
7143
7144	REG_NOTES (insn)
7145	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7146
7147	return target;
7148      }
7149
7150      /* Intended for a reference to a buffer of a file-object in Pascal.
7151	 But it's not certain that a special tree code will really be
7152	 necessary for these.  INDIRECT_REF might work for them.  */
7153    case BUFFER_REF:
7154      abort ();
7155
7156    case IN_EXPR:
7157      {
7158	/* Pascal set IN expression.
7159
7160	   Algorithm:
7161	       rlo       = set_low - (set_low%bits_per_word);
7162	       the_word  = set [ (index - rlo)/bits_per_word ];
7163	       bit_index = index % bits_per_word;
7164	       bitmask   = 1 << bit_index;
7165	       return !!(the_word & bitmask);  */
7166
7167	tree set = TREE_OPERAND (exp, 0);
7168	tree index = TREE_OPERAND (exp, 1);
7169	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7170	tree set_type = TREE_TYPE (set);
7171	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7172	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7173	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7174	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7175	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7176	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7177	rtx setaddr = XEXP (setval, 0);
7178	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7179	rtx rlow;
7180	rtx diff, quo, rem, addr, bit, result;
7181
7182	/* If domain is empty, answer is no.  Likewise if index is constant
7183	   and out of bounds.  */
7184	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7185	     && TREE_CODE (set_low_bound) == INTEGER_CST
7186	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7187	     || (TREE_CODE (index) == INTEGER_CST
7188		 && TREE_CODE (set_low_bound) == INTEGER_CST
7189		 && tree_int_cst_lt (index, set_low_bound))
7190	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7191		 && TREE_CODE (index) == INTEGER_CST
7192		 && tree_int_cst_lt (set_high_bound, index))))
7193	  return const0_rtx;
7194
7195	if (target == 0)
7196	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7197
7198	/* If we get here, we have to generate the code for both cases
7199	   (in range and out of range).  */
7200
7201	op0 = gen_label_rtx ();
7202	op1 = gen_label_rtx ();
7203
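	/* OP1 is the out-of-range label.  Branch to it if INDEX is below the
	   set's lower bound or above its upper bound; either check is omitted
	   when both values are compile-time constants, since the constant
	   out-of-range cases were rejected above.  */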
7204	if (! (GET_CODE (index_val) == CONST_INT
7205	       && GET_CODE (lo_r) == CONST_INT))
7206	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7207				   GET_MODE (index_val), iunsignedp, op1);
7208
7209	if (! (GET_CODE (index_val) == CONST_INT
7210	       && GET_CODE (hi_r) == CONST_INT))
7211	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7212				   GET_MODE (index_val), iunsignedp, op1);
7213
7214	/* Calculate the element number of bit zero in the first word
7215	   of the set.  */
7216	if (GET_CODE (lo_r) == CONST_INT)
7217	  rlow = GEN_INT (INTVAL (lo_r)
7218			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7219	else
7220	  rlow = expand_binop (index_mode, and_optab, lo_r,
7221			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7222			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7223
7224	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7225			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7226
7227	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7228			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7229	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7230			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7231
7232	addr = memory_address (byte_mode,
7233			       expand_binop (index_mode, add_optab, diff,
7234					     setaddr, NULL_RTX, iunsignedp,
7235					     OPTAB_LIB_WIDEN));
7236
7237	/* Extract the bit we want to examine.  */
7238	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7239			    gen_rtx_MEM (byte_mode, addr),
7240			    make_tree (TREE_TYPE (index), rem),
7241			    NULL_RTX, 1);
7242	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7243			       GET_MODE (target) == byte_mode ? target : 0,
7244			       1, OPTAB_LIB_WIDEN);
7245
7246	if (result != target)
7247	  convert_move (target, result, 1);
7248
7249	/* Output the code to handle the out-of-range case.  */
7250	emit_jump (op0);
7251	emit_label (op1);
7252	emit_move_insn (target, const0_rtx);
7253	emit_label (op0);
7254	return target;
7255      }
7256
7257    case WITH_CLEANUP_EXPR:
7258      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7259	{
7260	  WITH_CLEANUP_EXPR_RTL (exp)
7261	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7262	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7263
7264	  /* That's it for this cleanup.  */
7265	  TREE_OPERAND (exp, 1) = 0;
7266	}
7267      return WITH_CLEANUP_EXPR_RTL (exp);
7268
7269    case CLEANUP_POINT_EXPR:
7270      {
7271	/* Start a new binding layer that will keep track of all cleanup
7272	   actions to be performed.  */
7273	expand_start_bindings (2);
7274
7275	target_temp_slot_level = temp_slot_level;
7276
7277	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7278	/* If we're going to use this value, load it up now.  */
7279	if (! ignore)
7280	  op0 = force_not_mem (op0);
7281	preserve_temp_slots (op0);
7282	expand_end_bindings (NULL_TREE, 0, 0);
7283      }
7284      return op0;
7285
7286    case CALL_EXPR:
7287      /* Check for a built-in function.  */
7288      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7289	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7290	      == FUNCTION_DECL)
7291	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7292        {
7293	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7294	      == BUILT_IN_FRONTEND)
7295	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7296	  else
7297	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7298	}
7299
7300      return expand_call (exp, target, ignore);
7301
7302    case NON_LVALUE_EXPR:
7303    case NOP_EXPR:
7304    case CONVERT_EXPR:
7305    case REFERENCE_EXPR:
7306      if (TREE_OPERAND (exp, 0) == error_mark_node)
7307	return const0_rtx;
7308
7309      if (TREE_CODE (type) == UNION_TYPE)
7310	{
7311	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7312
7313	  /* If both input and output are BLKmode, this conversion isn't doing
7314	     anything except possibly changing the memory attributes.  */
7315	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7316	    {
7317	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7318					modifier);
7319
7320	      result = copy_rtx (result);
7321	      set_mem_attributes (result, exp, 0);
7322	      return result;
7323	    }
7324
7325	  if (target == 0)
7326	    target = assign_temp (type, 0, 1, 1);
7327
7328	  if (GET_CODE (target) == MEM)
7329	    /* Store data into beginning of memory target.  */
7330	    store_expr (TREE_OPERAND (exp, 0),
7331			adjust_address (target, TYPE_MODE (valtype), 0), 0);
7332
7333	  else if (GET_CODE (target) == REG)
7334	    /* Store this field into a union of the proper type.  */
7335	    store_field (target,
7336			 MIN ((int_size_in_bytes (TREE_TYPE
7337						  (TREE_OPERAND (exp, 0)))
7338			       * BITS_PER_UNIT),
7339			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7340			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7341			 VOIDmode, 0, type, 0);
7342	  else
7343	    abort ();
7344
7345	  /* Return the entire union.  */
7346	  return target;
7347	}
7348
7349      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7350	{
7351	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7352			     modifier);
7353
7354	  /* If the signedness of the conversion differs and OP0 is
7355	     a promoted SUBREG, clear that indication since we now
7356	     have to do the proper extension.  */
7357	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7358	      && GET_CODE (op0) == SUBREG)
7359	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7360
7361	  return op0;
7362	}
7363
7364      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7365      if (GET_MODE (op0) == mode)
7366	return op0;
7367
7368      /* If OP0 is a constant, just convert it into the proper mode.  */
7369      if (CONSTANT_P (op0))
7370	{
7371	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7372	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7373
7374          if (modifier == EXPAND_INITIALIZER)
7375	    return simplify_gen_subreg (mode, op0, inner_mode,
7376					subreg_lowpart_offset (mode,
7377							       inner_mode));
7378	  else
7379	    return convert_modes (mode, inner_mode, op0,
7380				  TREE_UNSIGNED (inner_type));
7381	}
7382
7383      if (modifier == EXPAND_INITIALIZER)
7384	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7385
7386      if (target == 0)
7387	return
7388	  convert_to_mode (mode, op0,
7389			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7390      else
7391	convert_move (target, op0,
7392		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7393      return target;
7394
7395    case VIEW_CONVERT_EXPR:
7396      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7397
7398      /* If the input and output modes are both the same, we are done.
7399	 Otherwise, if neither mode is BLKmode and both are within a word, we
7400	 can use gen_lowpart.  If neither is true, make sure the operand is
7401	 in memory and convert the MEM to the new mode.  */
7402      if (TYPE_MODE (type) == GET_MODE (op0))
7403	;
7404      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7405	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7406	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7407	op0 = gen_lowpart (TYPE_MODE (type), op0);
7408      else if (GET_CODE (op0) != MEM)
7409	{
7410	  /* If the operand is not a MEM, force it into memory.  Since we
7411	     are going to be changing the mode of the MEM, don't call
7412	     force_const_mem for constants because we don't allow pool
7413	     constants to change mode.  */
7414	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7415
7416	  if (TREE_ADDRESSABLE (exp))
7417	    abort ();
7418
7419	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7420	    target
7421	      = assign_stack_temp_for_type
7422		(TYPE_MODE (inner_type),
7423		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7424
7425	  emit_move_insn (target, op0);
7426	  op0 = target;
7427	}
7428
7429      /* At this point, OP0 is in the correct mode.  If the output type is such
7430	 that the operand is known to be aligned, indicate that it is.
7431	 Otherwise, we need only be concerned about alignment for non-BLKmode
7432	 results.  */
7433      if (GET_CODE (op0) == MEM)
7434	{
7435	  op0 = copy_rtx (op0);
7436
7437	  if (TYPE_ALIGN_OK (type))
7438	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7439	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7440		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7441	    {
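	      /* On a strict-alignment target the MEM is not aligned enough
		 for the new mode; copy it into a stack temporary that is.  */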
7442	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7443	      HOST_WIDE_INT temp_size
7444		= MAX (int_size_in_bytes (inner_type),
7445		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7446	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7447						    temp_size, 0, type);
7448	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7449
7450	      if (TREE_ADDRESSABLE (exp))
7451		abort ();
7452
7453	      if (GET_MODE (op0) == BLKmode)
7454		emit_block_move (new_with_op0_mode, op0,
7455				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7456	      else
7457		emit_move_insn (new_with_op0_mode, op0);
7458
7459	      op0 = new;
7460	    }
7461
7462	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7463	}
7464
7465      return op0;
7466
7467    case PLUS_EXPR:
7468      /* We come here from MINUS_EXPR when the second operand is a
7469         constant.  */
7470    plus_expr:
7471      this_optab = ! unsignedp && flag_trapv
7472                   && (GET_MODE_CLASS (mode) == MODE_INT)
7473                   ? addv_optab : add_optab;
7474
7475      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7476	 something else, make sure we add the register to the constant and
7477	 then to the other thing.  This case can occur during strength
7478	 reduction and doing it this way will produce better code if the
7479	 frame pointer or argument pointer is eliminated.
7480
7481	 fold-const.c will ensure that the constant is always in the inner
7482	 PLUS_EXPR, so the only case we need to do anything about is if
7483	 sp, ap, or fp is our second argument, in which case we must swap
7484	 the innermost first argument and our second argument.  */
7485
7486      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7487	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7488	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7489	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7490	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7491	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7492	{
7493	  tree t = TREE_OPERAND (exp, 1);
7494
7495	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7496	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7497	}
7498
7499      /* If the result is to be ptr_mode and we are adding an integer to
7500	 something, we might be forming a constant.  So try to use
7501	 plus_constant.  If it produces a sum and we can't accept it,
7502	 use force_operand.  This allows P = &ARR[const] to generate
7503	 efficient code on machines where a SYMBOL_REF is not a valid
7504	 address.
7505
7506	 If this is an EXPAND_SUM call, always return the sum.  */
7507      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7508          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7509	{
7510	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7511	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7512	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7513	    {
7514	      rtx constant_part;
7515
7516	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7517				 EXPAND_SUM);
7518	      /* Use immed_double_const to ensure that the constant is
7519		 truncated according to the mode of OP1, then sign extended
7520		 to a HOST_WIDE_INT.  Using the constant directly can result
7521		 in non-canonical RTL in a 64x32 cross compile.  */
7522	      constant_part
7523		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7524				      (HOST_WIDE_INT) 0,
7525				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7526	      op1 = plus_constant (op1, INTVAL (constant_part));
7527	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7528		op1 = force_operand (op1, target);
7529	      return op1;
7530	    }
7531
7532	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7533		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7534		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7535	    {
7536	      rtx constant_part;
7537
7538	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7539				 (modifier == EXPAND_INITIALIZER
7540				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7541	      if (! CONSTANT_P (op0))
7542		{
7543		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7544				     VOIDmode, modifier);
7545		  /* Don't go to both_summands if modifier
7546		     says it's not right to return a PLUS.  */
7547		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7548		    goto binop2;
7549		  goto both_summands;
7550		}
7551	      /* Use immed_double_const to ensure that the constant is
7552		 truncated according to the mode of OP1, then sign extended
7553		 to a HOST_WIDE_INT.  Using the constant directly can result
7554		 in non-canonical RTL in a 64x32 cross compile.  */
7555	      constant_part
7556		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7557				      (HOST_WIDE_INT) 0,
7558				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7559	      op0 = plus_constant (op0, INTVAL (constant_part));
7560	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7561		op0 = force_operand (op0, target);
7562	      return op0;
7563	    }
7564	}
7565
7566      /* No sense saving up arithmetic to be done
7567	 if it's all in the wrong mode to form part of an address.
7568	 And force_operand won't know whether to sign-extend or
7569	 zero-extend.  */
7570      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7571	  || mode != ptr_mode)
7572	goto binop;
7573
7574      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7575	subtarget = 0;
7576
7577      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7578      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7579
7580    both_summands:
7581      /* Make sure any term that's a sum with a constant comes last.  */
7582      if (GET_CODE (op0) == PLUS
7583	  && CONSTANT_P (XEXP (op0, 1)))
7584	{
7585	  temp = op0;
7586	  op0 = op1;
7587	  op1 = temp;
7588	}
7589      /* If adding to a sum including a constant,
7590	 associate it to put the constant outside.  */
7591      if (GET_CODE (op1) == PLUS
7592	  && CONSTANT_P (XEXP (op1, 1)))
7593	{
7594	  rtx constant_term = const0_rtx;
7595
7596	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7597	  if (temp != 0)
7598	    op0 = temp;
7599	  /* Ensure that MULT comes first if there is one.  */
7600	  else if (GET_CODE (op0) == MULT)
7601	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7602	  else
7603	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7604
7605	  /* Let's also eliminate constants from op0 if possible.  */
7606	  op0 = eliminate_constant_term (op0, &constant_term);
7607
7608	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7609	     their sum should be a constant.  Form it into OP1, since the
7610	     result we want will then be OP0 + OP1.  */
7611
7612	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7613					    XEXP (op1, 1));
7614	  if (temp != 0)
7615	    op1 = temp;
7616	  else
7617	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7618	}
7619
7620      /* Put a constant term last and put a multiplication first.  */
7621      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7622	temp = op1, op1 = op0, op0 = temp;
7623
7624      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7625      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7626
7627    case MINUS_EXPR:
7628      /* For initializers, we are allowed to return a MINUS of two
7629	 symbolic constants.  Here we handle all cases when both operands
7630	 are constant.  */
7631      /* Handle difference of two symbolic constants,
7632	 for the sake of an initializer.  */
7633      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7634	  && really_constant_p (TREE_OPERAND (exp, 0))
7635	  && really_constant_p (TREE_OPERAND (exp, 1)))
7636	{
7637	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7638				 modifier);
7639	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7640				 modifier);
7641
7642	  /* If the last operand is a CONST_INT, use plus_constant of
7643	     the negated constant.  Else make the MINUS.  */
7644	  if (GET_CODE (op1) == CONST_INT)
7645	    return plus_constant (op0, - INTVAL (op1));
7646	  else
7647	    return gen_rtx_MINUS (mode, op0, op1);
7648	}
7649      /* Convert A - const to A + (-const).  */
7650      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7651	{
7652	  tree negated = fold (build1 (NEGATE_EXPR, type,
7653				       TREE_OPERAND (exp, 1)));
7654
7655	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7656	    /* If we can't negate the constant in TYPE, leave it alone and
7657	       expand_binop will negate it for us.  We used to try to do it
7658	       here in the signed version of TYPE, but that doesn't work
7659	       on POINTER_TYPEs.  */;
7660	  else
7661	    {
7662	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7663	      goto plus_expr;
7664	    }
7665	}
7666      this_optab = ! unsignedp && flag_trapv
7667                   && (GET_MODE_CLASS(mode) == MODE_INT)
7668                   ? subv_optab : sub_optab;
7669      goto binop;
7670
7671    case MULT_EXPR:
7672      /* If first operand is constant, swap them.
7673	 Thus the following special case checks need only
7674	 check the second operand.  */
7675      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7676	{
7677	  tree t1 = TREE_OPERAND (exp, 0);
7678	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7679	  TREE_OPERAND (exp, 1) = t1;
7680	}
7681
7682      /* Attempt to return something suitable for generating an
7683	 indexed address, for machines that support that.  */
7684
7685      if (modifier == EXPAND_SUM && mode == ptr_mode
7686	  && host_integerp (TREE_OPERAND (exp, 1), 0))
7687	{
7688	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7689			     EXPAND_SUM);
7690
7691	  /* If we knew for certain that this is arithmetic for an array
7692	     reference, and we knew the bounds of the array, then we could
7693	     apply the distributive law across (PLUS X C) for constant C.
7694	     Without such knowledge, we risk overflowing the computation
7695	     when both X and C are large, but X+C isn't.  */
7696	  /* ??? Could perhaps special-case EXP being unsigned and C being
7697	     positive.  In that case we are certain that X+C is no smaller
7698	     than X and so the transformed expression will overflow iff the
7699	     original would have.  */
7700
7701	  if (GET_CODE (op0) != REG)
7702	    op0 = force_operand (op0, NULL_RTX);
7703	  if (GET_CODE (op0) != REG)
7704	    op0 = copy_to_mode_reg (mode, op0);
7705
7706	  return
7707	    gen_rtx_MULT (mode, op0,
7708			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7709	}
7710
7711      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7712	subtarget = 0;
7713
7714      /* Check for multiplying things that have been extended
7715	 from a narrower type.  If this machine supports multiplying
7716	 in that narrower type with a result in the desired type,
7717	 do it that way, and avoid the explicit type-conversion.  */
7718      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7719	  && TREE_CODE (type) == INTEGER_TYPE
7720	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7721	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7722	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7723	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7724				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7725	       /* Don't use a widening multiply if a shift will do.  */
7726	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7727		    > HOST_BITS_PER_WIDE_INT)
7728		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7729	      ||
7730	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7731	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7732		   ==
7733		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7734	       /* If both operands are extended, they must either both
7735		  be zero-extended or both be sign-extended.  */
7736	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7737		   ==
7738		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7739	{
7740	  enum machine_mode innermode
7741	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7742	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7743			? smul_widen_optab : umul_widen_optab);
7744	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7745			? umul_widen_optab : smul_widen_optab);
7746	  if (mode == GET_MODE_WIDER_MODE (innermode))
7747	    {
7748	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7749		{
7750		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7751				     NULL_RTX, VOIDmode, 0);
7752		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7753		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7754				       VOIDmode, 0);
7755		  else
7756		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7757				       NULL_RTX, VOIDmode, 0);
7758		  goto binop2;
7759		}
7760	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7761		       && innermode == word_mode)
7762		{
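		  /* A widening multiply with the desired signedness is not
		     available, but the opposite-signedness one is; use it
		     and then correct the high half of the product.  */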
7763		  rtx htem;
7764		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7765				     NULL_RTX, VOIDmode, 0);
7766		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7767		    op1 = convert_modes (innermode, mode,
7768					 expand_expr (TREE_OPERAND (exp, 1),
7769						      NULL_RTX, VOIDmode, 0),
7770					 unsignedp);
7771		  else
7772		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7773				       NULL_RTX, VOIDmode, 0);
7774		  temp = expand_binop (mode, other_optab, op0, op1, target,
7775				       unsignedp, OPTAB_LIB_WIDEN);
7776		  htem = expand_mult_highpart_adjust (innermode,
7777						      gen_highpart (innermode, temp),
7778						      op0, op1,
7779						      gen_highpart (innermode, temp),
7780						      unsignedp);
7781		  emit_move_insn (gen_highpart (innermode, temp), htem);
7782		  return temp;
7783		}
7784	    }
7785	}
7786      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7787      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7788      return expand_mult (mode, op0, op1, target, unsignedp);
7789
7790    case TRUNC_DIV_EXPR:
7791    case FLOOR_DIV_EXPR:
7792    case CEIL_DIV_EXPR:
7793    case ROUND_DIV_EXPR:
7794    case EXACT_DIV_EXPR:
7795      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7796	subtarget = 0;
7797      /* Possible optimization: compute the dividend with EXPAND_SUM
7798	 then if the divisor is constant can optimize the case
7799	 where some terms of the dividend have coeffs divisible by it.  */
7800      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7801      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7802      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7803
7804    case RDIV_EXPR:
7805      /* Emit a/b as a*(1/b).  Later, CSE may be able to reuse the reciprocal,
7806         saving an expensive divide.  If not, combine will rebuild the original
7807         computation.  */
7808      if (flag_unsafe_math_optimizations && optimize && !optimize_size
7809	  && TREE_CODE (type) == REAL_TYPE
7810	  && !real_onep (TREE_OPERAND (exp, 0)))
7811        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7812				   build (RDIV_EXPR, type,
7813					  build_real (type, dconst1),
7814					  TREE_OPERAND (exp, 1))),
7815			    target, tmode, unsignedp);
7816      this_optab = sdiv_optab;
7817      goto binop;
7818
7819    case TRUNC_MOD_EXPR:
7820    case FLOOR_MOD_EXPR:
7821    case CEIL_MOD_EXPR:
7822    case ROUND_MOD_EXPR:
7823      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7824	subtarget = 0;
7825      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7826      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7827      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7828
7829    case FIX_ROUND_EXPR:
7830    case FIX_FLOOR_EXPR:
7831    case FIX_CEIL_EXPR:
7832      abort ();			/* Not used for C.  */
7833
7834    case FIX_TRUNC_EXPR:
7835      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7836      if (target == 0)
7837	target = gen_reg_rtx (mode);
7838      expand_fix (target, op0, unsignedp);
7839      return target;
7840
7841    case FLOAT_EXPR:
7842      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7843      if (target == 0)
7844	target = gen_reg_rtx (mode);
7845      /* expand_float can't figure out what to do if FROM has VOIDmode.
7846	 So give it the correct mode.  With -O, cse will optimize this.  */
7847      if (GET_MODE (op0) == VOIDmode)
7848	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7849				op0);
7850      expand_float (target, op0,
7851		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7852      return target;
7853
7854    case NEGATE_EXPR:
7855      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7856      temp = expand_unop (mode,
7857                          ! unsignedp && flag_trapv
7858                          && (GET_MODE_CLASS(mode) == MODE_INT)
7859                          ? negv_optab : neg_optab, op0, target, 0);
7860      if (temp == 0)
7861	abort ();
7862      return temp;
7863
7864    case ABS_EXPR:
7865      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7866
7867      /* Handle complex values specially.  */
7868      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7869	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7870	return expand_complex_abs (mode, op0, target, unsignedp);
7871
7872      /* Unsigned abs is simply the operand.  Testing here means we don't
7873	 risk generating incorrect code below.  */
7874      if (TREE_UNSIGNED (type))
7875	return op0;
7876
7877      return expand_abs (mode, op0, target, unsignedp,
7878			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7879
7880    case MAX_EXPR:
7881    case MIN_EXPR:
7882      target = original_target;
7883      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7884	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7885	  || GET_MODE (target) != mode
7886	  || (GET_CODE (target) == REG
7887	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7888	target = gen_reg_rtx (mode);
7889      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7890      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7891
7892      /* First try to do it with a special MIN or MAX instruction.
7893	 If that does not win, use a conditional jump to select the proper
7894	 value.  */
7895      this_optab = (TREE_UNSIGNED (type)
7896		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7897		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7898
7899      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7900			   OPTAB_WIDEN);
7901      if (temp != 0)
7902	return temp;
7903
7904      /* At this point, a MEM target is no longer useful; we will get better
7905	 code without it.  */
7906
7907      if (GET_CODE (target) == MEM)
7908	target = gen_reg_rtx (mode);
7909
7910      if (target != op0)
7911	emit_move_insn (target, op0);
7912
7913      op0 = gen_label_rtx ();
7914
7915      /* If this mode is an integer too wide to compare properly,
7916	 compare word by word.  Rely on cse to optimize constant cases.  */
7917      if (GET_MODE_CLASS (mode) == MODE_INT
7918	  && ! can_compare_p (GE, mode, ccp_jump))
7919	{
7920	  if (code == MAX_EXPR)
7921	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7922					  target, op1, NULL_RTX, op0);
7923	  else
7924	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7925					  op1, target, NULL_RTX, op0);
7926	}
7927      else
7928	{
7929	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7930	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7931				   unsignedp, mode, NULL_RTX, NULL_RTX,
7932				   op0);
7933	}
7934      emit_move_insn (target, op1);
7935      emit_label (op0);
7936      return target;
7937
7938    case BIT_NOT_EXPR:
7939      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7940      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7941      if (temp == 0)
7942	abort ();
7943      return temp;
7944
7945    case FFS_EXPR:
7946      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7947      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7948      if (temp == 0)
7949	abort ();
7950      return temp;
7951
7952      /* ??? Can optimize bitwise operations with one arg constant.
7953	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7954	 and (a bitwise1 b) bitwise2 b (etc)
7955	 but that is probably not worthwhile.  */
7956
7957      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7958	 boolean values when we want in all cases to compute both of them.  In
7959	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7960	 as actual zero-or-1 values and then bitwise anding.  In cases where
7961	 there cannot be any side effects, better code would be made by
7962	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7963	 how to recognize those cases.  */
7964
7965    case TRUTH_AND_EXPR:
7966    case BIT_AND_EXPR:
7967      this_optab = and_optab;
7968      goto binop;
7969
7970    case TRUTH_OR_EXPR:
7971    case BIT_IOR_EXPR:
7972      this_optab = ior_optab;
7973      goto binop;
7974
7975    case TRUTH_XOR_EXPR:
7976    case BIT_XOR_EXPR:
7977      this_optab = xor_optab;
7978      goto binop;
7979
7980    case LSHIFT_EXPR:
7981    case RSHIFT_EXPR:
7982    case LROTATE_EXPR:
7983    case RROTATE_EXPR:
7984      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7985	subtarget = 0;
7986      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7987      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7988			   unsignedp);
7989
7990      /* Could determine the answer when only additive constants differ.  Also,
7991	 the addition of one can be handled by changing the condition.  */
7992    case LT_EXPR:
7993    case LE_EXPR:
7994    case GT_EXPR:
7995    case GE_EXPR:
7996    case EQ_EXPR:
7997    case NE_EXPR:
7998    case UNORDERED_EXPR:
7999    case ORDERED_EXPR:
8000    case UNLT_EXPR:
8001    case UNLE_EXPR:
8002    case UNGT_EXPR:
8003    case UNGE_EXPR:
8004    case UNEQ_EXPR:
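      /* First try to expand the comparison as a store-flag sequence.  */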
8005      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8006      if (temp != 0)
8007	return temp;
8008
8009      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8010      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8011	  && original_target
8012	  && GET_CODE (original_target) == REG
8013	  && (GET_MODE (original_target)
8014	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8015	{
8016	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8017			      VOIDmode, 0);
8018
8019	  /* If temp is constant, we can just compute the result.  */
8020	  if (GET_CODE (temp) == CONST_INT)
8021	    {
8022	      if (INTVAL (temp) != 0)
8023	        emit_move_insn (target, const1_rtx);
8024	      else
8025	        emit_move_insn (target, const0_rtx);
8026
8027	      return target;
8028	    }
8029
8030	  if (temp != original_target)
8031	    {
8032	      enum machine_mode mode1 = GET_MODE (temp);
8033	      if (mode1 == VOIDmode)
8034		mode1 = tmode != VOIDmode ? tmode : mode;
8035
8036	      temp = copy_to_mode_reg (mode1, temp);
8037	    }
8038
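	  /* Normalize TEMP to exactly 0 or 1: skip the store of 1 below
	     when TEMP compares equal to zero.  */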
8039	  op1 = gen_label_rtx ();
8040	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8041				   GET_MODE (temp), unsignedp, op1);
8042	  emit_move_insn (temp, const1_rtx);
8043	  emit_label (op1);
8044	  return temp;
8045	}
8046
8047      /* If no set-flag instruction, must generate a conditional
8048	 store into a temporary variable.  Drop through
8049	 and handle this like && and ||.  */
8050
8051    case TRUTH_ANDIF_EXPR:
8052    case TRUTH_ORIF_EXPR:
8053      if (! ignore
8054	  && (target == 0 || ! safe_from_p (target, exp, 1)
8055	      /* Make sure we don't have a hard reg (such as function's return
8056		 value) live across basic blocks, if not optimizing.  */
8057	      || (!optimize && GET_CODE (target) == REG
8058		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8059	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8060
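      /* Expand EXP as a conditional jump: when a value is wanted, clear
	 TARGET, jump past the store of 1 if EXP is false (jumpifnot expands
	 the short-circuit condition itself), and otherwise set TARGET to 1.  */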
8061      if (target)
8062	emit_clr_insn (target);
8063
8064      op1 = gen_label_rtx ();
8065      jumpifnot (exp, op1);
8066
8067      if (target)
8068	emit_0_to_1_insn (target);
8069
8070      emit_label (op1);
8071      return ignore ? const0_rtx : target;
8072
8073    case TRUTH_NOT_EXPR:
8074      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8075      /* The parser is careful to generate TRUTH_NOT_EXPR
8076	 only with operands that are always zero or one.  */
8077      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8078			   target, 1, OPTAB_LIB_WIDEN);
8079      if (temp == 0)
8080	abort ();
8081      return temp;
8082
8083    case COMPOUND_EXPR:
8084      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8085      emit_queue ();
8086      return expand_expr (TREE_OPERAND (exp, 1),
8087			  (ignore ? const0_rtx : target),
8088			  VOIDmode, 0);
8089
8090    case COND_EXPR:
8091      /* If we would have a "singleton" (see below) were it not for a
8092	 conversion in each arm, bring that conversion back out.  */
8093      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8094	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8095	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8096	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8097	{
8098	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8099	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8100
8101	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8102	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8103	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8104		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8105	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8106		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8107	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8108		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8109	    return expand_expr (build1 (NOP_EXPR, type,
8110					build (COND_EXPR, TREE_TYPE (iftrue),
8111					       TREE_OPERAND (exp, 0),
8112					       iftrue, iffalse)),
8113				target, tmode, modifier);
8114	}
8115
8116      {
8117	/* Note that COND_EXPRs whose type is a structure or union
8118	   are required to be constructed to contain assignments of
8119	   a temporary variable, so that we can evaluate them here
8120	   for side effect only.  If type is void, we must do likewise.  */
8121
8122	/* If an arm of the branch requires a cleanup,
8123	   only that cleanup is performed.  */
8124
8125	tree singleton = 0;
8126	tree binary_op = 0, unary_op = 0;
8127
8128	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8129	   convert it to our mode, if necessary.  */
8130	if (integer_onep (TREE_OPERAND (exp, 1))
8131	    && integer_zerop (TREE_OPERAND (exp, 2))
8132	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8133	  {
8134	    if (ignore)
8135	      {
8136		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8137			     modifier);
8138		return const0_rtx;
8139	      }
8140
8141	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8142	    if (GET_MODE (op0) == mode)
8143	      return op0;
8144
8145	    if (target == 0)
8146	      target = gen_reg_rtx (mode);
8147	    convert_move (target, op0, unsignedp);
8148	    return target;
8149	  }
8150
8151	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8152	   output and conditionally add B.  Similarly for unary operations.
8153	   Don't do this if X has side-effects because those side effects
8154	   might affect A or B and the "?" operation is a sequence point in
8155	   ANSI.  (operand_equal_p tests for side effects.)  */
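	/* Illustrative example (made-up names, not part of the original
	   commentary): when X has no side effects,

	       r = x ? a + b : a;

	   can be compiled as

	       r = a;  if (x) r += b;

	   which is what the SINGLETON/BINARY_OP handling below arranges.  */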
8156
8157	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8158	    && operand_equal_p (TREE_OPERAND (exp, 2),
8159				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8160	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8161	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8162		 && operand_equal_p (TREE_OPERAND (exp, 1),
8163				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8164	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8165	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8166		 && operand_equal_p (TREE_OPERAND (exp, 2),
8167				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8168	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8169	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8170		 && operand_equal_p (TREE_OPERAND (exp, 1),
8171				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8172	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8173
8174	/* If we are not to produce a result, we have no target.  Otherwise,
8175	   if a target was specified use it; it will not be used as an
8176	   intermediate target unless it is safe.  If no target, use a
8177	   temporary.  */
8178
8179	if (ignore)
8180	  temp = 0;
8181	else if (original_target
8182		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8183		     || (singleton && GET_CODE (original_target) == REG
8184			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8185			 && original_target == var_rtx (singleton)))
8186		 && GET_MODE (original_target) == mode
8187#ifdef HAVE_conditional_move
8188		 && (! can_conditionally_move_p (mode)
8189		     || GET_CODE (original_target) == REG
8190		     || TREE_ADDRESSABLE (type))
8191#endif
8192		 && (GET_CODE (original_target) != MEM
8193		     || TREE_ADDRESSABLE (type)))
8194	  temp = original_target;
8195	else if (TREE_ADDRESSABLE (type))
8196	  abort ();
8197	else
8198	  temp = assign_temp (type, 0, 0, 1);
8199
8200	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8201	   do the test of X as a store-flag operation, do this as
8202	   A + ((X != 0) << log C).  Similarly for other simple binary
8203	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
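	/* E.g. (illustrative, made-up names): with C == 4 and a store-flag
	   insn available,

	       r = x ? a + 4 : a;

	   is done as

	       r = a + ((x != 0) << 2);

	   with no branch at all.  */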
8204	if (temp && singleton && binary_op
8205	    && (TREE_CODE (binary_op) == PLUS_EXPR
8206		|| TREE_CODE (binary_op) == MINUS_EXPR
8207		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8208		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8209	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8210		: integer_onep (TREE_OPERAND (binary_op, 1)))
8211	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8212	  {
8213	    rtx result;
8214	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8215                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8216                               ? addv_optab : add_optab)
8217                            : TREE_CODE (binary_op) == MINUS_EXPR
8218                              ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8219                                 ? subv_optab : sub_optab)
8220                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8221                            : xor_optab);
8222
8223	    /* If we had X ? A : A + 1, do this as A + (X == 0).
8224
8225	       We have to invert the truth value here and then put it
8226	       back later if do_store_flag fails.  We cannot simply copy
8227	       TREE_OPERAND (exp, 0) to another variable and modify that
8228	       because invert_truthvalue can modify the tree pointed to
8229	       by its argument.  */
8230	    if (singleton == TREE_OPERAND (exp, 1))
8231	      TREE_OPERAND (exp, 0)
8232		= invert_truthvalue (TREE_OPERAND (exp, 0));
8233
8234	    result = do_store_flag (TREE_OPERAND (exp, 0),
8235				    (safe_from_p (temp, singleton, 1)
8236				     ? temp : NULL_RTX),
8237				    mode, BRANCH_COST <= 1);
8238
8239	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8240	      result = expand_shift (LSHIFT_EXPR, mode, result,
8241				     build_int_2 (tree_log2
8242						  (TREE_OPERAND
8243						   (binary_op, 1)),
8244						  0),
8245				     (safe_from_p (temp, singleton, 1)
8246				      ? temp : NULL_RTX), 0);
8247
8248	    if (result)
8249	      {
8250		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8251		return expand_binop (mode, boptab, op1, result, temp,
8252				     unsignedp, OPTAB_LIB_WIDEN);
8253	      }
8254	    else if (singleton == TREE_OPERAND (exp, 1))
8255	      TREE_OPERAND (exp, 0)
8256		= invert_truthvalue (TREE_OPERAND (exp, 0));
8257	  }
8258
8259	do_pending_stack_adjust ();
8260	NO_DEFER_POP;
8261	op0 = gen_label_rtx ();
8262
8263	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8264	  {
8265	    if (temp != 0)
8266	      {
8267		/* If the target conflicts with the other operand of the
8268		   binary op, we can't use it.  Also, we can't use the target
8269		   if it is a hard register, because evaluating the condition
8270		   might clobber it.  */
8271		if ((binary_op
8272		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8273		    || (GET_CODE (temp) == REG
8274			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8275		  temp = gen_reg_rtx (mode);
8276		store_expr (singleton, temp, 0);
8277	      }
8278	    else
8279	      expand_expr (singleton,
8280			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8281	    if (singleton == TREE_OPERAND (exp, 1))
8282	      jumpif (TREE_OPERAND (exp, 0), op0);
8283	    else
8284	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8285
8286	    start_cleanup_deferral ();
8287	    if (binary_op && temp == 0)
8288	      /* Just touch the other operand.  */
8289	      expand_expr (TREE_OPERAND (binary_op, 1),
8290			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8291	    else if (binary_op)
8292	      store_expr (build (TREE_CODE (binary_op), type,
8293				 make_tree (type, temp),
8294				 TREE_OPERAND (binary_op, 1)),
8295			  temp, 0);
8296	    else
8297	      store_expr (build1 (TREE_CODE (unary_op), type,
8298				  make_tree (type, temp)),
8299			  temp, 0);
8300	    op1 = op0;
8301	  }
8302	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8303	   comparison operator.  If we have one of these cases, set the
8304	   output to A, branch on A (cse will merge these two references),
8305	   then set the output to FOO.  */
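	/* E.g. (illustrative, made-up names):

	       r = (a != 0) ? a : foo;

	   is emitted roughly as

	       r = a;  if (a != 0) goto L;  r = foo;  L: ;  */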
8306	else if (temp
8307		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8308		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8309		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8310				     TREE_OPERAND (exp, 1), 0)
8311		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8312		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8313		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8314	  {
8315	    if (GET_CODE (temp) == REG
8316		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8317	      temp = gen_reg_rtx (mode);
8318	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8319	    jumpif (TREE_OPERAND (exp, 0), op0);
8320
8321	    start_cleanup_deferral ();
8322	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8323	    op1 = op0;
8324	  }
8325	else if (temp
8326		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8327		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8328		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8329				     TREE_OPERAND (exp, 2), 0)
8330		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8331		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8332		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8333	  {
8334	    if (GET_CODE (temp) == REG
8335		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8336	      temp = gen_reg_rtx (mode);
8337	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8338	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8339
8340	    start_cleanup_deferral ();
8341	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8342	    op1 = op0;
8343	  }
8344	else
8345	  {
8346	    op1 = gen_label_rtx ();
8347	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8348
8349	    start_cleanup_deferral ();
8350
8351	    /* One branch of the cond can be void if it never returns.  For
8352	       example, A ? throw : E.  */
8353	    if (temp != 0
8354		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8355	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
8356	    else
8357	      expand_expr (TREE_OPERAND (exp, 1),
8358			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8359	    end_cleanup_deferral ();
8360	    emit_queue ();
8361	    emit_jump_insn (gen_jump (op1));
8362	    emit_barrier ();
8363	    emit_label (op0);
8364	    start_cleanup_deferral ();
8365	    if (temp != 0
8366		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8367	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
8368	    else
8369	      expand_expr (TREE_OPERAND (exp, 2),
8370			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8371	  }
8372
8373	end_cleanup_deferral ();
8374
8375	emit_queue ();
8376	emit_label (op1);
8377	OK_DEFER_POP;
8378
8379	return temp;
8380      }
8381
8382    case TARGET_EXPR:
8383      {
8384	/* Something needs to be initialized, but we didn't know
8385	   where that thing was when building the tree.  For example,
8386	   it could be the return value of a function, or a parameter
8387	   to a function which is laid down on the stack, or a temporary
8388	   variable which must be passed by reference.
8389
8390	   We guarantee that the expression will either be constructed
8391	   or copied into our original target.  */
8392
8393	tree slot = TREE_OPERAND (exp, 0);
8394	tree cleanups = NULL_TREE;
8395	tree exp1;
8396
8397	if (TREE_CODE (slot) != VAR_DECL)
8398	  abort ();
8399
8400	if (! ignore)
8401	  target = original_target;
8402
8403	/* Set this here so that if we get a target that refers to a
8404	   register variable that's already been used, put_reg_into_stack
8405	   knows that it should fix up those uses.  */
8406	TREE_USED (slot) = 1;
8407
8408	if (target == 0)
8409	  {
8410	    if (DECL_RTL_SET_P (slot))
8411	      {
8412		target = DECL_RTL (slot);
8413		/* If we have already expanded the slot, don't do
8414		   it again.  (mrs)  */
8415		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8416		  return target;
8417	      }
8418	    else
8419	      {
8420		target = assign_temp (type, 2, 0, 1);
8421		/* All temp slots at this level must not conflict.  */
8422		preserve_temp_slots (target);
8423		SET_DECL_RTL (slot, target);
8424		if (TREE_ADDRESSABLE (slot))
8425		  put_var_into_stack (slot);
8426
8427		/* Since SLOT is not known to the called function
8428		   to belong to its stack frame, we must build an explicit
8429		   cleanup.  This case occurs when we must build up a reference
8430		   to pass the reference as an argument.  In this case,
8431		   it is very likely that such a reference need not be
8432		   built here.  */
8433
8434		if (TREE_OPERAND (exp, 2) == 0)
8435		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8436		cleanups = TREE_OPERAND (exp, 2);
8437	      }
8438	  }
8439	else
8440	  {
8441	    /* This case does occur when expanding a parameter which
8442	       needs to be constructed on the stack.  The target
8443	       is the actual stack address that we want to initialize.
8444	       The function we call will perform the cleanup in this case.  */
8445
8446	    /* If we have already assigned it space, use that space,
8447	       not the target that we were passed in, as our target
8448	       parameter is only a hint.  */
8449	    if (DECL_RTL_SET_P (slot))
8450	      {
8451		target = DECL_RTL (slot);
8452		/* If we have already expanded the slot, don't do
8453                   it again.  (mrs)  */
8454		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8455		  return target;
8456	      }
8457	    else
8458	      {
8459		SET_DECL_RTL (slot, target);
8460		/* If we must have an addressable slot, then make sure that
8461		   the RTL that we just stored in slot is OK.  */
8462		if (TREE_ADDRESSABLE (slot))
8463		  put_var_into_stack (slot);
8464	      }
8465	  }
8466
8467	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8468	/* Mark it as expanded.  */
8469	TREE_OPERAND (exp, 1) = NULL_TREE;
8470
8471	store_expr (exp1, target, 0);
8472
8473	expand_decl_cleanup (NULL_TREE, cleanups);
8474
8475	return target;
8476      }
8477
8478    case INIT_EXPR:
8479      {
8480	tree lhs = TREE_OPERAND (exp, 0);
8481	tree rhs = TREE_OPERAND (exp, 1);
8482
8483	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8484	return temp;
8485      }
8486
8487    case MODIFY_EXPR:
8488      {
8489	/* If lhs is complex, expand calls in rhs before computing it.
8490	   That's so we don't compute a pointer and save it over a
8491	   call.  If lhs is simple, compute it first so we can give it
8492	   as a target if the rhs is just a call.  This avoids an
8493	   extra temp and copy and that prevents a partial-subsumption
8494	   which makes bad code.  Actually we could treat
8495	   component_ref's of vars like vars.  */
8496
8497	tree lhs = TREE_OPERAND (exp, 0);
8498	tree rhs = TREE_OPERAND (exp, 1);
8499
8500	temp = 0;
8501
8502	/* Check for |= or &= of a bitfield of size one into another bitfield
8503	   of size 1.  In this case, (unless we need the result of the
8504	   assignment) we can do this more efficiently with a
8505	   test followed by an assignment, if necessary.
8506
8507	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8508	   things change so we do, this code should be enhanced to
8509	   support it.  */
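	/* Illustrative example (made-up field names), for the BIT_IOR case:

	       struct { int a : 1, b : 1; } s;
	       s.a |= s.b;

	   can be emitted, when the result is not needed, as

	       if (s.b) s.a = 1;

	   and analogously `if (! s.b) s.a = 0' for the BIT_AND case.  */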
8510	if (ignore
8511	    && TREE_CODE (lhs) == COMPONENT_REF
8512	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8513		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8514	    && TREE_OPERAND (rhs, 0) == lhs
8515	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8516	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8517	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8518	  {
8519	    rtx label = gen_label_rtx ();
8520
8521	    do_jump (TREE_OPERAND (rhs, 1),
8522		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8523		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8524	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8525					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8526					      ? integer_one_node
8527					      : integer_zero_node)),
8528			       0, 0);
8529	    do_pending_stack_adjust ();
8530	    emit_label (label);
8531	    return const0_rtx;
8532	  }
8533
8534	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8535
8536	return temp;
8537      }
8538
8539    case RETURN_EXPR:
8540      if (!TREE_OPERAND (exp, 0))
8541	expand_null_return ();
8542      else
8543	expand_return (TREE_OPERAND (exp, 0));
8544      return const0_rtx;
8545
8546    case PREINCREMENT_EXPR:
8547    case PREDECREMENT_EXPR:
8548      return expand_increment (exp, 0, ignore);
8549
8550    case POSTINCREMENT_EXPR:
8551    case POSTDECREMENT_EXPR:
8552      /* Faster to treat as pre-increment if result is not used.  */
8553      return expand_increment (exp, ! ignore, ignore);
8554
8555    case ADDR_EXPR:
8556      /* Are we taking the address of a nested function?  */
8557      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8558	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8559	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8560	  && ! TREE_STATIC (exp))
8561	{
8562	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8563	  op0 = force_operand (op0, target);
8564	}
8565      /* If we are taking the address of something erroneous, just
8566	 return a zero.  */
8567      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8568	return const0_rtx;
8569      /* If we are taking the address of a constant and are at the
8570	 top level, we have to use output_constant_def since we can't
8571	 call force_const_mem at top level.  */
8572      else if (cfun == 0
8573	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8574		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8575		       == 'c')))
8576	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8577      else
8578	{
8579	  /* We make sure to pass const0_rtx down if we came in with
8580	     ignore set, to avoid doing the cleanups twice for the same expression.  */
8581	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8582			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8583			     (modifier == EXPAND_INITIALIZER
8584			      ? modifier : EXPAND_CONST_ADDRESS));
8585
8586	  /* If we are going to ignore the result, OP0 will have been set
8587	     to const0_rtx, so just return it.  Don't get confused and
8588	     think we are taking the address of the constant.  */
8589	  if (ignore)
8590	    return op0;
8591
8592	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8593	     clever and return a REG when given a MEM.  */
8594	  op0 = protect_from_queue (op0, 1);
8595
8596	  /* We would like the object in memory.  If it is a constant, we can
8597	     have it be statically allocated into memory.  For a non-constant,
8598	     we need to allocate some memory and store the value into it.  */
8599
8600	  if (CONSTANT_P (op0))
8601	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8602				   op0);
8603	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8604		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8605		   || GET_CODE (op0) == PARALLEL)
8606	    {
8607	      /* If the operand is a SAVE_EXPR, we can deal with this by
8608		 forcing the SAVE_EXPR into memory.  */
8609	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8610		{
8611		  put_var_into_stack (TREE_OPERAND (exp, 0));
8612		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8613		}
8614	      else
8615		{
8616		  /* If this object is in a register, it can't be BLKmode.  */
8617		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8618		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
8619
8620		  if (GET_CODE (op0) == PARALLEL)
8621		    /* Handle calls that pass values in multiple
8622		       non-contiguous locations.  The Irix 6 ABI has examples
8623		       of this.  */
8624		    emit_group_store (memloc, op0,
8625				      int_size_in_bytes (inner_type));
8626		  else
8627		    emit_move_insn (memloc, op0);
8628
8629		  op0 = memloc;
8630		}
8631	    }
8632
8633	  if (GET_CODE (op0) != MEM)
8634	    abort ();
8635
8636	  mark_temp_addr_taken (op0);
8637	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8638	    {
8639	      op0 = XEXP (op0, 0);
8640#ifdef POINTERS_EXTEND_UNSIGNED
8641	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8642		  && mode == ptr_mode)
8643		op0 = convert_memory_address (ptr_mode, op0);
8644#endif
8645	      return op0;
8646	    }
8647
8648	  /* If OP0 is not aligned at least as much as the type requires, we
8649	     need to make a temporary, copy OP0 to it, and take the address of
8650	     the temporary.  We want to use the alignment of the type, not of
8651	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
8652	     the test for BLKmode means that can't happen.  The test for
8653	     BLKmode is because we never make mis-aligned MEMs with
8654	     non-BLKmode.
8655
8656	     We don't need to do this at all if the machine doesn't have
8657	     strict alignment.  */
8658	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8659	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8660		  > MEM_ALIGN (op0))
8661	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8662	    {
8663	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8664	      rtx new
8665		= assign_stack_temp_for_type
8666		  (TYPE_MODE (inner_type),
8667		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8668		   : int_size_in_bytes (inner_type),
8669		   1, build_qualified_type (inner_type,
8670					    (TYPE_QUALS (inner_type)
8671					     | TYPE_QUAL_CONST)));
8672
8673	      if (TYPE_ALIGN_OK (inner_type))
8674		abort ();
8675
8676	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8677	      op0 = new;
8678	    }
8679
8680	  op0 = force_operand (XEXP (op0, 0), target);
8681	}
8682
8683      if (flag_force_addr
8684	  && GET_CODE (op0) != REG
8685	  && modifier != EXPAND_CONST_ADDRESS
8686	  && modifier != EXPAND_INITIALIZER
8687	  && modifier != EXPAND_SUM)
8688	op0 = force_reg (Pmode, op0);
8689
8690      if (GET_CODE (op0) == REG
8691	  && ! REG_USERVAR_P (op0))
8692	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8693
8694#ifdef POINTERS_EXTEND_UNSIGNED
8695      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8696	  && mode == ptr_mode)
8697	op0 = convert_memory_address (ptr_mode, op0);
8698#endif
8699
8700      return op0;
8701
8702    case ENTRY_VALUE_EXPR:
8703      abort ();
8704
8705    /* COMPLEX type for Extended Pascal & Fortran  */
8706    case COMPLEX_EXPR:
8707      {
8708	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8709	rtx insns;
8710
8711	/* Get the rtx code of the operands.  */
8712	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8713	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8714
8715	if (! target)
8716	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8717
8718	start_sequence ();
8719
8720	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8721	emit_move_insn (gen_realpart (mode, target), op0);
8722	emit_move_insn (gen_imagpart (mode, target), op1);
8723
8724	insns = get_insns ();
8725	end_sequence ();
8726
8727	/* Complex construction should appear as a single unit.  */
8728	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8729	   each with a separate pseudo as destination.
8730	   It's not correct for flow to treat them as a unit.  */
8731	if (GET_CODE (target) != CONCAT)
8732	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8733	else
8734	  emit_insns (insns);
8735
8736	return target;
8737      }
8738
8739    case REALPART_EXPR:
8740      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8741      return gen_realpart (mode, op0);
8742
8743    case IMAGPART_EXPR:
8744      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8745      return gen_imagpart (mode, op0);
8746
8747    case CONJ_EXPR:
8748      {
8749	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8750	rtx imag_t;
8751	rtx insns;
8752
8753	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8754
8755	if (! target)
8756	  target = gen_reg_rtx (mode);
8757
8758	start_sequence ();
8759
8760	/* Store the realpart and the negated imagpart to target.  */
8761	emit_move_insn (gen_realpart (partmode, target),
8762			gen_realpart (partmode, op0));
8763
8764	imag_t = gen_imagpart (partmode, target);
8765	temp = expand_unop (partmode,
8766                            ! unsignedp && flag_trapv
8767                            && (GET_MODE_CLASS(partmode) == MODE_INT)
8768                            ? negv_optab : neg_optab,
8769			    gen_imagpart (partmode, op0), imag_t, 0);
8770	if (temp != imag_t)
8771	  emit_move_insn (imag_t, temp);
8772
8773	insns = get_insns ();
8774	end_sequence ();
8775
8776	/* Conjugate should appear as a single unit
8777	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8778	   each with a separate pseudo as destination.
8779	   It's not correct for flow to treat them as a unit.  */
8780	if (GET_CODE (target) != CONCAT)
8781	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8782	else
8783	  emit_insns (insns);
8784
8785	return target;
8786      }
8787
8788    case TRY_CATCH_EXPR:
8789      {
8790	tree handler = TREE_OPERAND (exp, 1);
8791
8792	expand_eh_region_start ();
8793
8794	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8795
8796	expand_eh_region_end_cleanup (handler);
8797
8798	return op0;
8799      }
8800
8801    case TRY_FINALLY_EXPR:
8802      {
8803	tree try_block = TREE_OPERAND (exp, 0);
8804	tree finally_block = TREE_OPERAND (exp, 1);
8805	rtx finally_label = gen_label_rtx ();
8806	rtx done_label = gen_label_rtx ();
8807	rtx return_link = gen_reg_rtx (Pmode);
8808	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8809			      (tree) finally_label, (tree) return_link);
8810	TREE_SIDE_EFFECTS (cleanup) = 1;
8811
8812	/* Start a new binding layer that will keep track of all cleanup
8813	   actions to be performed.  */
8814	expand_start_bindings (2);
8815
8816	target_temp_slot_level = temp_slot_level;
8817
8818	expand_decl_cleanup (NULL_TREE, cleanup);
8819	op0 = expand_expr (try_block, target, tmode, modifier);
8820
8821	preserve_temp_slots (op0);
8822	expand_end_bindings (NULL_TREE, 0, 0);
8823	emit_jump (done_label);
8824	emit_label (finally_label);
8825	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8826	emit_indirect_jump (return_link);
8827	emit_label (done_label);
8828	return op0;
8829      }
8830
8831    case GOTO_SUBROUTINE_EXPR:
8832      {
8833	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8834	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8835	rtx return_address = gen_label_rtx ();
8836	emit_move_insn (return_link,
8837			gen_rtx_LABEL_REF (Pmode, return_address));
8838	emit_jump (subr);
8839	emit_label (return_address);
8840	return const0_rtx;
8841      }
8842
8843    case VA_ARG_EXPR:
8844      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8845
8846    case EXC_PTR_EXPR:
8847      return get_exception_pointer (cfun);
8848
8849    case FDESC_EXPR:
8850      /* Function descriptors are not valid except for as
8851	 initialization constants, and should not be expanded.  */
8852      abort ();
8853
8854    default:
8855      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8856    }
8857
8858  /* Here to do an ordinary binary operator, generating an instruction
8859     from the optab already placed in `this_optab'.  */
8860 binop:
8861  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8862    subtarget = 0;
8863  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8864  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8865 binop2:
8866  temp = expand_binop (mode, this_optab, op0, op1, target,
8867		       unsignedp, OPTAB_LIB_WIDEN);
8868  if (temp == 0)
8869    abort ();
8870  return temp;
8871}
8872
8873/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8874   when applied to the address of EXP produces an address known to be
8875   aligned more than BIGGEST_ALIGNMENT.  */
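/* For example (an illustrative sketch, not taken from the sources): for an
   alignment request of C + 1 bytes, where C is larger than BIGGEST_ALIGNMENT
   and C + 1 is a power of 2, the OFFSET recognized here has the source-level
   shape

       (- (sizetype) &exp) & C

   i.e. a BIT_AND_EXPR whose first operand is a negated address.  */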
8876
8877static int
8878is_aligning_offset (offset, exp)
8879     tree offset;
8880     tree exp;
8881{
8882  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
8883  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8884	 || TREE_CODE (offset) == NOP_EXPR
8885	 || TREE_CODE (offset) == CONVERT_EXPR
8886	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
8887    offset = TREE_OPERAND (offset, 0);
8888
8889  /* We must now have a BIT_AND_EXPR with a constant that is one less than
8890     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8891  if (TREE_CODE (offset) != BIT_AND_EXPR
8892      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8893      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
8894      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8895    return 0;
8896
8897  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8898     It must be NEGATE_EXPR.  Then strip any more conversions.  */
8899  offset = TREE_OPERAND (offset, 0);
8900  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8901	 || TREE_CODE (offset) == NOP_EXPR
8902	 || TREE_CODE (offset) == CONVERT_EXPR)
8903    offset = TREE_OPERAND (offset, 0);
8904
8905  if (TREE_CODE (offset) != NEGATE_EXPR)
8906    return 0;
8907
8908  offset = TREE_OPERAND (offset, 0);
8909  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8910	 || TREE_CODE (offset) == NOP_EXPR
8911	 || TREE_CODE (offset) == CONVERT_EXPR)
8912    offset = TREE_OPERAND (offset, 0);
8913
8914  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
8915     whose type is the same as EXP.  */
8916  return (TREE_CODE (offset) == ADDR_EXPR
8917	  && (TREE_OPERAND (offset, 0) == exp
8918	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
8919		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
8920		      == TREE_TYPE (exp)))));
8921}
8922
8923/* Return the tree node if ARG corresponds to a string constant or zero
8924   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
8925   in bytes within the string that ARG is accessing.  The type of the
8926   offset will be `sizetype'.  */
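/* For example (illustrative): if ARG is the expression "hello" + 3, i.e. a
   PLUS_EXPR of the address of a STRING_CST and the constant 3, the
   STRING_CST for "hello" is returned and *PTR_OFFSET is set to 3.  */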
8927
8928tree
8929string_constant (arg, ptr_offset)
8930     tree arg;
8931     tree *ptr_offset;
8932{
8933  STRIP_NOPS (arg);
8934
8935  if (TREE_CODE (arg) == ADDR_EXPR
8936      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8937    {
8938      *ptr_offset = size_zero_node;
8939      return TREE_OPERAND (arg, 0);
8940    }
8941  else if (TREE_CODE (arg) == PLUS_EXPR)
8942    {
8943      tree arg0 = TREE_OPERAND (arg, 0);
8944      tree arg1 = TREE_OPERAND (arg, 1);
8945
8946      STRIP_NOPS (arg0);
8947      STRIP_NOPS (arg1);
8948
8949      if (TREE_CODE (arg0) == ADDR_EXPR
8950	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8951	{
8952	  *ptr_offset = convert (sizetype, arg1);
8953	  return TREE_OPERAND (arg0, 0);
8954	}
8955      else if (TREE_CODE (arg1) == ADDR_EXPR
8956	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8957	{
8958	  *ptr_offset = convert (sizetype, arg0);
8959	  return TREE_OPERAND (arg1, 0);
8960	}
8961    }
8962
8963  return 0;
8964}
8965
8966/* Expand code for a post- or pre- increment or decrement
8967   and return the RTX for the result.
8968   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
8969
8970static rtx
8971expand_increment (exp, post, ignore)
8972     tree exp;
8973     int post, ignore;
8974{
8975  rtx op0, op1;
8976  rtx temp, value;
8977  tree incremented = TREE_OPERAND (exp, 0);
8978  optab this_optab = add_optab;
8979  int icode;
8980  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8981  int op0_is_copy = 0;
8982  int single_insn = 0;
8983  /* 1 means we can't store into OP0 directly,
8984     because it is a subreg narrower than a word,
8985     and we don't dare clobber the rest of the word.  */
8986  int bad_subreg = 0;
8987
8988  /* Stabilize any component ref that might need to be
8989     evaluated more than once below.  */
8990  if (!post
8991      || TREE_CODE (incremented) == BIT_FIELD_REF
8992      || (TREE_CODE (incremented) == COMPONENT_REF
8993	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8994	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8995    incremented = stabilize_reference (incremented);
8996  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
8997     ones into save exprs so that they don't accidentally get evaluated
8998     more than once by the code below.  */
8999  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9000      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9001    incremented = save_expr (incremented);
9002
9003  /* Compute the operands as RTX.
9004     Note whether OP0 is the actual lvalue or a copy of it:
9005     I believe it is a copy iff it is a register or subreg
9006     and insns were generated in computing it.  */
9007
9008  temp = get_last_insn ();
9009  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9010
9011  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9012     in place but instead must do sign- or zero-extension during assignment,
9013     so we copy it into a new register and let the code below use it as
9014     a copy.
9015
9016     Note that we can safely modify this SUBREG since it is known not to be
9017     shared (it was made by the expand_expr call above).  */
9018
9019  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9020    {
9021      if (post)
9022	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9023      else
9024	bad_subreg = 1;
9025    }
9026  else if (GET_CODE (op0) == SUBREG
9027	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9028    {
9029      /* We cannot increment this SUBREG in place.  If we are
9030	 post-incrementing, get a copy of the old value.  Otherwise,
9031	 just mark that we cannot increment in place.  */
9032      if (post)
9033	op0 = copy_to_reg (op0);
9034      else
9035	bad_subreg = 1;
9036    }
9037
9038  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9039		 && temp != get_last_insn ());
9040  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9041
9042  /* Decide whether incrementing or decrementing.  */
9043  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9044      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9045    this_optab = sub_optab;
9046
9047  /* Convert decrement by a constant into a negative increment.  */
9048  if (this_optab == sub_optab
9049      && GET_CODE (op1) == CONST_INT)
9050    {
9051      op1 = GEN_INT (-INTVAL (op1));
9052      this_optab = add_optab;
9053    }
9054
9055  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9056    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9057
9058  /* For a preincrement, see if we can do this with a single instruction.  */
9059  if (!post)
9060    {
9061      icode = (int) this_optab->handlers[(int) mode].insn_code;
9062      if (icode != (int) CODE_FOR_nothing
9063	  /* Make sure that OP0 is valid for operands 0 and 1
9064	     of the insn we want to queue.  */
9065	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9066	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9067	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9068	single_insn = 1;
9069    }
9070
9071  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9072     then we cannot just increment OP0.  We must therefore contrive to
9073     increment the original value.  Then, for postincrement, we can return
9074     OP0 since it is a copy of the old value.  For preincrement, expand here
9075     unless we can do it with a single insn.
9076
9077     Likewise if storing directly into OP0 would clobber high bits
9078     we need to preserve (bad_subreg).  */
9079  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9080    {
9081      /* This is the easiest way to increment the value wherever it is.
9082	 Problems with multiple evaluation of INCREMENTED are prevented
9083	 because either (1) it is a component_ref or preincrement,
9084	 in which case it was stabilized above, or (2) it is an array_ref
9085	 with constant index in an array in a register, which is
9086	 safe to reevaluate.  */
9087      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9088			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9089			    ? MINUS_EXPR : PLUS_EXPR),
9090			   TREE_TYPE (exp),
9091			   incremented,
9092			   TREE_OPERAND (exp, 1));
9093
9094      while (TREE_CODE (incremented) == NOP_EXPR
9095	     || TREE_CODE (incremented) == CONVERT_EXPR)
9096	{
9097	  newexp = convert (TREE_TYPE (incremented), newexp);
9098	  incremented = TREE_OPERAND (incremented, 0);
9099	}
9100
9101      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9102      return post ? op0 : temp;
9103    }
9104
9105  if (post)
9106    {
9107      /* We have a true reference to the value in OP0.
9108	 If there is an insn to add or subtract in this mode, queue it.
9109	 Queueing the increment insn avoids the register shuffling
9110	 that often results if we must increment now and first save
9111	 the old value for subsequent use.  */
9112
9113#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9114      op0 = stabilize (op0);
9115#endif
9116
9117      icode = (int) this_optab->handlers[(int) mode].insn_code;
9118      if (icode != (int) CODE_FOR_nothing
9119	  /* Make sure that OP0 is valid for operands 0 and 1
9120	     of the insn we want to queue.  */
9121	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9122	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9123	{
9124	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9125	    op1 = force_reg (mode, op1);
9126
9127	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9128	}
9129      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9130	{
9131	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9132		      ? force_reg (Pmode, XEXP (op0, 0))
9133		      : copy_to_reg (XEXP (op0, 0)));
9134	  rtx temp, result;
9135
9136	  op0 = replace_equiv_address (op0, addr);
9137	  temp = force_reg (GET_MODE (op0), op0);
9138	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9139	    op1 = force_reg (mode, op1);
9140
9141	  /* The increment queue is LIFO, thus we have to `queue'
9142	     the instructions in reverse order.  */
9143	  enqueue_insn (op0, gen_move_insn (op0, temp));
9144	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9145	  return result;
9146	}
9147    }
9148
9149  /* Preincrement, or we can't increment with one simple insn.  */
9150  if (post)
9151    /* Save a copy of the value before inc or dec, to return it later.  */
9152    temp = value = copy_to_reg (op0);
9153  else
9154    /* Arrange to return the incremented value.  */
9155    /* Copy the rtx because expand_binop will protect from the queue,
9156       and the results of that would be invalid for us to return
9157       if our caller does emit_queue before using our result.  */
9158    temp = copy_rtx (value = op0);
9159
9160  /* Increment however we can.  */
9161  op1 = expand_binop (mode, this_optab, value, op1, op0,
9162		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9163
9164  /* Make sure the value is stored into OP0.  */
9165  if (op1 != op0)
9166    emit_move_insn (op0, op1);
9167
9168  return temp;
9169}
9170
9171/* At the start of a function, record that we have no previously-pushed
9172   arguments waiting to be popped.  */
9173
9174void
9175init_pending_stack_adjust ()
9176{
9177  pending_stack_adjust = 0;
9178}
9179
9180/* When exiting from function, if safe, clear out any pending stack adjust
9181   so the adjustment won't get done.
9182
9183   Note, if the current function calls alloca, then it must have a
9184   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9185
9186void
9187clear_pending_stack_adjust ()
9188{
9189#ifdef EXIT_IGNORE_STACK
9190  if (optimize > 0
9191      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9192      && EXIT_IGNORE_STACK
9193      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9194      && ! flag_inline_functions)
9195    {
9196      stack_pointer_delta -= pending_stack_adjust;
9197      pending_stack_adjust = 0;
9198    }
9199#endif
9200}
9201
9202/* Pop any previously-pushed arguments that have not been popped yet.  */
9203
9204void
9205do_pending_stack_adjust ()
9206{
9207  if (inhibit_defer_pop == 0)
9208    {
9209      if (pending_stack_adjust != 0)
9210	adjust_stack (GEN_INT (pending_stack_adjust));
9211      pending_stack_adjust = 0;
9212    }
9213}
9214
9215/* Expand conditional expressions.  */
9216
9217/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9218   LABEL is an rtx of code CODE_LABEL, in this function and all the
9219   functions here.  */
9220
9221void
9222jumpifnot (exp, label)
9223     tree exp;
9224     rtx label;
9225{
9226  do_jump (exp, label, NULL_RTX);
9227}
9228
9229/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9230
9231void
9232jumpif (exp, label)
9233     tree exp;
9234     rtx label;
9235{
9236  do_jump (exp, NULL_RTX, label);
9237}
9238
9239/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9240   the result is zero, or IF_TRUE_LABEL if the result is one.
9241   Either IF_FALSE_LABEL or IF_TRUE_LABEL may be zero,
9242   meaning fall through in that case.
9243
9244   do_jump always does any pending stack adjust except when it does not
9245   actually perform a jump.  An example where there is no jump
9246   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9247
9248   This function is responsible for optimizing cases such as
9249   &&, || and comparison operators in EXP.  */
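/* For example (illustrative): for EXP of the form `a && b', do_jump emits a
   jump to IF_FALSE_LABEL as soon as A is known to be zero and only then
   tests B, instead of first computing the value of the conjunction.  */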
9250
9251void
9252do_jump (exp, if_false_label, if_true_label)
9253     tree exp;
9254     rtx if_false_label, if_true_label;
9255{
9256  enum tree_code code = TREE_CODE (exp);
9257  /* Some cases need to create a label to jump to
9258     in order to properly fall through.
9259     These cases set DROP_THROUGH_LABEL nonzero.  */
9260  rtx drop_through_label = 0;
9261  rtx temp;
9262  int i;
9263  tree type;
9264  enum machine_mode mode;
9265
9266#ifdef MAX_INTEGER_COMPUTATION_MODE
9267  check_max_integer_computation_mode (exp);
9268#endif
9269
9270  emit_queue ();
9271
9272  switch (code)
9273    {
9274    case ERROR_MARK:
9275      break;
9276
9277    case INTEGER_CST:
9278      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9279      if (temp)
9280	emit_jump (temp);
9281      break;
9282
9283#if 0
9284      /* This is not true with #pragma weak  */
9285    case ADDR_EXPR:
9286      /* The address of something can never be zero.  */
9287      if (if_true_label)
9288	emit_jump (if_true_label);
9289      break;
9290#endif
9291
9292    case NOP_EXPR:
9293      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9294	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9295	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9296	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9297	goto normal;
9298    case CONVERT_EXPR:
9299      /* If we are narrowing the operand, we have to do the compare in the
9300	 narrower mode.  */
9301      if ((TYPE_PRECISION (TREE_TYPE (exp))
9302	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9303	goto normal;
9304    case NON_LVALUE_EXPR:
9305    case REFERENCE_EXPR:
9306    case ABS_EXPR:
9307    case NEGATE_EXPR:
9308    case LROTATE_EXPR:
9309    case RROTATE_EXPR:
9310      /* These cannot change zero->non-zero or vice versa.  */
9311      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9312      break;
9313
9314    case WITH_RECORD_EXPR:
9315      /* Put the object on the placeholder list, recurse through our first
9316	 operand, and pop the list.  */
9317      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9318				    placeholder_list);
9319      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9320      placeholder_list = TREE_CHAIN (placeholder_list);
9321      break;
9322
9323#if 0
9324      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9325	 a test and can be longer if the test is eliminated.  */
9326    case PLUS_EXPR:
9327      /* Reduce to minus.  */
9328      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9329		   TREE_OPERAND (exp, 0),
9330		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9331				 TREE_OPERAND (exp, 1))));
9332      /* Process as MINUS.  */
9333#endif
9334
9335    case MINUS_EXPR:
9336      /* Non-zero iff operands of minus differ.  */
9337      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9338				  TREE_OPERAND (exp, 0),
9339				  TREE_OPERAND (exp, 1)),
9340			   NE, NE, if_false_label, if_true_label);
9341      break;
9342
9343    case BIT_AND_EXPR:
9344      /* If we are AND'ing with a small constant, do this comparison in the
9345	 smallest type that fits.  If the machine doesn't have comparisons
9346	 that small, it will be converted back to the wider comparison.
9347	 This helps if we are testing the sign bit of a narrower object.
9348	 combine can't do this for us because it can't know whether a
9349	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
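      /* E.g. (illustrative): a test of `x & 0x80' with X a full-word int
	 only needs the low byte, so it can be done as a QImode comparison
	 on machines that have one.  */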
9350
9351      if (! SLOW_BYTE_ACCESS
9352	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9353	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9354	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9355	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9356	  && (type = type_for_mode (mode, 1)) != 0
9357	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9358	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9359	      != CODE_FOR_nothing))
9360	{
9361	  do_jump (convert (type, exp), if_false_label, if_true_label);
9362	  break;
9363	}
9364      goto normal;
9365
9366    case TRUTH_NOT_EXPR:
9367      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9368      break;
9369
9370    case TRUTH_ANDIF_EXPR:
9371      if (if_false_label == 0)
9372	if_false_label = drop_through_label = gen_label_rtx ();
9373      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9374      start_cleanup_deferral ();
9375      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9376      end_cleanup_deferral ();
9377      break;
9378
9379    case TRUTH_ORIF_EXPR:
9380      if (if_true_label == 0)
9381	if_true_label = drop_through_label = gen_label_rtx ();
9382      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9383      start_cleanup_deferral ();
9384      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9385      end_cleanup_deferral ();
9386      break;
9387
9388    case COMPOUND_EXPR:
9389      push_temp_slots ();
9390      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9391      preserve_temp_slots (NULL_RTX);
9392      free_temp_slots ();
9393      pop_temp_slots ();
9394      emit_queue ();
9395      do_pending_stack_adjust ();
9396      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9397      break;
9398
9399    case COMPONENT_REF:
9400    case BIT_FIELD_REF:
9401    case ARRAY_REF:
9402    case ARRAY_RANGE_REF:
9403      {
9404	HOST_WIDE_INT bitsize, bitpos;
9405	int unsignedp;
9406	enum machine_mode mode;
9407	tree type;
9408	tree offset;
9409	int volatilep = 0;
9410
9411	/* Get description of this reference.  We don't actually care
9412	   about the underlying object here.  */
9413	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9414			     &unsignedp, &volatilep);
9415
9416	type = type_for_size (bitsize, unsignedp);
9417	if (! SLOW_BYTE_ACCESS
9418	    && type != 0 && bitsize >= 0
9419	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9420	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9421		!= CODE_FOR_nothing))
9422	  {
9423	    do_jump (convert (type, exp), if_false_label, if_true_label);
9424	    break;
9425	  }
9426	goto normal;
9427      }
9428
9429    case COND_EXPR:
9430      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9431      if (integer_onep (TREE_OPERAND (exp, 1))
9432	  && integer_zerop (TREE_OPERAND (exp, 2)))
9433	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9434
9435      else if (integer_zerop (TREE_OPERAND (exp, 1))
9436	       && integer_onep (TREE_OPERAND (exp, 2)))
9437	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9438
9439      else
9440	{
9441	  rtx label1 = gen_label_rtx ();
9442	  drop_through_label = gen_label_rtx ();
9443
9444	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9445
9446	  start_cleanup_deferral ();
9447	  /* Now the THEN-expression.  */
9448	  do_jump (TREE_OPERAND (exp, 1),
9449		   if_false_label ? if_false_label : drop_through_label,
9450		   if_true_label ? if_true_label : drop_through_label);
9451	  /* In case the do_jump just above never jumps.  */
9452	  do_pending_stack_adjust ();
9453	  emit_label (label1);
9454
9455	  /* Now the ELSE-expression.  */
9456	  do_jump (TREE_OPERAND (exp, 2),
9457		   if_false_label ? if_false_label : drop_through_label,
9458		   if_true_label ? if_true_label : drop_through_label);
9459	  end_cleanup_deferral ();
9460	}
9461      break;
9462
9463    case EQ_EXPR:
9464      {
9465	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9466
9467	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9468	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9469	  {
9470	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9471	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9472	    do_jump
9473	      (fold
9474	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9475		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9476				    fold (build1 (REALPART_EXPR,
9477						  TREE_TYPE (inner_type),
9478						  exp0)),
9479				    fold (build1 (REALPART_EXPR,
9480						  TREE_TYPE (inner_type),
9481						  exp1)))),
9482		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9483				    fold (build1 (IMAGPART_EXPR,
9484						  TREE_TYPE (inner_type),
9485						  exp0)),
9486				    fold (build1 (IMAGPART_EXPR,
9487						  TREE_TYPE (inner_type),
9488						  exp1)))))),
9489	       if_false_label, if_true_label);
9490	  }
9491
9492	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9493	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9494
9495	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9496		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9497	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9498	else
9499	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9500	break;
9501      }
9502
9503    case NE_EXPR:
9504      {
9505	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9506
9507	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9508	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9509	  {
9510	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9511	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9512	    do_jump
9513	      (fold
9514	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9515		       fold (build (NE_EXPR, TREE_TYPE (exp),
9516				    fold (build1 (REALPART_EXPR,
9517						  TREE_TYPE (inner_type),
9518						  exp0)),
9519				    fold (build1 (REALPART_EXPR,
9520						  TREE_TYPE (inner_type),
9521						  exp1)))),
9522		       fold (build (NE_EXPR, TREE_TYPE (exp),
9523				    fold (build1 (IMAGPART_EXPR,
9524						  TREE_TYPE (inner_type),
9525						  exp0)),
9526				    fold (build1 (IMAGPART_EXPR,
9527						  TREE_TYPE (inner_type),
9528						  exp1)))))),
9529	       if_false_label, if_true_label);
9530	  }
9531
9532	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9533	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9534
9535	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9536		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9537	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9538	else
9539	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9540	break;
9541      }
9542
9543    case LT_EXPR:
9544      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9545      if (GET_MODE_CLASS (mode) == MODE_INT
9546	  && ! can_compare_p (LT, mode, ccp_jump))
9547	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9548      else
9549	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9550      break;
9551
9552    case LE_EXPR:
9553      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9554      if (GET_MODE_CLASS (mode) == MODE_INT
9555	  && ! can_compare_p (LE, mode, ccp_jump))
9556	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9557      else
9558	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9559      break;
9560
9561    case GT_EXPR:
9562      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9563      if (GET_MODE_CLASS (mode) == MODE_INT
9564	  && ! can_compare_p (GT, mode, ccp_jump))
9565	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9566      else
9567	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9568      break;
9569
9570    case GE_EXPR:
9571      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9572      if (GET_MODE_CLASS (mode) == MODE_INT
9573	  && ! can_compare_p (GE, mode, ccp_jump))
9574	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9575      else
9576	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9577      break;
9578
9579    case UNORDERED_EXPR:
9580    case ORDERED_EXPR:
9581      {
9582	enum rtx_code cmp, rcmp;
9583	int do_rev;
9584
9585	if (code == UNORDERED_EXPR)
9586	  cmp = UNORDERED, rcmp = ORDERED;
9587	else
9588	  cmp = ORDERED, rcmp = UNORDERED;
9589	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9590
9591	do_rev = 0;
9592	if (! can_compare_p (cmp, mode, ccp_jump)
9593	    && (can_compare_p (rcmp, mode, ccp_jump)
9594		/* If the target doesn't provide either UNORDERED or ORDERED
9595		   comparisons, canonicalize on UNORDERED for the library.  */
9596		|| rcmp == UNORDERED))
9597	  do_rev = 1;
9598
9599        if (! do_rev)
9600	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9601	else
9602	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9603      }
9604      break;
9605
9606    {
9607      enum rtx_code rcode1;
9608      enum tree_code tcode2;
9609
9610      case UNLT_EXPR:
9611	rcode1 = UNLT;
9612	tcode2 = LT_EXPR;
9613	goto unordered_bcc;
9614      case UNLE_EXPR:
9615	rcode1 = UNLE;
9616	tcode2 = LE_EXPR;
9617	goto unordered_bcc;
9618      case UNGT_EXPR:
9619	rcode1 = UNGT;
9620	tcode2 = GT_EXPR;
9621	goto unordered_bcc;
9622      case UNGE_EXPR:
9623	rcode1 = UNGE;
9624	tcode2 = GE_EXPR;
9625	goto unordered_bcc;
9626      case UNEQ_EXPR:
9627	rcode1 = UNEQ;
9628	tcode2 = EQ_EXPR;
9629	goto unordered_bcc;
9630
9631      unordered_bcc:
9632        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9633	if (can_compare_p (rcode1, mode, ccp_jump))
9634	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9635			       if_true_label);
9636	else
9637	  {
9638	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
9639	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
9640	    tree cmp0, cmp1;
9641
9642	    /* If the target doesn't support combined unordered
9643	       compares, decompose into UNORDERED + comparison.  */
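	    /* Editorial illustration (not in the original source): e.g.
	       `a UNLT b' is rewritten as `UNORDERED (a, b) || a < b',
	       each half of which the target can handle on its own.  */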
9644	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9645	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9646	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9647	    do_jump (exp, if_false_label, if_true_label);
9648	  }
9649      }
9650      break;
9651
9652      /* Special case:
9653		__builtin_expect (<test>, 0)	and
9654		__builtin_expect (<test>, 1)
9655
9656	 We need to do this here, so that <test> is not converted to a SCC
9657	 operation on machines that use condition code registers and COMPARE
9658	 like the PowerPC, and then the jump is done based on whether the SCC
9659	 operation produced a 1 or 0.  */
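      /* Editorial illustration (not part of the original source): given

	     if (__builtin_expect (p != 0, 1))
	       use (p);

	 the CALL_EXPR below wraps the `p != 0' test, and
	 expand_builtin_expect_jump lets us branch on the test directly
	 instead of first materializing its 0/1 value in a register.  */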
9660    case CALL_EXPR:
9661      /* Check for a built-in function.  */
9662      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9663	{
9664	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9665	  tree arglist = TREE_OPERAND (exp, 1);
9666
9667	  if (TREE_CODE (fndecl) == FUNCTION_DECL
9668	      && DECL_BUILT_IN (fndecl)
9669	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9670	      && arglist != NULL_TREE
9671	      && TREE_CHAIN (arglist) != NULL_TREE)
9672	    {
9673	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9674						    if_true_label);
9675
9676	      if (seq != NULL_RTX)
9677		{
9678		  emit_insn (seq);
9679		  return;
9680		}
9681	    }
9682	}
9683      /* Fall through and generate the normal code.  */
9684
9685    default:
9686    normal:
9687      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9688#if 0
9689      /* This is no longer needed and produces poor code, since it makes
9690	 comparisons and tests of non-SI objects use different code
9691	 sequences.  */
9692      /* Copy to register to avoid generating bad insns by cse
9693	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
9694      if (!cse_not_expected && GET_CODE (temp) == MEM)
9695	temp = copy_to_reg (temp);
9696#endif
9697      do_pending_stack_adjust ();
9698      /* Do any postincrements in the expression that was tested.  */
9699      emit_queue ();
9700
9701      if (GET_CODE (temp) == CONST_INT
9702	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9703	  || GET_CODE (temp) == LABEL_REF)
9704	{
9705	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9706	  if (target)
9707	    emit_jump (target);
9708	}
9709      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9710	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9711	/* Note swapping the labels gives us not-equal.  */
9712	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9713      else if (GET_MODE (temp) != VOIDmode)
9714	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9715				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9716				 GET_MODE (temp), NULL_RTX,
9717				 if_false_label, if_true_label);
9718      else
9719	abort ();
9720    }
9721
9722  if (drop_through_label)
9723    {
9724      /* If do_jump produces code that might be jumped around,
9725	 do any stack adjusts from that code, before the place
9726	 where control merges in.  */
9727      do_pending_stack_adjust ();
9728      emit_label (drop_through_label);
9729    }
9730}
9731
9732/* Given a comparison expression EXP for values too wide to be compared
9733   with one insn, test the comparison and jump to the appropriate label.
9734   The code of EXP is ignored; we always test GT if SWAP is 0,
9735   and LT if SWAP is 1.  */
9736
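/* Editorial note (not in the original source): the switch above realizes
   all four orderings from this one "greater" primitive -- LT_EXPR passes
   SWAP == 1 (test op1 > op0), GT_EXPR passes SWAP == 0, and LE_EXPR/GE_EXPR
   additionally exchange the two labels so the jump is taken on the
   complemented condition.  */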
9737static void
9738do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9739     tree exp;
9740     int swap;
9741     rtx if_false_label, if_true_label;
9742{
9743  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9744  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9745  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9746  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9747
9748  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9749}
9750
9751/* Compare OP0 with OP1, word at a time, in mode MODE.
9752   UNSIGNEDP says to do unsigned comparison.
9753   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
9754
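/* Editorial sketch (not in the original source): for a two-word comparison
   (e.g. DImode on a 32-bit target) the loop below emits roughly

	if (op0.high >  op1.high) goto if_true_label;   -- signedness per UNSIGNEDP
	if (op0.high != op1.high) goto if_false_label;
	if (op0.low  >  op1.low)  goto if_true_label;   -- always unsigned
	if (op0.low  != op1.low)  goto if_false_label;
	goto if_false_label;                            -- the operands were equal

   so a strict "greater" test falls to IF_FALSE_LABEL when the values are
   equal.  */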
9755void
9756do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9757     enum machine_mode mode;
9758     int unsignedp;
9759     rtx op0, op1;
9760     rtx if_false_label, if_true_label;
9761{
9762  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9763  rtx drop_through_label = 0;
9764  int i;
9765
9766  if (! if_true_label || ! if_false_label)
9767    drop_through_label = gen_label_rtx ();
9768  if (! if_true_label)
9769    if_true_label = drop_through_label;
9770  if (! if_false_label)
9771    if_false_label = drop_through_label;
9772
9773  /* Compare a word at a time, high order first.  */
9774  for (i = 0; i < nwords; i++)
9775    {
9776      rtx op0_word, op1_word;
9777
9778      if (WORDS_BIG_ENDIAN)
9779	{
9780	  op0_word = operand_subword_force (op0, i, mode);
9781	  op1_word = operand_subword_force (op1, i, mode);
9782	}
9783      else
9784	{
9785	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9786	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9787	}
9788
9789      /* All but high-order word must be compared as unsigned.  */
9790      do_compare_rtx_and_jump (op0_word, op1_word, GT,
9791			       (unsignedp || i > 0), word_mode, NULL_RTX,
9792			       NULL_RTX, if_true_label);
9793
9794      /* Consider lower words only if these are equal.  */
9795      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9796			       NULL_RTX, NULL_RTX, if_false_label);
9797    }
9798
9799  if (if_false_label)
9800    emit_jump (if_false_label);
9801  if (drop_through_label)
9802    emit_label (drop_through_label);
9803}
9804
9805/* Given an EQ_EXPR expression EXP for values too wide to be compared
9806   with one insn, test the comparison and jump to the appropriate label.  */
9807
9808static void
9809do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9810     tree exp;
9811     rtx if_false_label, if_true_label;
9812{
9813  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9814  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9815  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9816  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9817  int i;
9818  rtx drop_through_label = 0;
9819
9820  if (! if_false_label)
9821    drop_through_label = if_false_label = gen_label_rtx ();
9822
9823  for (i = 0; i < nwords; i++)
9824    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9825			     operand_subword_force (op1, i, mode),
9826			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9827			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
9828
9829  if (if_true_label)
9830    emit_jump (if_true_label);
9831  if (drop_through_label)
9832    emit_label (drop_through_label);
9833}
9834
9835/* Jump according to whether OP0 is 0.
9836   We assume that OP0 has an integer mode that is too wide
9837   for the available compare insns.  */
9838
9839void
9840do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9841     rtx op0;
9842     rtx if_false_label, if_true_label;
9843{
9844  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9845  rtx part;
9846  int i;
9847  rtx drop_through_label = 0;
9848
9849  /* The fastest way of doing this comparison on almost any machine is to
9850     "or" all the words and compare the result.  If all have to be loaded
9851     from memory and this is a very wide item, it's possible this may
9852     be slower, but that's highly unlikely.  */
9853
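  /* Editorial sketch (not in the original source): for a DImode OP0 on a
     32-bit host this emits roughly

	 part = op0.word[0] | op0.word[1];
	 if (part == 0) goto if_true_label; else goto if_false_label;

     i.e. one word-mode compare instead of a compare per word.  */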
9854  part = gen_reg_rtx (word_mode);
9855  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9856  for (i = 1; i < nwords && part != 0; i++)
9857    part = expand_binop (word_mode, ior_optab, part,
9858			 operand_subword_force (op0, i, GET_MODE (op0)),
9859			 part, 1, OPTAB_WIDEN);
9860
9861  if (part != 0)
9862    {
9863      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9864			       NULL_RTX, if_false_label, if_true_label);
9865
9866      return;
9867    }
9868
9869  /* If we couldn't do the "or" simply, do this with a series of compares.  */
9870  if (! if_false_label)
9871    drop_through_label = if_false_label = gen_label_rtx ();
9872
9873  for (i = 0; i < nwords; i++)
9874    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9875			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
9876			     if_false_label, NULL_RTX);
9877
9878  if (if_true_label)
9879    emit_jump (if_true_label);
9880
9881  if (drop_through_label)
9882    emit_label (drop_through_label);
9883}
9884
9885/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9886   (including code to compute the values to be compared) and set (CC0)
9887   according to the result.
9888   The decision as to signed or unsigned comparison must be made by the caller.
9889
9890   We force a stack adjustment unless there are currently
9891   things pushed on the stack that aren't yet used.
9892
9893   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9894   compared.  */
9895
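/* Editorial sketch (not in the original source): on a CC0 target,
   compare_from_rtx (x, y, GT, 0, SImode, NULL_RTX) emits the compare insn
   for X and Y and returns `(gt (cc0) (const_int 0))', which the caller can
   use as the condition of a branch; if both operands are constants, the
   folded constant result is returned instead.  */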
9896rtx
9897compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9898     rtx op0, op1;
9899     enum rtx_code code;
9900     int unsignedp;
9901     enum machine_mode mode;
9902     rtx size;
9903{
9904  rtx tem;
9905
9906  /* If one operand is constant, make it the second one.  Only do this
9907     if the other operand is not constant as well.  */
9908
9909  if (swap_commutative_operands_p (op0, op1))
9910    {
9911      tem = op0;
9912      op0 = op1;
9913      op1 = tem;
9914      code = swap_condition (code);
9915    }
9916
9917  if (flag_force_mem)
9918    {
9919      op0 = force_not_mem (op0);
9920      op1 = force_not_mem (op1);
9921    }
9922
9923  do_pending_stack_adjust ();
9924
9925  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9926      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9927    return tem;
9928
9929#if 0
9930  /* There's no need to do this now that combine.c can eliminate lots of
9931     sign extensions.  This can be less efficient in certain cases on other
9932     machines.  */
9933
9934  /* If this is a signed equality comparison, we can do it as an
9935     unsigned comparison since zero-extension is cheaper than sign
9936     extension and comparisons with zero are done as unsigned.  This is
9937     the case even on machines that can do fast sign extension, since
9938     zero-extension is easier to combine with other operations than
9939     sign-extension is.  If we are comparing against a constant, we must
9940     convert it to what it would look like unsigned.  */
9941  if ((code == EQ || code == NE) && ! unsignedp
9942      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9943    {
9944      if (GET_CODE (op1) == CONST_INT
9945	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9946	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9947      unsignedp = 1;
9948    }
9949#endif
9950
9951  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9952
9953  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9954}
9955
9956/* Like do_compare_and_jump but expects the values to compare as two rtx's.
9957   The decision as to signed or unsigned comparison must be made by the caller.
9958
9959   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9960   compared.  */
9961
9962void
9963do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9964			 if_false_label, if_true_label)
9965     rtx op0, op1;
9966     enum rtx_code code;
9967     int unsignedp;
9968     enum machine_mode mode;
9969     rtx size;
9970     rtx if_false_label, if_true_label;
9971{
9972  rtx tem;
9973  int dummy_true_label = 0;
9974
9975  /* Reverse the comparison if that is safe and we want to jump if it is
9976     false.  */
9977  if (! if_true_label && ! FLOAT_MODE_P (mode))
9978    {
9979      if_true_label = if_false_label;
9980      if_false_label = 0;
9981      code = reverse_condition (code);
9982    }
9983
9984  /* If one operand is constant, make it the second one.  Only do this
9985     if the other operand is not constant as well.  */
9986
9987  if (swap_commutative_operands_p (op0, op1))
9988    {
9989      tem = op0;
9990      op0 = op1;
9991      op1 = tem;
9992      code = swap_condition (code);
9993    }
9994
9995  if (flag_force_mem)
9996    {
9997      op0 = force_not_mem (op0);
9998      op1 = force_not_mem (op1);
9999    }
10000
10001  do_pending_stack_adjust ();
10002
10003  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10004      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10005    {
10006      if (tem == const_true_rtx)
10007	{
10008	  if (if_true_label)
10009	    emit_jump (if_true_label);
10010	}
10011      else
10012	{
10013	  if (if_false_label)
10014	    emit_jump (if_false_label);
10015	}
10016      return;
10017    }
10018
10019#if 0
10020  /* There's no need to do this now that combine.c can eliminate lots of
10021     sign extensions.  This can be less efficient in certain cases on other
10022     machines.  */
10023
10024  /* If this is a signed equality comparison, we can do it as an
10025     unsigned comparison since zero-extension is cheaper than sign
10026     extension and comparisons with zero are done as unsigned.  This is
10027     the case even on machines that can do fast sign extension, since
10028     zero-extension is easier to combine with other operations than
10029     sign-extension is.  If we are comparing against a constant, we must
10030     convert it to what it would look like unsigned.  */
10031  if ((code == EQ || code == NE) && ! unsignedp
10032      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10033    {
10034      if (GET_CODE (op1) == CONST_INT
10035	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10036	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10037      unsignedp = 1;
10038    }
10039#endif
10040
10041  if (! if_true_label)
10042    {
10043      dummy_true_label = 1;
10044      if_true_label = gen_label_rtx ();
10045    }
10046
10047  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10048			   if_true_label);
10049
10050  if (if_false_label)
10051    emit_jump (if_false_label);
10052  if (dummy_true_label)
10053    emit_label (if_true_label);
10054}
10055
10056/* Generate code for a comparison expression EXP (including code to compute
10057   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10058   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10059   generated code will drop through.
10060   SIGNED_CODE should be the rtx operation for this comparison for
10061   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10062
10063   We force a stack adjustment unless there are currently
10064   things pushed on the stack that aren't yet used.  */
10065
10066static void
10067do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10068		     if_true_label)
10069     tree exp;
10070     enum rtx_code signed_code, unsigned_code;
10071     rtx if_false_label, if_true_label;
10072{
10073  rtx op0, op1;
10074  tree type;
10075  enum machine_mode mode;
10076  int unsignedp;
10077  enum rtx_code code;
10078
10079  /* Don't crash if the comparison was erroneous.  */
10080  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10081  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10082    return;
10083
10084  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10085  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10086    return;
10087
10088  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10089  mode = TYPE_MODE (type);
10090  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10091      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10092	  || (GET_MODE_BITSIZE (mode)
10093	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10094								      1)))))))
10095    {
10096      /* op0 might have been replaced by a promoted constant, in which
10097	 case the type of the second argument should be used.  */
10098      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10099      mode = TYPE_MODE (type);
10100    }
10101  unsignedp = TREE_UNSIGNED (type);
10102  code = unsignedp ? unsigned_code : signed_code;
10103
10104#ifdef HAVE_canonicalize_funcptr_for_compare
10105  /* If function pointers need to be "canonicalized" before they can
10106     be reliably compared, then canonicalize them.  */
10107  if (HAVE_canonicalize_funcptr_for_compare
10108      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10109      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10110	  == FUNCTION_TYPE))
10111    {
10112      rtx new_op0 = gen_reg_rtx (mode);
10113
10114      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10115      op0 = new_op0;
10116    }
10117
10118  if (HAVE_canonicalize_funcptr_for_compare
10119      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10120      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10121	  == FUNCTION_TYPE))
10122    {
10123      rtx new_op1 = gen_reg_rtx (mode);
10124
10125      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10126      op1 = new_op1;
10127    }
10128#endif
10129
10130  /* Do any postincrements in the expression that was tested.  */
10131  emit_queue ();
10132
10133  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10134			   ((mode == BLKmode)
10135			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10136			   if_false_label, if_true_label);
10137}
10138
10139/* Generate code to calculate EXP using a store-flag instruction
10140   and return an rtx for the result.  EXP is either a comparison
10141   or a TRUTH_NOT_EXPR whose operand is a comparison.
10142
10143   If TARGET is nonzero, store the result there if convenient.
10144
10145   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10146   cheap.
10147
10148   Return zero if there is no suitable set-flag instruction
10149   available on this machine.
10150
10151   Once expand_expr has been called on the arguments of the comparison,
10152   we are committed to doing the store flag, since it is not safe to
10153   re-evaluate the expression.  We emit the store-flag insn by calling
10154   emit_store_flag, but only expand the arguments if we have a reason
10155   to believe that emit_store_flag will be successful.  If we think that
10156   it will, but it isn't, we have to simulate the store-flag with a
10157   set/jump/set sequence.  */
10158
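/* Editorial example (not in the original source): an assignment such as
   `flag = (a > b);' is routed here by expand_expr; when the target has a
   usable scc pattern the 0/1 result is produced without any branches,
   otherwise we either return 0 or fall back to the set/jump/set sequence
   described above.  */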
10159static rtx
10160do_store_flag (exp, target, mode, only_cheap)
10161     tree exp;
10162     rtx target;
10163     enum machine_mode mode;
10164     int only_cheap;
10165{
10166  enum rtx_code code;
10167  tree arg0, arg1, type;
10168  tree tem;
10169  enum machine_mode operand_mode;
10170  int invert = 0;
10171  int unsignedp;
10172  rtx op0, op1;
10173  enum insn_code icode;
10174  rtx subtarget = target;
10175  rtx result, label;
10176
10177  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10178     result at the end.  We can't simply invert the test since it would
10179     have already been inverted if it were valid.  This case occurs for
10180     some floating-point comparisons.  */
10181
10182  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10183    invert = 1, exp = TREE_OPERAND (exp, 0);
10184
10185  arg0 = TREE_OPERAND (exp, 0);
10186  arg1 = TREE_OPERAND (exp, 1);
10187
10188  /* Don't crash if the comparison was erroneous.  */
10189  if (arg0 == error_mark_node || arg1 == error_mark_node)
10190    return const0_rtx;
10191
10192  type = TREE_TYPE (arg0);
10193  operand_mode = TYPE_MODE (type);
10194  unsignedp = TREE_UNSIGNED (type);
10195
10196  /* We won't bother with BLKmode store-flag operations because it would mean
10197     passing a lot of information to emit_store_flag.  */
10198  if (operand_mode == BLKmode)
10199    return 0;
10200
10201  /* We won't bother with store-flag operations involving function pointers
10202     when function pointers must be canonicalized before comparisons.  */
10203#ifdef HAVE_canonicalize_funcptr_for_compare
10204  if (HAVE_canonicalize_funcptr_for_compare
10205      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10206	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10207	       == FUNCTION_TYPE))
10208	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10209	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10210		  == FUNCTION_TYPE))))
10211    return 0;
10212#endif
10213
10214  STRIP_NOPS (arg0);
10215  STRIP_NOPS (arg1);
10216
10217  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10218     operation of some type.  Some comparisons against 1 and -1 can be
10219     converted to comparisons with zero.  Do so here so that the tests
10220     below will be aware that we have a comparison with zero.   These
10221     tests will not catch constants in the first operand, but constants
10222     are rarely passed as the first operand.  */
10223
10224  switch (TREE_CODE (exp))
10225    {
10226    case EQ_EXPR:
10227      code = EQ;
10228      break;
10229    case NE_EXPR:
10230      code = NE;
10231      break;
10232    case LT_EXPR:
10233      if (integer_onep (arg1))
10234	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10235      else
10236	code = unsignedp ? LTU : LT;
10237      break;
10238    case LE_EXPR:
10239      if (! unsignedp && integer_all_onesp (arg1))
10240	arg1 = integer_zero_node, code = LT;
10241      else
10242	code = unsignedp ? LEU : LE;
10243      break;
10244    case GT_EXPR:
10245      if (! unsignedp && integer_all_onesp (arg1))
10246	arg1 = integer_zero_node, code = GE;
10247      else
10248	code = unsignedp ? GTU : GT;
10249      break;
10250    case GE_EXPR:
10251      if (integer_onep (arg1))
10252	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10253      else
10254	code = unsignedp ? GEU : GE;
10255      break;
10256
10257    case UNORDERED_EXPR:
10258      code = UNORDERED;
10259      break;
10260    case ORDERED_EXPR:
10261      code = ORDERED;
10262      break;
10263    case UNLT_EXPR:
10264      code = UNLT;
10265      break;
10266    case UNLE_EXPR:
10267      code = UNLE;
10268      break;
10269    case UNGT_EXPR:
10270      code = UNGT;
10271      break;
10272    case UNGE_EXPR:
10273      code = UNGE;
10274      break;
10275    case UNEQ_EXPR:
10276      code = UNEQ;
10277      break;
10278
10279    default:
10280      abort ();
10281    }
10282
10283  /* Put a constant second.  */
10284  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10285    {
10286      tem = arg0; arg0 = arg1; arg1 = tem;
10287      code = swap_condition (code);
10288    }
10289
10290  /* If this is an equality or inequality test of a single bit, we can
10291     do this by shifting the bit being tested to the low-order bit and
10292     masking the result with the constant 1.  If the condition was EQ,
10293     we xor it with 1.  This does not require an scc insn and is faster
10294     than an scc insn even if we have it.  */
10295
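  /* Editorial worked example (not in the original source): for
     `(x & 8) != 0', BITNUM is 3 and the value is computed as `(x >> 3) & 1';
     for `(x & 8) == 0' the shifted value is additionally XORed with 1.  */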
10296  if ((code == NE || code == EQ)
10297      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10298      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10299    {
10300      tree inner = TREE_OPERAND (arg0, 0);
10301      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10302      int ops_unsignedp;
10303
10304      /* If INNER is a right shift of a constant and it plus BITNUM does
10305	 not overflow, adjust BITNUM and INNER.  */
10306
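      /* Editorial example (not in the original source): `((x >> 2) & 4) != 0'
	 arrives with BITNUM == 2 and INNER == (x >> 2); the adjustment
	 turns it into a test of bit 4 of X, i.e. `(x & 16) != 0'.  */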
10307      if (TREE_CODE (inner) == RSHIFT_EXPR
10308	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10309	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10310	  && bitnum < TYPE_PRECISION (type)
10311	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10312				   TYPE_PRECISION (type) - bitnum))
10313	{
10314	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10315	  inner = TREE_OPERAND (inner, 0);
10316	}
10317
10318      /* If we are going to be able to omit the AND below, we must do our
10319	 operations as unsigned.  If we must use the AND, we have a choice.
10320	 Normally unsigned is faster, but for some machines signed is.  */
10321      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10322#ifdef LOAD_EXTEND_OP
10323		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10324#else
10325		       : 1
10326#endif
10327		       );
10328
10329      if (! get_subtarget (subtarget)
10330	  || GET_MODE (subtarget) != operand_mode
10331	  || ! safe_from_p (subtarget, inner, 1))
10332	subtarget = 0;
10333
10334      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10335
10336      if (bitnum != 0)
10337	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10338			    size_int (bitnum), subtarget, ops_unsignedp);
10339
10340      if (GET_MODE (op0) != mode)
10341	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10342
10343      if ((code == EQ && ! invert) || (code == NE && invert))
10344	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10345			    ops_unsignedp, OPTAB_LIB_WIDEN);
10346
10347      /* Put the AND last so it can combine with more things.  */
10348      if (bitnum != TYPE_PRECISION (type) - 1)
10349	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10350
10351      return op0;
10352    }
10353
10354  /* Now see if we are likely to be able to do this.  Return if not.  */
10355  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10356    return 0;
10357
10358  icode = setcc_gen_code[(int) code];
10359  if (icode == CODE_FOR_nothing
10360      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10361    {
10362      /* We can only do this if it is one of the special cases that
10363	 can be handled without an scc insn.  */
10364      if ((code == LT && integer_zerop (arg1))
10365	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10366	;
10367      else if (BRANCH_COST >= 0
10368	       && ! only_cheap && (code == NE || code == EQ)
10369	       && TREE_CODE (type) != REAL_TYPE
10370	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10371		    != CODE_FOR_nothing)
10372		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10373		       != CODE_FOR_nothing)))
10374	;
10375      else
10376	return 0;
10377    }
10378
10379  if (! get_subtarget (target)
10380      || GET_MODE (subtarget) != operand_mode
10381      || ! safe_from_p (subtarget, arg1, 1))
10382    subtarget = 0;
10383
10384  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10385  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10386
10387  if (target == 0)
10388    target = gen_reg_rtx (mode);
10389
10390  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10391     because, if emit_store_flag does anything, it will succeed and
10392     OP0 and OP1 will not be used subsequently.  */
10393
10394  result = emit_store_flag (target, code,
10395			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10396			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10397			    operand_mode, unsignedp, 1);
10398
10399  if (result)
10400    {
10401      if (invert)
10402	result = expand_binop (mode, xor_optab, result, const1_rtx,
10403			       result, 0, OPTAB_LIB_WIDEN);
10404      return result;
10405    }
10406
10407  /* If this failed, we have to do this with set/compare/jump/set code.  */
10408  if (GET_CODE (target) != REG
10409      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10410    target = gen_reg_rtx (GET_MODE (target));
10411
10412  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10413  result = compare_from_rtx (op0, op1, code, unsignedp,
10414			     operand_mode, NULL_RTX);
10415  if (GET_CODE (result) == CONST_INT)
10416    return (((result == const0_rtx && ! invert)
10417	     || (result != const0_rtx && invert))
10418	    ? const0_rtx : const1_rtx);
10419
10420  /* The code of RESULT may not match CODE if compare_from_rtx
10421     decided to swap its operands and reverse the original code.
10422
10423     We know that compare_from_rtx returns either a CONST_INT or
10424     a new comparison code, so it is safe to just extract the
10425     code from RESULT.  */
10426  code = GET_CODE (result);
10427
10428  label = gen_label_rtx ();
10429  if (bcc_gen_fctn[(int) code] == 0)
10430    abort ();
10431
10432  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10433  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10434  emit_label (label);
10435
10436  return target;
10437}
10438
10439
10440/* Stubs in case we haven't got a casesi insn.  */
10441#ifndef HAVE_casesi
10442# define HAVE_casesi 0
10443# define gen_casesi(a, b, c, d, e) (0)
10444# define CODE_FOR_casesi CODE_FOR_nothing
10445#endif
10446
10447/* If the machine does not have a case insn that compares the bounds,
10448   this means extra overhead for dispatch tables, which raises the
10449   threshold for using them.  */
10450#ifndef CASE_VALUES_THRESHOLD
10451#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10452#endif /* CASE_VALUES_THRESHOLD */
10453
10454unsigned int
10455case_values_threshold ()
10456{
10457  return CASE_VALUES_THRESHOLD;
10458}
10459
10460/* Attempt to generate a casesi instruction.  Returns 1 if successful,
10461   0 otherwise (i.e. if there is no casesi instruction).  */
10462int
10463try_casesi (index_type, index_expr, minval, range,
10464	    table_label, default_label)
10465     tree index_type, index_expr, minval, range;
10466     rtx table_label ATTRIBUTE_UNUSED;
10467     rtx default_label;
10468{
10469  enum machine_mode index_mode = SImode;
10470  int index_bits = GET_MODE_BITSIZE (index_mode);
10471  rtx op1, op2, index;
10472  enum machine_mode op_mode;
10473
10474  if (! HAVE_casesi)
10475    return 0;
10476
10477  /* Convert the index to SImode.  */
10478  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10479    {
10480      enum machine_mode omode = TYPE_MODE (index_type);
10481      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10482
10483      /* We must handle the endpoints in the original mode.  */
10484      index_expr = build (MINUS_EXPR, index_type,
10485			  index_expr, minval);
10486      minval = integer_zero_node;
10487      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10488      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10489			       omode, 1, default_label);
10490      /* Now we can safely truncate.  */
10491      index = convert_to_mode (index_mode, index, 0);
10492    }
10493  else
10494    {
10495      if (TYPE_MODE (index_type) != index_mode)
10496	{
10497	  index_expr = convert (type_for_size (index_bits, 0),
10498				index_expr);
10499	  index_type = TREE_TYPE (index_expr);
10500	}
10501
10502      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10503    }
10504  emit_queue ();
10505  index = protect_from_queue (index, 0);
10506  do_pending_stack_adjust ();
10507
10508  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10509  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10510      (index, op_mode))
10511    index = copy_to_mode_reg (op_mode, index);
10512
10513  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10514
10515  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10516  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10517		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10518  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10519      (op1, op_mode))
10520    op1 = copy_to_mode_reg (op_mode, op1);
10521
10522  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10523
10524  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10525  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10526		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
10527  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10528      (op2, op_mode))
10529    op2 = copy_to_mode_reg (op_mode, op2);
10530
10531  emit_jump_insn (gen_casesi (index, op1, op2,
10532			      table_label, default_label));
10533  return 1;
10534}
10535
10536/* Attempt to generate a tablejump instruction; same concept as try_casesi.  */
10537#ifndef HAVE_tablejump
10538#define HAVE_tablejump 0
10539#define gen_tablejump(x, y) (0)
10540#endif
10541
10542/* Subroutine of the next function.
10543
10544   INDEX is the value being switched on, with the lowest value
10545   in the table already subtracted.
10546   MODE is its expected mode (needed if INDEX is constant).
10547   RANGE is the length of the jump table.
10548   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10549
10550   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10551   index value is out of range.  */
10552
10553static void
10554do_tablejump (index, mode, range, table_label, default_label)
10555     rtx index, range, table_label, default_label;
10556     enum machine_mode mode;
10557{
10558  rtx temp, vector;
10559
10560  /* Do an unsigned comparison (in the proper mode) between the index
10561     expression and the value which represents the length of the range.
10562     Since we just finished subtracting the lower bound of the range
10563     from the index expression, this comparison allows us to simultaneously
10564     check that the original index expression value is both greater than
10565     or equal to the minimum value of the range and less than or equal to
10566     the maximum value of the range.  */
10567
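  /* Editorial example (not in the original source): for case values 3..10
     the caller passes INDEX = orig_index - 3 and RANGE = 7; the single
     unsigned test `INDEX > 7' then rejects both orig_index < 3 (which
     wrapped around to a huge unsigned value) and orig_index > 10.  */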
10568  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10569			   default_label);
10570
10571  /* If index is in range, it must fit in Pmode.
10572     Convert to Pmode so we can index with it.  */
10573  if (mode != Pmode)
10574    index = convert_to_mode (Pmode, index, 1);
10575
10576  /* Don't let a MEM slip through, because then INDEX that comes
10577     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10578     and break_out_memory_refs will go to work on it and mess it up.  */
10579#ifdef PIC_CASE_VECTOR_ADDRESS
10580  if (flag_pic && GET_CODE (index) != REG)
10581    index = copy_to_mode_reg (Pmode, index);
10582#endif
10583
10584  /* If flag_force_addr were to affect this address
10585     it could interfere with the tricky assumptions made
10586     about addresses that contain label-refs,
10587     which may be valid only very near the tablejump itself.  */
10588  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10589     GET_MODE_SIZE, because this indicates how large insns are.  The other
10590     uses should all be Pmode, because they are addresses.  This code
10591     could fail if addresses and insns are not the same size.  */
10592  index = gen_rtx_PLUS (Pmode,
10593			gen_rtx_MULT (Pmode, index,
10594				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10595			gen_rtx_LABEL_REF (Pmode, table_label));
10596#ifdef PIC_CASE_VECTOR_ADDRESS
10597  if (flag_pic)
10598    index = PIC_CASE_VECTOR_ADDRESS (index);
10599  else
10600#endif
10601    index = memory_address_noforce (CASE_VECTOR_MODE, index);
10602  temp = gen_reg_rtx (CASE_VECTOR_MODE);
10603  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10604  RTX_UNCHANGING_P (vector) = 1;
10605  convert_move (temp, vector, 0);
10606
10607  emit_jump_insn (gen_tablejump (temp, table_label));
10608
10609  /* If we are generating PIC code or if the table is PC-relative, the
10610     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10611  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10612    emit_barrier ();
10613}
10614
10615int
10616try_tablejump (index_type, index_expr, minval, range,
10617	       table_label, default_label)
10618     tree index_type, index_expr, minval, range;
10619     rtx table_label, default_label;
10620{
10621  rtx index;
10622
10623  if (! HAVE_tablejump)
10624    return 0;
10625
10626  index_expr = fold (build (MINUS_EXPR, index_type,
10627			    convert (index_type, index_expr),
10628			    convert (index_type, minval)));
10629  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10630  emit_queue ();
10631  index = protect_from_queue (index, 0);
10632  do_pending_stack_adjust ();
10633
10634  do_tablejump (index, TYPE_MODE (index_type),
10635		convert_modes (TYPE_MODE (index_type),
10636			       TYPE_MODE (TREE_TYPE (range)),
10637			       expand_expr (range, NULL_RTX,
10638					    VOIDmode, 0),
10639			       TREE_UNSIGNED (TREE_TYPE (range))),
10640		table_label, default_label);
10641  return 1;
10642}
10643