expr.c revision 107590
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "tree.h"
27#include "obstack.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should if the stack and args grow in opposite directions, but
53   only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#ifndef PUSH_ARGS_REVERSED
58#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
60#endif
61#endif
62
63#endif
64
65#ifndef STACK_PUSH_CODE
66#ifdef STACK_GROWS_DOWNWARD
67#define STACK_PUSH_CODE PRE_DEC
68#else
69#define STACK_PUSH_CODE PRE_INC
70#endif
71#endif
72
73/* Assume that case vectors are not pc-relative.  */
74#ifndef CASE_VECTOR_PC_RELATIVE
75#define CASE_VECTOR_PC_RELATIVE 0
76#endif
77
78/* If this is nonzero, we do not bother generating VOLATILE
79   around volatile memory references, and we are willing to
80   output indirect addresses.  If cse is to follow, we reject
81   indirect addresses so a useful potential cse is generated;
82   if it is used only once, instruction combination will produce
83   the same indirect address eventually.  */
84int cse_not_expected;
85
86/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
87static tree placeholder_list = 0;
88
89/* This structure is used by move_by_pieces to describe the move to
90   be performed.  */
91struct move_by_pieces
92{
93  rtx to;
94  rtx to_addr;
95  int autinc_to;
96  int explicit_inc_to;
97  rtx from;
98  rtx from_addr;
99  int autinc_from;
100  int explicit_inc_from;
101  unsigned HOST_WIDE_INT len;
102  HOST_WIDE_INT offset;
103  int reverse;
104};
105
106/* This structure is used by store_by_pieces to describe the clear to
107   be performed.  */
108
109struct store_by_pieces
110{
111  rtx to;
112  rtx to_addr;
113  int autinc_to;
114  int explicit_inc_to;
115  unsigned HOST_WIDE_INT len;
116  HOST_WIDE_INT offset;
117  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
118  PTR constfundata;
119  int reverse;
120};
121
122extern struct obstack permanent_obstack;
123
124static rtx enqueue_insn		PARAMS ((rtx, rtx));
125static unsigned HOST_WIDE_INT move_by_pieces_ninsns
126				PARAMS ((unsigned HOST_WIDE_INT,
127					 unsigned int));
128static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
129					 struct move_by_pieces *));
130static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
131					 enum machine_mode));
132static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
133					 unsigned int));
134static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
135					 unsigned int));
136static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
137					 enum machine_mode,
138					 struct store_by_pieces *));
139static rtx get_subtarget	PARAMS ((rtx));
140static int is_zeros_p		PARAMS ((tree));
141static int mostly_zeros_p	PARAMS ((tree));
142static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
143					     HOST_WIDE_INT, enum machine_mode,
144					     tree, tree, int, int));
145static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
146static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
147					 HOST_WIDE_INT, enum machine_mode,
148					 tree, enum machine_mode, int, tree,
149					 int));
150static rtx var_rtx		PARAMS ((tree));
151static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
152static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
153static int is_aligning_offset	PARAMS ((tree, tree));
154static rtx expand_increment	PARAMS ((tree, int, int));
155static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
156static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
157static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
158					 rtx, rtx));
159static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
160#ifdef PUSH_ROUNDING
161static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
162#endif
163static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
164
165/* Record for each mode whether we can move a register directly to or
166   from an object of that mode in memory.  If we can't, we won't try
167   to use that mode directly when accessing a field of that mode.  */
168
169static char direct_load[NUM_MACHINE_MODES];
170static char direct_store[NUM_MACHINE_MODES];
171
172/* If a memory-to-memory move would take MOVE_RATIO or more simple
173   move-instruction sequences, we will do a movstr or libcall instead.  */
174
175#ifndef MOVE_RATIO
176#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
177#define MOVE_RATIO 2
178#else
179/* If we are optimizing for space (-Os), cut down the default move ratio.  */
180#define MOVE_RATIO (optimize_size ? 3 : 15)
181#endif
182#endif
183
184/* This macro is used to determine whether move_by_pieces should be called
185   to perform a structure copy.  */
186#ifndef MOVE_BY_PIECES_P
187#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
188  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
189#endif
190
191/* This array records the insn_code of insns to perform block moves.  */
192enum insn_code movstr_optab[NUM_MACHINE_MODES];
193
194/* This array records the insn_code of insns to perform block clears.  */
195enum insn_code clrstr_optab[NUM_MACHINE_MODES];
196
197/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */
198
199#ifndef SLOW_UNALIGNED_ACCESS
200#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
201#endif
202
203/* This is run once per compilation to set up which modes can be used
204   directly in memory and to initialize the block move optab.  */
205
206void
207init_expr_once ()
208{
209  rtx insn, pat;
210  enum machine_mode mode;
211  int num_clobbers;
212  rtx mem, mem1;
213
214  start_sequence ();
215
216  /* Try indexing by frame ptr and try by stack ptr.
217     It is known that on the Convex the stack ptr isn't a valid index.
218     With luck, one or the other is valid on any machine.  */
219  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
220  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
221
222  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
223  pat = PATTERN (insn);
224
225  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
226       mode = (enum machine_mode) ((int) mode + 1))
227    {
228      int regno;
229      rtx reg;
230
231      direct_load[(int) mode] = direct_store[(int) mode] = 0;
232      PUT_MODE (mem, mode);
233      PUT_MODE (mem1, mode);
234
235      /* See if there is some register that can be used in this mode and
236	 directly loaded or stored from memory.  */
237
238      if (mode != VOIDmode && mode != BLKmode)
239	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
240	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
241	     regno++)
242	  {
243	    if (! HARD_REGNO_MODE_OK (regno, mode))
244	      continue;
245
246	    reg = gen_rtx_REG (mode, regno);
247
248	    SET_SRC (pat) = mem;
249	    SET_DEST (pat) = reg;
250	    if (recog (pat, insn, &num_clobbers) >= 0)
251	      direct_load[(int) mode] = 1;
252
253	    SET_SRC (pat) = mem1;
254	    SET_DEST (pat) = reg;
255	    if (recog (pat, insn, &num_clobbers) >= 0)
256	      direct_load[(int) mode] = 1;
257
258	    SET_SRC (pat) = reg;
259	    SET_DEST (pat) = mem;
260	    if (recog (pat, insn, &num_clobbers) >= 0)
261	      direct_store[(int) mode] = 1;
262
263	    SET_SRC (pat) = reg;
264	    SET_DEST (pat) = mem1;
265	    if (recog (pat, insn, &num_clobbers) >= 0)
266	      direct_store[(int) mode] = 1;
267	  }
268    }
269
270  end_sequence ();
271}
272
273/* This is run at the start of compiling a function.  */
274
275void
276init_expr ()
277{
278  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
279
280  pending_chain = 0;
281  pending_stack_adjust = 0;
282  stack_pointer_delta = 0;
283  inhibit_defer_pop = 0;
284  saveregs_value = 0;
285  apply_args_value = 0;
286  forced_labels = 0;
287}
288
289void
290mark_expr_status (p)
291     struct expr_status *p;
292{
293  if (p == NULL)
294    return;
295
296  ggc_mark_rtx (p->x_saveregs_value);
297  ggc_mark_rtx (p->x_apply_args_value);
298  ggc_mark_rtx (p->x_forced_labels);
299}
300
301void
302free_expr_status (f)
303     struct function *f;
304{
305  free (f->expr);
306  f->expr = NULL;
307}
308
309/* Small sanity check that the queue is empty at the end of a function.  */
310
311void
312finish_expr_for_function ()
313{
314  if (pending_chain)
315    abort ();
316}
317
318/* Manage the queue of increment instructions to be output
319   for POSTINCREMENT_EXPR expressions, etc.  */
320
321/* Queue up to increment (or change) VAR later.  BODY says how:
322   BODY should be the same thing you would pass to emit_insn
323   to increment right away.  It will go to emit_insn later on.
324
325   The value is a QUEUED expression to be used in place of VAR
326   where you want to guarantee the pre-incrementation value of VAR.  */
327
328static rtx
329enqueue_insn (var, body)
330     rtx var, body;
331{
332  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
333				  body, pending_chain);
334  return pending_chain;
335}
336
337/* Use protect_from_queue to convert a QUEUED expression
338   into something that you can put immediately into an instruction.
339   If the queued incrementation has not happened yet,
340   protect_from_queue returns the variable itself.
341   If the incrementation has happened, protect_from_queue returns a temp
342   that contains a copy of the old value of the variable.
343
344   Any time an rtx which might possibly be a QUEUED is to be put
345   into an instruction, it must be passed through protect_from_queue first.
346   QUEUED expressions are not meaningful in instructions.
347
348   Do not pass a value through protect_from_queue and then hold
349   on to it for a while before putting it in an instruction!
350   If the queue is flushed in between, incorrect code will result.  */
351
352rtx
353protect_from_queue (x, modify)
354     rtx x;
355     int modify;
356{
357  RTX_CODE code = GET_CODE (x);
358
359#if 0  /* A QUEUED can hang around after the queue is forced out.  */
360  /* Shortcut for most common case.  */
361  if (pending_chain == 0)
362    return x;
363#endif
364
365  if (code != QUEUED)
366    {
367      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
368	 use of autoincrement.  Make a copy of the contents of the memory
369	 location rather than a copy of the address, but not if the value is
370	 of mode BLKmode.  Don't modify X in place since it might be
371	 shared.  */
372      if (code == MEM && GET_MODE (x) != BLKmode
373	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
374	{
375	  rtx y = XEXP (x, 0);
376	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
377
378	  if (QUEUED_INSN (y))
379	    {
380	      rtx temp = gen_reg_rtx (GET_MODE (x));
381
382	      emit_insn_before (gen_move_insn (temp, new),
383				QUEUED_INSN (y));
384	      return temp;
385	    }
386
387	  /* Copy the address into a pseudo, so that the returned value
388	     remains correct across calls to emit_queue.  */
389	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
390	}
391
392      /* Otherwise, recursively protect the subexpressions of all
393	 the kinds of rtx's that can contain a QUEUED.  */
394      if (code == MEM)
395	{
396	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
397	  if (tem != XEXP (x, 0))
398	    {
399	      x = copy_rtx (x);
400	      XEXP (x, 0) = tem;
401	    }
402	}
403      else if (code == PLUS || code == MULT)
404	{
405	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
406	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
407	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
408	    {
409	      x = copy_rtx (x);
410	      XEXP (x, 0) = new0;
411	      XEXP (x, 1) = new1;
412	    }
413	}
414      return x;
415    }
416  /* If the increment has not happened, use the variable itself.  Copy it
417     into a new pseudo so that the value remains correct across calls to
418     emit_queue.  */
419  if (QUEUED_INSN (x) == 0)
420    return copy_to_reg (QUEUED_VAR (x));
421  /* If the increment has happened and a pre-increment copy exists,
422     use that copy.  */
423  if (QUEUED_COPY (x) != 0)
424    return QUEUED_COPY (x);
425  /* The increment has happened but we haven't set up a pre-increment copy.
426     Set one up now, and use it.  */
427  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
428  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
429		    QUEUED_INSN (x));
430  return QUEUED_COPY (x);
431}
432
433/* Return nonzero if X contains a QUEUED expression:
434   if it contains anything that will be altered by a queued increment.
435   We handle only combinations of MEM, PLUS, MINUS and MULT operators
436   since memory addresses generally contain only those.  */
437
438int
439queued_subexp_p (x)
440     rtx x;
441{
442  enum rtx_code code = GET_CODE (x);
443  switch (code)
444    {
445    case QUEUED:
446      return 1;
447    case MEM:
448      return queued_subexp_p (XEXP (x, 0));
449    case MULT:
450    case PLUS:
451    case MINUS:
452      return (queued_subexp_p (XEXP (x, 0))
453	      || queued_subexp_p (XEXP (x, 1)));
454    default:
455      return 0;
456    }
457}
458
459/* Perform all the pending incrementations.  */
460
461void
462emit_queue ()
463{
464  rtx p;
465  while ((p = pending_chain))
466    {
467      rtx body = QUEUED_BODY (p);
468
469      if (GET_CODE (body) == SEQUENCE)
470	{
471	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
472	  emit_insn (QUEUED_BODY (p));
473	}
474      else
475	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
476      pending_chain = QUEUED_NEXT (p);
477    }
478}
479
480/* Copy data from FROM to TO, where the machine modes are not the same.
481   Both modes may be integer, or both may be floating.
482   UNSIGNEDP should be nonzero if FROM is an unsigned type.
483   This causes zero-extension instead of sign-extension.  */
484
485void
486convert_move (to, from, unsignedp)
487     rtx to, from;
488     int unsignedp;
489{
490  enum machine_mode to_mode = GET_MODE (to);
491  enum machine_mode from_mode = GET_MODE (from);
492  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
493  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
494  enum insn_code code;
495  rtx libcall;
496
497  /* rtx code for making an equivalent value.  */
498  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
499
500  to = protect_from_queue (to, 1);
501  from = protect_from_queue (from, 0);
502
503  if (to_real != from_real)
504    abort ();
505
506  /* If FROM is a SUBREG that indicates that we have already done at least
507     the required extension, strip it.  We don't handle such SUBREGs as
508     TO here.  */
509
510  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
511      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
512	  >= GET_MODE_SIZE (to_mode))
513      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
514    from = gen_lowpart (to_mode, from), from_mode = to_mode;
515
516  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
517    abort ();
518
519  if (to_mode == from_mode
520      || (from_mode == VOIDmode && CONSTANT_P (from)))
521    {
522      emit_move_insn (to, from);
523      return;
524    }
525
526  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
527    {
528      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
529	abort ();
530
531      if (VECTOR_MODE_P (to_mode))
532	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
533      else
534	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
535
536      emit_move_insn (to, from);
537      return;
538    }
539
540  if (to_real != from_real)
541    abort ();
542
543  if (to_real)
544    {
545      rtx value, insns;
546
547      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
548	{
549	  /* Try converting directly if the insn is supported.  */
550	  if ((code = can_extend_p (to_mode, from_mode, 0))
551	      != CODE_FOR_nothing)
552	    {
553	      emit_unop_insn (code, to, from, UNKNOWN);
554	      return;
555	    }
556	}
557
558#ifdef HAVE_trunchfqf2
559      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
560	{
561	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
562	  return;
563	}
564#endif
565#ifdef HAVE_trunctqfqf2
566      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
567	{
568	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
569	  return;
570	}
571#endif
572#ifdef HAVE_truncsfqf2
573      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
574	{
575	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
576	  return;
577	}
578#endif
579#ifdef HAVE_truncdfqf2
580      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
581	{
582	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
583	  return;
584	}
585#endif
586#ifdef HAVE_truncxfqf2
587      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
588	{
589	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
590	  return;
591	}
592#endif
593#ifdef HAVE_trunctfqf2
594      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
595	{
596	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
597	  return;
598	}
599#endif
600
601#ifdef HAVE_trunctqfhf2
602      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
603	{
604	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
605	  return;
606	}
607#endif
608#ifdef HAVE_truncsfhf2
609      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
610	{
611	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
612	  return;
613	}
614#endif
615#ifdef HAVE_truncdfhf2
616      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
617	{
618	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
619	  return;
620	}
621#endif
622#ifdef HAVE_truncxfhf2
623      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
624	{
625	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
626	  return;
627	}
628#endif
629#ifdef HAVE_trunctfhf2
630      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
631	{
632	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
633	  return;
634	}
635#endif
636
637#ifdef HAVE_truncsftqf2
638      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
639	{
640	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
641	  return;
642	}
643#endif
644#ifdef HAVE_truncdftqf2
645      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
646	{
647	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
648	  return;
649	}
650#endif
651#ifdef HAVE_truncxftqf2
652      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
653	{
654	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
655	  return;
656	}
657#endif
658#ifdef HAVE_trunctftqf2
659      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
660	{
661	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
662	  return;
663	}
664#endif
665
666#ifdef HAVE_truncdfsf2
667      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
668	{
669	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
670	  return;
671	}
672#endif
673#ifdef HAVE_truncxfsf2
674      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
675	{
676	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
677	  return;
678	}
679#endif
680#ifdef HAVE_trunctfsf2
681      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
682	{
683	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
684	  return;
685	}
686#endif
687#ifdef HAVE_truncxfdf2
688      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
689	{
690	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
691	  return;
692	}
693#endif
694#ifdef HAVE_trunctfdf2
695      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
696	{
697	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
698	  return;
699	}
700#endif
701
702      libcall = (rtx) 0;
703      switch (from_mode)
704	{
705	case SFmode:
706	  switch (to_mode)
707	    {
708	    case DFmode:
709	      libcall = extendsfdf2_libfunc;
710	      break;
711
712	    case XFmode:
713	      libcall = extendsfxf2_libfunc;
714	      break;
715
716	    case TFmode:
717	      libcall = extendsftf2_libfunc;
718	      break;
719
720	    default:
721	      break;
722	    }
723	  break;
724
725	case DFmode:
726	  switch (to_mode)
727	    {
728	    case SFmode:
729	      libcall = truncdfsf2_libfunc;
730	      break;
731
732	    case XFmode:
733	      libcall = extenddfxf2_libfunc;
734	      break;
735
736	    case TFmode:
737	      libcall = extenddftf2_libfunc;
738	      break;
739
740	    default:
741	      break;
742	    }
743	  break;
744
745	case XFmode:
746	  switch (to_mode)
747	    {
748	    case SFmode:
749	      libcall = truncxfsf2_libfunc;
750	      break;
751
752	    case DFmode:
753	      libcall = truncxfdf2_libfunc;
754	      break;
755
756	    default:
757	      break;
758	    }
759	  break;
760
761	case TFmode:
762	  switch (to_mode)
763	    {
764	    case SFmode:
765	      libcall = trunctfsf2_libfunc;
766	      break;
767
768	    case DFmode:
769	      libcall = trunctfdf2_libfunc;
770	      break;
771
772	    default:
773	      break;
774	    }
775	  break;
776
777	default:
778	  break;
779	}
780
781      if (libcall == (rtx) 0)
782	/* This conversion is not implemented yet.  */
783	abort ();
784
785      start_sequence ();
786      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
787				       1, from, from_mode);
788      insns = get_insns ();
789      end_sequence ();
790      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
791								    from));
792      return;
793    }
794
795  /* Now both modes are integers.  */
796
797  /* Handle expanding beyond a word.  */
798  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
799      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
800    {
801      rtx insns;
802      rtx lowpart;
803      rtx fill_value;
804      rtx lowfrom;
805      int i;
806      enum machine_mode lowpart_mode;
807      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
808
809      /* Try converting directly if the insn is supported.  */
810      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
811	  != CODE_FOR_nothing)
812	{
813	  /* If FROM is a SUBREG, put it into a register.  Do this
814	     so that we always generate the same set of insns for
815	     better cse'ing; if an intermediate assignment occurred,
816	     we won't be doing the operation directly on the SUBREG.  */
817	  if (optimize > 0 && GET_CODE (from) == SUBREG)
818	    from = force_reg (from_mode, from);
819	  emit_unop_insn (code, to, from, equiv_code);
820	  return;
821	}
822      /* Next, try converting via full word.  */
823      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
824	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
825		   != CODE_FOR_nothing))
826	{
827	  if (GET_CODE (to) == REG)
828	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
829	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
830	  emit_unop_insn (code, to,
831			  gen_lowpart (word_mode, to), equiv_code);
832	  return;
833	}
834
835      /* No special multiword conversion insn; do it by hand.  */
836      start_sequence ();
837
838      /* Since we will turn this into a no conflict block, we must ensure
839	 that the source does not overlap the target.  */
840
841      if (reg_overlap_mentioned_p (to, from))
842	from = force_reg (from_mode, from);
843
844      /* Get a copy of FROM widened to a word, if necessary.  */
845      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
846	lowpart_mode = word_mode;
847      else
848	lowpart_mode = from_mode;
849
850      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
851
852      lowpart = gen_lowpart (lowpart_mode, to);
853      emit_move_insn (lowpart, lowfrom);
854
855      /* Compute the value to put in each remaining word.  */
856      if (unsignedp)
857	fill_value = const0_rtx;
858      else
859	{
860#ifdef HAVE_slt
861	  if (HAVE_slt
862	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
863	      && STORE_FLAG_VALUE == -1)
864	    {
865	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
866			     lowpart_mode, 0);
867	      fill_value = gen_reg_rtx (word_mode);
868	      emit_insn (gen_slt (fill_value));
869	    }
870	  else
871#endif
872	    {
873	      fill_value
874		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
875				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
876				NULL_RTX, 0);
877	      fill_value = convert_to_mode (word_mode, fill_value, 1);
878	    }
879	}
880
881      /* Fill the remaining words.  */
882      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
883	{
884	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
885	  rtx subword = operand_subword (to, index, 1, to_mode);
886
887	  if (subword == 0)
888	    abort ();
889
890	  if (fill_value != subword)
891	    emit_move_insn (subword, fill_value);
892	}
893
894      insns = get_insns ();
895      end_sequence ();
896
897      emit_no_conflict_block (insns, to, from, NULL_RTX,
898			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
899      return;
900    }
901
902  /* Truncating multi-word to a word or less.  */
903  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
904      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
905    {
906      if (!((GET_CODE (from) == MEM
907	     && ! MEM_VOLATILE_P (from)
908	     && direct_load[(int) to_mode]
909	     && ! mode_dependent_address_p (XEXP (from, 0)))
910	    || GET_CODE (from) == REG
911	    || GET_CODE (from) == SUBREG))
912	from = force_reg (from_mode, from);
913      convert_move (to, gen_lowpart (word_mode, from), 0);
914      return;
915    }
916
917  /* Handle pointer conversion.  */			/* SPEE 900220.  */
918  if (to_mode == PQImode)
919    {
920      if (from_mode != QImode)
921	from = convert_to_mode (QImode, from, unsignedp);
922
923#ifdef HAVE_truncqipqi2
924      if (HAVE_truncqipqi2)
925	{
926	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
927	  return;
928	}
929#endif /* HAVE_truncqipqi2 */
930      abort ();
931    }
932
933  if (from_mode == PQImode)
934    {
935      if (to_mode != QImode)
936	{
937	  from = convert_to_mode (QImode, from, unsignedp);
938	  from_mode = QImode;
939	}
940      else
941	{
942#ifdef HAVE_extendpqiqi2
943	  if (HAVE_extendpqiqi2)
944	    {
945	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
946	      return;
947	    }
948#endif /* HAVE_extendpqiqi2 */
949	  abort ();
950	}
951    }
952
953  if (to_mode == PSImode)
954    {
955      if (from_mode != SImode)
956	from = convert_to_mode (SImode, from, unsignedp);
957
958#ifdef HAVE_truncsipsi2
959      if (HAVE_truncsipsi2)
960	{
961	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
962	  return;
963	}
964#endif /* HAVE_truncsipsi2 */
965      abort ();
966    }
967
968  if (from_mode == PSImode)
969    {
970      if (to_mode != SImode)
971	{
972	  from = convert_to_mode (SImode, from, unsignedp);
973	  from_mode = SImode;
974	}
975      else
976	{
977#ifdef HAVE_extendpsisi2
978	  if (! unsignedp && HAVE_extendpsisi2)
979	    {
980	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
981	      return;
982	    }
983#endif /* HAVE_extendpsisi2 */
984#ifdef HAVE_zero_extendpsisi2
985	  if (unsignedp && HAVE_zero_extendpsisi2)
986	    {
987	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
988	      return;
989	    }
990#endif /* HAVE_zero_extendpsisi2 */
991	  abort ();
992	}
993    }
994
995  if (to_mode == PDImode)
996    {
997      if (from_mode != DImode)
998	from = convert_to_mode (DImode, from, unsignedp);
999
1000#ifdef HAVE_truncdipdi2
1001      if (HAVE_truncdipdi2)
1002	{
1003	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004	  return;
1005	}
1006#endif /* HAVE_truncdipdi2 */
1007      abort ();
1008    }
1009
1010  if (from_mode == PDImode)
1011    {
1012      if (to_mode != DImode)
1013	{
1014	  from = convert_to_mode (DImode, from, unsignedp);
1015	  from_mode = DImode;
1016	}
1017      else
1018	{
1019#ifdef HAVE_extendpdidi2
1020	  if (HAVE_extendpdidi2)
1021	    {
1022	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023	      return;
1024	    }
1025#endif /* HAVE_extendpdidi2 */
1026	  abort ();
1027	}
1028    }
1029
1030  /* Now follow all the conversions between integers
1031     no more than a word long.  */
1032
1033  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1034  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036				GET_MODE_BITSIZE (from_mode)))
1037    {
1038      if (!((GET_CODE (from) == MEM
1039	     && ! MEM_VOLATILE_P (from)
1040	     && direct_load[(int) to_mode]
1041	     && ! mode_dependent_address_p (XEXP (from, 0)))
1042	    || GET_CODE (from) == REG
1043	    || GET_CODE (from) == SUBREG))
1044	from = force_reg (from_mode, from);
1045      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047	from = copy_to_reg (from);
1048      emit_move_insn (to, gen_lowpart (to_mode, from));
1049      return;
1050    }
1051
1052  /* Handle extension.  */
1053  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054    {
1055      /* Convert directly if that works.  */
1056      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057	  != CODE_FOR_nothing)
1058	{
1059	  if (flag_force_mem)
1060	    from = force_not_mem (from);
1061
1062	  emit_unop_insn (code, to, from, equiv_code);
1063	  return;
1064	}
1065      else
1066	{
1067	  enum machine_mode intermediate;
1068	  rtx tmp;
1069	  tree shift_amount;
1070
1071	  /* Search for a mode to convert via.  */
1072	  for (intermediate = from_mode; intermediate != VOIDmode;
1073	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1074	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1075		  != CODE_FOR_nothing)
1076		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1077		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1078					       GET_MODE_BITSIZE (intermediate))))
1079		&& (can_extend_p (intermediate, from_mode, unsignedp)
1080		    != CODE_FOR_nothing))
1081	      {
1082		convert_move (to, convert_to_mode (intermediate, from,
1083						   unsignedp), unsignedp);
1084		return;
1085	      }
1086
1087	  /* No suitable intermediate mode.
1088	     Generate what we need with	shifts.  */
1089	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1090				      - GET_MODE_BITSIZE (from_mode), 0);
1091	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1092	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1093			      to, unsignedp);
1094	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1095			      to, unsignedp);
1096	  if (tmp != to)
1097	    emit_move_insn (to, tmp);
1098	  return;
1099	}
1100    }
1101
1102  /* Support special truncate insns for certain modes.  */
1103
1104  if (from_mode == DImode && to_mode == SImode)
1105    {
1106#ifdef HAVE_truncdisi2
1107      if (HAVE_truncdisi2)
1108	{
1109	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1110	  return;
1111	}
1112#endif
1113      convert_move (to, force_reg (from_mode, from), unsignedp);
1114      return;
1115    }
1116
1117  if (from_mode == DImode && to_mode == HImode)
1118    {
1119#ifdef HAVE_truncdihi2
1120      if (HAVE_truncdihi2)
1121	{
1122	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1123	  return;
1124	}
1125#endif
1126      convert_move (to, force_reg (from_mode, from), unsignedp);
1127      return;
1128    }
1129
1130  if (from_mode == DImode && to_mode == QImode)
1131    {
1132#ifdef HAVE_truncdiqi2
1133      if (HAVE_truncdiqi2)
1134	{
1135	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1136	  return;
1137	}
1138#endif
1139      convert_move (to, force_reg (from_mode, from), unsignedp);
1140      return;
1141    }
1142
1143  if (from_mode == SImode && to_mode == HImode)
1144    {
1145#ifdef HAVE_truncsihi2
1146      if (HAVE_truncsihi2)
1147	{
1148	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1149	  return;
1150	}
1151#endif
1152      convert_move (to, force_reg (from_mode, from), unsignedp);
1153      return;
1154    }
1155
1156  if (from_mode == SImode && to_mode == QImode)
1157    {
1158#ifdef HAVE_truncsiqi2
1159      if (HAVE_truncsiqi2)
1160	{
1161	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1162	  return;
1163	}
1164#endif
1165      convert_move (to, force_reg (from_mode, from), unsignedp);
1166      return;
1167    }
1168
1169  if (from_mode == HImode && to_mode == QImode)
1170    {
1171#ifdef HAVE_trunchiqi2
1172      if (HAVE_trunchiqi2)
1173	{
1174	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1175	  return;
1176	}
1177#endif
1178      convert_move (to, force_reg (from_mode, from), unsignedp);
1179      return;
1180    }
1181
1182  if (from_mode == TImode && to_mode == DImode)
1183    {
1184#ifdef HAVE_trunctidi2
1185      if (HAVE_trunctidi2)
1186	{
1187	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1188	  return;
1189	}
1190#endif
1191      convert_move (to, force_reg (from_mode, from), unsignedp);
1192      return;
1193    }
1194
1195  if (from_mode == TImode && to_mode == SImode)
1196    {
1197#ifdef HAVE_trunctisi2
1198      if (HAVE_trunctisi2)
1199	{
1200	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1201	  return;
1202	}
1203#endif
1204      convert_move (to, force_reg (from_mode, from), unsignedp);
1205      return;
1206    }
1207
1208  if (from_mode == TImode && to_mode == HImode)
1209    {
1210#ifdef HAVE_trunctihi2
1211      if (HAVE_trunctihi2)
1212	{
1213	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1214	  return;
1215	}
1216#endif
1217      convert_move (to, force_reg (from_mode, from), unsignedp);
1218      return;
1219    }
1220
1221  if (from_mode == TImode && to_mode == QImode)
1222    {
1223#ifdef HAVE_trunctiqi2
1224      if (HAVE_trunctiqi2)
1225	{
1226	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1227	  return;
1228	}
1229#endif
1230      convert_move (to, force_reg (from_mode, from), unsignedp);
1231      return;
1232    }
1233
1234  /* Handle truncation of volatile memrefs, and so on;
1235     the things that couldn't be truncated directly,
1236     and for which there was no special instruction.  */
1237  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1238    {
1239      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1240      emit_move_insn (to, temp);
1241      return;
1242    }
1243
1244  /* Mode combination is not recognized.  */
1245  abort ();
1246}
1247
1248/* Return an rtx for a value that would result
1249   from converting X to mode MODE.
1250   Both X and MODE may be floating, or both integer.
1251   UNSIGNEDP is nonzero if X is an unsigned value.
1252   This can be done by referring to a part of X in place
1253   or by copying to a new temporary with conversion.
1254
1255   This function *must not* call protect_from_queue
1256   except when putting X into an insn (in which case convert_move does it).  */
1257
1258rtx
1259convert_to_mode (mode, x, unsignedp)
1260     enum machine_mode mode;
1261     rtx x;
1262     int unsignedp;
1263{
1264  return convert_modes (mode, VOIDmode, x, unsignedp);
1265}
1266
1267/* Return an rtx for a value that would result
1268   from converting X from mode OLDMODE to mode MODE.
1269   Both modes may be floating, or both integer.
1270   UNSIGNEDP is nonzero if X is an unsigned value.
1271
1272   This can be done by referring to a part of X in place
1273   or by copying to a new temporary with conversion.
1274
1275   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1276
1277   This function *must not* call protect_from_queue
1278   except when putting X into an insn (in which case convert_move does it).  */
1279
1280rtx
1281convert_modes (mode, oldmode, x, unsignedp)
1282     enum machine_mode mode, oldmode;
1283     rtx x;
1284     int unsignedp;
1285{
1286  rtx temp;
1287
1288  /* If FROM is a SUBREG that indicates that we have already done at least
1289     the required extension, strip it.  */
1290
1291  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1292      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1293      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1294    x = gen_lowpart (mode, x);
1295
1296  if (GET_MODE (x) != VOIDmode)
1297    oldmode = GET_MODE (x);
1298
1299  if (mode == oldmode)
1300    return x;
1301
1302  /* There is one case that we must handle specially: If we are converting
1303     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1304     we are to interpret the constant as unsigned, gen_lowpart will do
1305     the wrong if the constant appears negative.  What we want to do is
1306     make the high-order word of the constant zero, not all ones.  */
1307
1308  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1309      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1310      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1311    {
1312      HOST_WIDE_INT val = INTVAL (x);
1313
1314      if (oldmode != VOIDmode
1315	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1316	{
1317	  int width = GET_MODE_BITSIZE (oldmode);
1318
1319	  /* We need to zero extend VAL.  */
1320	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1321	}
1322
1323      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1324    }
1325
1326  /* We can do this with a gen_lowpart if both desired and current modes
1327     are integer, and this is either a constant integer, a register, or a
1328     non-volatile MEM.  Except for the constant case where MODE is no
1329     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1330
1331  if ((GET_CODE (x) == CONST_INT
1332       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1333      || (GET_MODE_CLASS (mode) == MODE_INT
1334	  && GET_MODE_CLASS (oldmode) == MODE_INT
1335	  && (GET_CODE (x) == CONST_DOUBLE
1336	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1337		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1338		       && direct_load[(int) mode])
1339		      || (GET_CODE (x) == REG
1340			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1341						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1342    {
1343      /* ?? If we don't know OLDMODE, we have to assume here that
1344	 X does not need sign- or zero-extension.   This may not be
1345	 the case, but it's the best we can do.  */
1346      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1347	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1348	{
1349	  HOST_WIDE_INT val = INTVAL (x);
1350	  int width = GET_MODE_BITSIZE (oldmode);
1351
1352	  /* We must sign or zero-extend in this case.  Start by
1353	     zero-extending, then sign extend if we need to.  */
1354	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1355	  if (! unsignedp
1356	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1357	    val |= (HOST_WIDE_INT) (-1) << width;
1358
1359	  return GEN_INT (trunc_int_for_mode (val, mode));
1360	}
1361
1362      return gen_lowpart (mode, x);
1363    }
1364
1365  temp = gen_reg_rtx (mode);
1366  convert_move (temp, x, unsignedp);
1367  return temp;
1368}
1369
1370/* This macro is used to determine what the largest unit size that
1371   move_by_pieces can use is.  */
1372
1373/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1374   move efficiently, as opposed to  MOVE_MAX which is the maximum
1375   number of bytes we can move with a single instruction.  */
1376
1377#ifndef MOVE_MAX_PIECES
1378#define MOVE_MAX_PIECES   MOVE_MAX
1379#endif
1380
1381/* Generate several move instructions to copy LEN bytes from block FROM to
1382   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1383   and TO through protect_from_queue before calling.
1384
1385   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1386   used to push FROM to the stack.
1387
1388   ALIGN is maximum alignment we can assume.  */
1389
1390void
1391move_by_pieces (to, from, len, align)
1392     rtx to, from;
1393     unsigned HOST_WIDE_INT len;
1394     unsigned int align;
1395{
1396  struct move_by_pieces data;
1397  rtx to_addr, from_addr = XEXP (from, 0);
1398  unsigned int max_size = MOVE_MAX_PIECES + 1;
1399  enum machine_mode mode = VOIDmode, tmode;
1400  enum insn_code icode;
1401
1402  data.offset = 0;
1403  data.from_addr = from_addr;
1404  if (to)
1405    {
1406      to_addr = XEXP (to, 0);
1407      data.to = to;
1408      data.autinc_to
1409	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1410	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1411      data.reverse
1412	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1413    }
1414  else
1415    {
1416      to_addr = NULL_RTX;
1417      data.to = NULL_RTX;
1418      data.autinc_to = 1;
1419#ifdef STACK_GROWS_DOWNWARD
1420      data.reverse = 1;
1421#else
1422      data.reverse = 0;
1423#endif
1424    }
1425  data.to_addr = to_addr;
1426  data.from = from;
1427  data.autinc_from
1428    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1429       || GET_CODE (from_addr) == POST_INC
1430       || GET_CODE (from_addr) == POST_DEC);
1431
1432  data.explicit_inc_from = 0;
1433  data.explicit_inc_to = 0;
1434  if (data.reverse) data.offset = len;
1435  data.len = len;
1436
1437  /* If copying requires more than two move insns,
1438     copy addresses to registers (to make displacements shorter)
1439     and use post-increment if available.  */
1440  if (!(data.autinc_from && data.autinc_to)
1441      && move_by_pieces_ninsns (len, align) > 2)
1442    {
1443      /* Find the mode of the largest move...  */
1444      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1445	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1446	if (GET_MODE_SIZE (tmode) < max_size)
1447	  mode = tmode;
1448
1449      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1450	{
1451	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1452	  data.autinc_from = 1;
1453	  data.explicit_inc_from = -1;
1454	}
1455      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1456	{
1457	  data.from_addr = copy_addr_to_reg (from_addr);
1458	  data.autinc_from = 1;
1459	  data.explicit_inc_from = 1;
1460	}
1461      if (!data.autinc_from && CONSTANT_P (from_addr))
1462	data.from_addr = copy_addr_to_reg (from_addr);
1463      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1464	{
1465	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1466	  data.autinc_to = 1;
1467	  data.explicit_inc_to = -1;
1468	}
1469      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1470	{
1471	  data.to_addr = copy_addr_to_reg (to_addr);
1472	  data.autinc_to = 1;
1473	  data.explicit_inc_to = 1;
1474	}
1475      if (!data.autinc_to && CONSTANT_P (to_addr))
1476	data.to_addr = copy_addr_to_reg (to_addr);
1477    }
1478
1479  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1480      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1481    align = MOVE_MAX * BITS_PER_UNIT;
1482
1483  /* First move what we can in the largest integer mode, then go to
1484     successively smaller modes.  */
1485
1486  while (max_size > 1)
1487    {
1488      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1489	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1490	if (GET_MODE_SIZE (tmode) < max_size)
1491	  mode = tmode;
1492
1493      if (mode == VOIDmode)
1494	break;
1495
1496      icode = mov_optab->handlers[(int) mode].insn_code;
1497      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1498	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1499
1500      max_size = GET_MODE_SIZE (mode);
1501    }
1502
1503  /* The code above should have handled everything.  */
1504  if (data.len > 0)
1505    abort ();
1506}
1507
1508/* Return number of insns required to move L bytes by pieces.
1509   ALIGN (in bits) is maximum alignment we can assume.  */
1510
1511static unsigned HOST_WIDE_INT
1512move_by_pieces_ninsns (l, align)
1513     unsigned HOST_WIDE_INT l;
1514     unsigned int align;
1515{
1516  unsigned HOST_WIDE_INT n_insns = 0;
1517  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1518
1519  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1520      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1521    align = MOVE_MAX * BITS_PER_UNIT;
1522
1523  while (max_size > 1)
1524    {
1525      enum machine_mode mode = VOIDmode, tmode;
1526      enum insn_code icode;
1527
1528      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1529	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1530	if (GET_MODE_SIZE (tmode) < max_size)
1531	  mode = tmode;
1532
1533      if (mode == VOIDmode)
1534	break;
1535
1536      icode = mov_optab->handlers[(int) mode].insn_code;
1537      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1538	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1539
1540      max_size = GET_MODE_SIZE (mode);
1541    }
1542
1543  if (l)
1544    abort ();
1545  return n_insns;
1546}
1547
1548/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1549   with move instructions for mode MODE.  GENFUN is the gen_... function
1550   to make a move insn for that mode.  DATA has all the other info.  */
1551
1552static void
1553move_by_pieces_1 (genfun, mode, data)
1554     rtx (*genfun) PARAMS ((rtx, ...));
1555     enum machine_mode mode;
1556     struct move_by_pieces *data;
1557{
1558  unsigned int size = GET_MODE_SIZE (mode);
1559  rtx to1 = NULL_RTX, from1;
1560
1561  while (data->len >= size)
1562    {
1563      if (data->reverse)
1564	data->offset -= size;
1565
1566      if (data->to)
1567	{
1568	  if (data->autinc_to)
1569	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1570					     data->offset);
1571	  else
1572	    to1 = adjust_address (data->to, mode, data->offset);
1573	}
1574
1575      if (data->autinc_from)
1576	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1577					   data->offset);
1578      else
1579	from1 = adjust_address (data->from, mode, data->offset);
1580
1581      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1582	emit_insn (gen_add2_insn (data->to_addr,
1583				  GEN_INT (-(HOST_WIDE_INT)size)));
1584      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1585	emit_insn (gen_add2_insn (data->from_addr,
1586				  GEN_INT (-(HOST_WIDE_INT)size)));
1587
1588      if (data->to)
1589	emit_insn ((*genfun) (to1, from1));
1590      else
1591	{
1592#ifdef PUSH_ROUNDING
1593	  emit_single_push_insn (mode, from1, NULL);
1594#else
1595	  abort ();
1596#endif
1597	}
1598
1599      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1600	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1601      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1602	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1603
1604      if (! data->reverse)
1605	data->offset += size;
1606
1607      data->len -= size;
1608    }
1609}
1610
1611/* Emit code to move a block Y to a block X.
1612   This may be done with string-move instructions,
1613   with multiple scalar move instructions, or with a library call.
1614
1615   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1616   with mode BLKmode.
1617   SIZE is an rtx that says how long they are.
1618   ALIGN is the maximum alignment we can assume they have.
1619
1620   Return the address of the new block, if memcpy is called and returns it,
1621   0 otherwise.  */
1622
1623rtx
1624emit_block_move (x, y, size)
1625     rtx x, y;
1626     rtx size;
1627{
1628  rtx retval = 0;
1629#ifdef TARGET_MEM_FUNCTIONS
1630  static tree fn;
1631  tree call_expr, arg_list;
1632#endif
1633  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1634
1635  if (GET_MODE (x) != BLKmode)
1636    abort ();
1637
1638  if (GET_MODE (y) != BLKmode)
1639    abort ();
1640
1641  x = protect_from_queue (x, 1);
1642  y = protect_from_queue (y, 0);
1643  size = protect_from_queue (size, 0);
1644
1645  if (GET_CODE (x) != MEM)
1646    abort ();
1647  if (GET_CODE (y) != MEM)
1648    abort ();
1649  if (size == 0)
1650    abort ();
1651
1652  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1653    move_by_pieces (x, y, INTVAL (size), align);
1654  else
1655    {
1656      /* Try the most limited insn first, because there's no point
1657	 including more than one in the machine description unless
1658	 the more limited one has some advantage.  */
1659
1660      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1661      enum machine_mode mode;
1662
1663      /* Since this is a move insn, we don't care about volatility.  */
1664      volatile_ok = 1;
1665
1666      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1667	   mode = GET_MODE_WIDER_MODE (mode))
1668	{
1669	  enum insn_code code = movstr_optab[(int) mode];
1670	  insn_operand_predicate_fn pred;
1671
1672	  if (code != CODE_FOR_nothing
1673	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1674		 here because if SIZE is less than the mode mask, as it is
1675		 returned by the macro, it will definitely be less than the
1676		 actual mode mask.  */
1677	      && ((GET_CODE (size) == CONST_INT
1678		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1679		       <= (GET_MODE_MASK (mode) >> 1)))
1680		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1681	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1682		  || (*pred) (x, BLKmode))
1683	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1684		  || (*pred) (y, BLKmode))
1685	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1686		  || (*pred) (opalign, VOIDmode)))
1687	    {
1688	      rtx op2;
1689	      rtx last = get_last_insn ();
1690	      rtx pat;
1691
1692	      op2 = convert_to_mode (mode, size, 1);
1693	      pred = insn_data[(int) code].operand[2].predicate;
1694	      if (pred != 0 && ! (*pred) (op2, mode))
1695		op2 = copy_to_mode_reg (mode, op2);
1696
1697	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1698	      if (pat)
1699		{
1700		  emit_insn (pat);
1701		  volatile_ok = 0;
1702		  return 0;
1703		}
1704	      else
1705		delete_insns_since (last);
1706	    }
1707	}
1708
1709      volatile_ok = 0;
1710
1711      /* X, Y, or SIZE may have been passed through protect_from_queue.
1712
1713	 It is unsafe to save the value generated by protect_from_queue
1714	 and reuse it later.  Consider what happens if emit_queue is
1715	 called before the return value from protect_from_queue is used.
1716
1717	 Expansion of the CALL_EXPR below will call emit_queue before
1718	 we are finished emitting RTL for argument setup.  So if we are
1719	 not careful we could get the wrong value for an argument.
1720
1721	 To avoid this problem we go ahead and emit code to copy X, Y &
1722	 SIZE into new pseudos.  We can then place those new pseudos
1723	 into an RTL_EXPR and use them later, even after a call to
1724	 emit_queue.
1725
1726	 Note this is not strictly needed for library calls since they
1727	 do not call emit_queue before loading their arguments.  However,
1728	 we may need to have library calls call emit_queue in the future
1729	 since failing to do so could cause problems for targets which
1730	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1731      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1732      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1733
1734#ifdef TARGET_MEM_FUNCTIONS
1735      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1736#else
1737      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1738			      TREE_UNSIGNED (integer_type_node));
1739      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1740#endif
1741
1742#ifdef TARGET_MEM_FUNCTIONS
1743      /* It is incorrect to use the libcall calling conventions to call
1744	 memcpy in this context.
1745
1746	 This could be a user call to memcpy and the user may wish to
1747	 examine the return value from memcpy.
1748
1749	 For targets where libcalls and normal calls have different conventions
1750	 for returning pointers, we could end up generating incorrect code.
1751
1752	 So instead of using a libcall sequence we build up a suitable
1753	 CALL_EXPR and expand the call in the normal fashion.  */
1754      if (fn == NULL_TREE)
1755	{
1756	  tree fntype;
1757
1758	  /* This was copied from except.c, I don't know if all this is
1759	     necessary in this context or not.  */
1760	  fn = get_identifier ("memcpy");
1761	  fntype = build_pointer_type (void_type_node);
1762	  fntype = build_function_type (fntype, NULL_TREE);
1763	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1764	  ggc_add_tree_root (&fn, 1);
1765	  DECL_EXTERNAL (fn) = 1;
1766	  TREE_PUBLIC (fn) = 1;
1767	  DECL_ARTIFICIAL (fn) = 1;
1768	  TREE_NOTHROW (fn) = 1;
1769	  make_decl_rtl (fn, NULL);
1770	  assemble_external (fn);
1771	}
1772
1773      /* We need to make an argument list for the function call.
1774
1775	 memcpy has three arguments, the first two are void * addresses and
1776	 the last is a size_t byte count for the copy.  */
1777      arg_list
1778	= build_tree_list (NULL_TREE,
1779			   make_tree (build_pointer_type (void_type_node), x));
1780      TREE_CHAIN (arg_list)
1781	= build_tree_list (NULL_TREE,
1782			   make_tree (build_pointer_type (void_type_node), y));
1783      TREE_CHAIN (TREE_CHAIN (arg_list))
1784	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1785      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1786
1787      /* Now we have to build up the CALL_EXPR itself.  */
1788      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1789      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1790			 call_expr, arg_list, NULL_TREE);
1791      TREE_SIDE_EFFECTS (call_expr) = 1;
1792
1793      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1794#else
1795      emit_library_call (bcopy_libfunc, LCT_NORMAL,
1796			 VOIDmode, 3, y, Pmode, x, Pmode,
1797			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1798					  TREE_UNSIGNED (integer_type_node)),
1799			 TYPE_MODE (integer_type_node));
1800#endif
1801
1802      /* If we are initializing a readonly value, show the above call
1803	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
1804	 from a loop.  */
1805      if (RTX_UNCHANGING_P (x))
1806	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1807    }
1808
1809  return retval;
1810}
1811
1812/* Copy all or part of a value X into registers starting at REGNO.
1813   The number of registers to be filled is NREGS.  */
1814
1815void
1816move_block_to_reg (regno, x, nregs, mode)
1817     int regno;
1818     rtx x;
1819     int nregs;
1820     enum machine_mode mode;
1821{
1822  int i;
1823#ifdef HAVE_load_multiple
1824  rtx pat;
1825  rtx last;
1826#endif
1827
1828  if (nregs == 0)
1829    return;
1830
1831  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832    x = validize_mem (force_const_mem (mode, x));
1833
1834  /* See if the machine can do this with a load multiple insn.  */
1835#ifdef HAVE_load_multiple
1836  if (HAVE_load_multiple)
1837    {
1838      last = get_last_insn ();
1839      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840			       GEN_INT (nregs));
1841      if (pat)
1842	{
1843	  emit_insn (pat);
1844	  return;
1845	}
1846      else
1847	delete_insns_since (last);
1848    }
1849#endif
1850
1851  for (i = 0; i < nregs; i++)
1852    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853		    operand_subword_force (x, i, mode));
1854}
1855
1856/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857   The number of registers to be filled is NREGS.  SIZE indicates the number
1858   of bytes in the object X.  */
1859
1860void
1861move_block_from_reg (regno, x, nregs, size)
1862     int regno;
1863     rtx x;
1864     int nregs;
1865     int size;
1866{
1867  int i;
1868#ifdef HAVE_store_multiple
1869  rtx pat;
1870  rtx last;
1871#endif
1872  enum machine_mode mode;
1873
1874  if (nregs == 0)
1875    return;
1876
1877  /* If SIZE is that of a mode no bigger than a word, just use that
1878     mode's store operation.  */
1879  if (size <= UNITS_PER_WORD
1880      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1881      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1882    {
1883      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1884      return;
1885    }
1886
1887  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1888     to the left before storing to memory.  Note that the previous test
1889     doesn't handle all cases (e.g. SIZE == 3).  */
1890  if (size < UNITS_PER_WORD
1891      && BYTES_BIG_ENDIAN
1892      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1893    {
1894      rtx tem = operand_subword (x, 0, 1, BLKmode);
1895      rtx shift;
1896
1897      if (tem == 0)
1898	abort ();
1899
1900      shift = expand_shift (LSHIFT_EXPR, word_mode,
1901			    gen_rtx_REG (word_mode, regno),
1902			    build_int_2 ((UNITS_PER_WORD - size)
1903					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1904      emit_move_insn (tem, shift);
1905      return;
1906    }
1907
1908  /* See if the machine can do this with a store multiple insn.  */
1909#ifdef HAVE_store_multiple
1910  if (HAVE_store_multiple)
1911    {
1912      last = get_last_insn ();
1913      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1914				GEN_INT (nregs));
1915      if (pat)
1916	{
1917	  emit_insn (pat);
1918	  return;
1919	}
1920      else
1921	delete_insns_since (last);
1922    }
1923#endif
1924
1925  for (i = 0; i < nregs; i++)
1926    {
1927      rtx tem = operand_subword (x, i, 1, BLKmode);
1928
1929      if (tem == 0)
1930	abort ();
1931
1932      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1933    }
1934}
1935
1936/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1937   registers represented by a PARALLEL.  SSIZE represents the total size of
1938   block SRC in bytes, or -1 if not known.  */
1939/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1940   the balance will be in what would be the low-order memory addresses, i.e.
1941   left justified for big endian, right justified for little endian.  This
1942   happens to be true for the targets currently using this support.  If this
1943   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1944   would be needed.  */
1945
1946void
1947emit_group_load (dst, orig_src, ssize)
1948     rtx dst, orig_src;
1949     int ssize;
1950{
1951  rtx *tmps, src;
1952  int start, i;
1953
1954  if (GET_CODE (dst) != PARALLEL)
1955    abort ();
1956
1957  /* Check for a NULL entry, used to indicate that the parameter goes
1958     both on the stack and in registers.  */
1959  if (XEXP (XVECEXP (dst, 0, 0), 0))
1960    start = 0;
1961  else
1962    start = 1;
1963
1964  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1965
1966  /* Process the pieces.  */
1967  for (i = start; i < XVECLEN (dst, 0); i++)
1968    {
1969      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1970      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1971      unsigned int bytelen = GET_MODE_SIZE (mode);
1972      int shift = 0;
1973
1974      /* Handle trailing fragments that run over the size of the struct.  */
1975      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1976	{
1977	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1978	  bytelen = ssize - bytepos;
1979	  if (bytelen <= 0)
1980	    abort ();
1981	}
1982
1983      /* If we won't be loading directly from memory, protect the real source
1984	 from strange tricks we might play; but make sure that the source can
1985	 be loaded directly into the destination.  */
1986      src = orig_src;
1987      if (GET_CODE (orig_src) != MEM
1988	  && (!CONSTANT_P (orig_src)
1989	      || (GET_MODE (orig_src) != mode
1990		  && GET_MODE (orig_src) != VOIDmode)))
1991	{
1992	  if (GET_MODE (orig_src) == VOIDmode)
1993	    src = gen_reg_rtx (mode);
1994	  else
1995	    src = gen_reg_rtx (GET_MODE (orig_src));
1996
1997	  emit_move_insn (src, orig_src);
1998	}
1999
2000      /* Optimize the access just a bit.  */
2001      if (GET_CODE (src) == MEM
2002	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2003	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2004	  && bytelen == GET_MODE_SIZE (mode))
2005	{
2006	  tmps[i] = gen_reg_rtx (mode);
2007	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2008	}
2009      else if (GET_CODE (src) == CONCAT)
2010	{
2011	  if ((bytepos == 0
2012	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2013	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2014		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2015	    {
2016	      tmps[i] = XEXP (src, bytepos != 0);
2017	      if (! CONSTANT_P (tmps[i])
2018		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2019		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2020					     0, 1, NULL_RTX, mode, mode, ssize);
2021	    }
2022	  else if (bytepos == 0)
2023	    {
2024	      rtx mem = assign_stack_temp (GET_MODE (src),
2025					   GET_MODE_SIZE (GET_MODE (src)), 0);
2026	      emit_move_insn (mem, src);
2027	      tmps[i] = adjust_address (mem, mode, 0);
2028	    }
2029	  else
2030	    abort ();
2031	}
2032      else if (CONSTANT_P (src)
2033	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2034	tmps[i] = src;
2035      else
2036	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2037				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2038				     mode, mode, ssize);
2039
2040      if (BYTES_BIG_ENDIAN && shift)
2041	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2042		      tmps[i], 0, OPTAB_WIDEN);
2043    }
2044
2045  emit_queue ();
2046
2047  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2048  for (i = start; i < XVECLEN (dst, 0); i++)
2049    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2050}
2051
2052/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2053   registers represented by a PARALLEL.  SSIZE represents the total size of
2054   block DST, or -1 if not known.  */
2055
2056void
2057emit_group_store (orig_dst, src, ssize)
2058     rtx orig_dst, src;
2059     int ssize;
2060{
2061  rtx *tmps, dst;
2062  int start, i;
2063
2064  if (GET_CODE (src) != PARALLEL)
2065    abort ();
2066
2067  /* Check for a NULL entry, used to indicate that the parameter goes
2068     both on the stack and in registers.  */
2069  if (XEXP (XVECEXP (src, 0, 0), 0))
2070    start = 0;
2071  else
2072    start = 1;
2073
2074  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2075
2076  /* Copy the (probable) hard regs into pseudos.  */
2077  for (i = start; i < XVECLEN (src, 0); i++)
2078    {
2079      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2080      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2081      emit_move_insn (tmps[i], reg);
2082    }
2083  emit_queue ();
2084
2085  /* If we won't be storing directly into memory, protect the real destination
2086     from strange tricks we might play.  */
2087  dst = orig_dst;
2088  if (GET_CODE (dst) == PARALLEL)
2089    {
2090      rtx temp;
2091
2092      /* We can get a PARALLEL dst if there is a conditional expression in
2093	 a return statement.  In that case, the dst and src are the same,
2094	 so no action is necessary.  */
2095      if (rtx_equal_p (dst, src))
2096	return;
2097
2098      /* It is unclear if we can ever reach here, but we may as well handle
2099	 it.  Allocate a temporary, and split this into a store/load to/from
2100	 the temporary.  */
2101
2102      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2103      emit_group_store (temp, src, ssize);
2104      emit_group_load (dst, temp, ssize);
2105      return;
2106    }
2107  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2108    {
2109      dst = gen_reg_rtx (GET_MODE (orig_dst));
2110      /* Make life a bit easier for combine.  */
2111      emit_move_insn (dst, const0_rtx);
2112    }
2113
2114  /* Process the pieces.  */
2115  for (i = start; i < XVECLEN (src, 0); i++)
2116    {
2117      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2118      enum machine_mode mode = GET_MODE (tmps[i]);
2119      unsigned int bytelen = GET_MODE_SIZE (mode);
2120      rtx dest = dst;
2121
2122      /* Handle trailing fragments that run over the size of the struct.  */
2123      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2124	{
2125	  if (BYTES_BIG_ENDIAN)
2126	    {
2127	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2128	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2129			    tmps[i], 0, OPTAB_WIDEN);
2130	    }
2131	  bytelen = ssize - bytepos;
2132	}
2133
2134      if (GET_CODE (dst) == CONCAT)
2135	{
2136	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2137	    dest = XEXP (dst, 0);
2138	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2139	    {
2140	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2141	      dest = XEXP (dst, 1);
2142	    }
2143	  else
2144	    abort ();
2145	}
2146
2147      /* Optimize the access just a bit.  */
2148      if (GET_CODE (dest) == MEM
2149	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2150	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2151	  && bytelen == GET_MODE_SIZE (mode))
2152	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2153      else
2154	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2155			 mode, tmps[i], ssize);
2156    }
2157
2158  emit_queue ();
2159
2160  /* Copy from the pseudo into the (probable) hard reg.  */
2161  if (GET_CODE (dst) == REG)
2162    emit_move_insn (orig_dst, dst);
2163}
2164
2165/* Generate code to copy a BLKmode object of TYPE out of a
2166   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2167   is null, a stack temporary is created.  TGTBLK is returned.
2168
2169   The primary purpose of this routine is to handle functions
2170   that return BLKmode structures in registers.  Some machines
2171   (the PA for example) want to return all small structures
2172   in registers regardless of the structure's alignment.  */
2173
2174rtx
2175copy_blkmode_from_reg (tgtblk, srcreg, type)
2176     rtx tgtblk;
2177     rtx srcreg;
2178     tree type;
2179{
2180  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2181  rtx src = NULL, dst = NULL;
2182  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2183  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2184
2185  if (tgtblk == 0)
2186    {
2187      tgtblk = assign_temp (build_qualified_type (type,
2188						  (TYPE_QUALS (type)
2189						   | TYPE_QUAL_CONST)),
2190			    0, 1, 1);
2191      preserve_temp_slots (tgtblk);
2192    }
2193
2194  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2195     into a new pseudo which is a full word.
2196
2197     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2198     the wrong part of the register gets copied so we fake a type conversion
2199     in place.  */
2200  if (GET_MODE (srcreg) != BLKmode
2201      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2202    {
2203      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2204	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2205      else
2206	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2207    }
2208
2209  /* Structures whose size is not a multiple of a word are aligned
2210     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2211     machine, this means we must skip the empty high order bytes when
2212     calculating the bit offset.  */
2213  if (BYTES_BIG_ENDIAN
2214      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2215      && bytes % UNITS_PER_WORD)
2216    big_endian_correction
2217      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2218
2219  /* Copy the structure BITSIZE bites at a time.
2220
2221     We could probably emit more efficient code for machines which do not use
2222     strict alignment, but it doesn't seem worth the effort at the current
2223     time.  */
2224  for (bitpos = 0, xbitpos = big_endian_correction;
2225       bitpos < bytes * BITS_PER_UNIT;
2226       bitpos += bitsize, xbitpos += bitsize)
2227    {
2228      /* We need a new source operand each time xbitpos is on a
2229	 word boundary and when xbitpos == big_endian_correction
2230	 (the first time through).  */
2231      if (xbitpos % BITS_PER_WORD == 0
2232	  || xbitpos == big_endian_correction)
2233	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2234				     GET_MODE (srcreg));
2235
2236      /* We need a new destination operand each time bitpos is on
2237	 a word boundary.  */
2238      if (bitpos % BITS_PER_WORD == 0)
2239	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2240
2241      /* Use xbitpos for the source extraction (right justified) and
2242	 xbitpos for the destination store (left justified).  */
2243      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2244		       extract_bit_field (src, bitsize,
2245					  xbitpos % BITS_PER_WORD, 1,
2246					  NULL_RTX, word_mode, word_mode,
2247					  BITS_PER_WORD),
2248		       BITS_PER_WORD);
2249    }
2250
2251  return tgtblk;
2252}
2253
2254/* Add a USE expression for REG to the (possibly empty) list pointed
2255   to by CALL_FUSAGE.  REG must denote a hard register.  */
2256
2257void
2258use_reg (call_fusage, reg)
2259     rtx *call_fusage, reg;
2260{
2261  if (GET_CODE (reg) != REG
2262      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2263    abort ();
2264
2265  *call_fusage
2266    = gen_rtx_EXPR_LIST (VOIDmode,
2267			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2268}
2269
2270/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2271   starting at REGNO.  All of these registers must be hard registers.  */
2272
2273void
2274use_regs (call_fusage, regno, nregs)
2275     rtx *call_fusage;
2276     int regno;
2277     int nregs;
2278{
2279  int i;
2280
2281  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2282    abort ();
2283
2284  for (i = 0; i < nregs; i++)
2285    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2286}
2287
2288/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2289   PARALLEL REGS.  This is for calls that pass values in multiple
2290   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2291
2292void
2293use_group_regs (call_fusage, regs)
2294     rtx *call_fusage;
2295     rtx regs;
2296{
2297  int i;
2298
2299  for (i = 0; i < XVECLEN (regs, 0); i++)
2300    {
2301      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2302
2303      /* A NULL entry means the parameter goes both on the stack and in
2304	 registers.  This can also be a MEM for targets that pass values
2305	 partially on the stack and partially in registers.  */
2306      if (reg != 0 && GET_CODE (reg) == REG)
2307	use_reg (call_fusage, reg);
2308    }
2309}
2310
2311
2312int
2313can_store_by_pieces (len, constfun, constfundata, align)
2314     unsigned HOST_WIDE_INT len;
2315     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2316     PTR constfundata;
2317     unsigned int align;
2318{
2319  unsigned HOST_WIDE_INT max_size, l;
2320  HOST_WIDE_INT offset = 0;
2321  enum machine_mode mode, tmode;
2322  enum insn_code icode;
2323  int reverse;
2324  rtx cst;
2325
2326  if (! MOVE_BY_PIECES_P (len, align))
2327    return 0;
2328
2329  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2330      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2331    align = MOVE_MAX * BITS_PER_UNIT;
2332
2333  /* We would first store what we can in the largest integer mode, then go to
2334     successively smaller modes.  */
2335
2336  for (reverse = 0;
2337       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2338       reverse++)
2339    {
2340      l = len;
2341      mode = VOIDmode;
2342      max_size = MOVE_MAX_PIECES + 1;
2343      while (max_size > 1)
2344	{
2345	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2346	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2347	    if (GET_MODE_SIZE (tmode) < max_size)
2348	      mode = tmode;
2349
2350	  if (mode == VOIDmode)
2351	    break;
2352
2353	  icode = mov_optab->handlers[(int) mode].insn_code;
2354	  if (icode != CODE_FOR_nothing
2355	      && align >= GET_MODE_ALIGNMENT (mode))
2356	    {
2357	      unsigned int size = GET_MODE_SIZE (mode);
2358
2359	      while (l >= size)
2360		{
2361		  if (reverse)
2362		    offset -= size;
2363
2364		  cst = (*constfun) (constfundata, offset, mode);
2365		  if (!LEGITIMATE_CONSTANT_P (cst))
2366		    return 0;
2367
2368		  if (!reverse)
2369		    offset += size;
2370
2371		  l -= size;
2372		}
2373	    }
2374
2375	  max_size = GET_MODE_SIZE (mode);
2376	}
2377
2378      /* The code above should have handled everything.  */
2379      if (l != 0)
2380	abort ();
2381    }
2382
2383  return 1;
2384}
2385
2386/* Generate several move instructions to store LEN bytes generated by
2387   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2388   pointer which will be passed as argument in every CONSTFUN call.
2389   ALIGN is maximum alignment we can assume.  */
2390
2391void
2392store_by_pieces (to, len, constfun, constfundata, align)
2393     rtx to;
2394     unsigned HOST_WIDE_INT len;
2395     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2396     PTR constfundata;
2397     unsigned int align;
2398{
2399  struct store_by_pieces data;
2400
2401  if (! MOVE_BY_PIECES_P (len, align))
2402    abort ();
2403  to = protect_from_queue (to, 1);
2404  data.constfun = constfun;
2405  data.constfundata = constfundata;
2406  data.len = len;
2407  data.to = to;
2408  store_by_pieces_1 (&data, align);
2409}
2410
2411/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2412   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2413   before calling. ALIGN is maximum alignment we can assume.  */
2414
2415static void
2416clear_by_pieces (to, len, align)
2417     rtx to;
2418     unsigned HOST_WIDE_INT len;
2419     unsigned int align;
2420{
2421  struct store_by_pieces data;
2422
2423  data.constfun = clear_by_pieces_1;
2424  data.constfundata = NULL;
2425  data.len = len;
2426  data.to = to;
2427  store_by_pieces_1 (&data, align);
2428}
2429
2430/* Callback routine for clear_by_pieces.
2431   Return const0_rtx unconditionally.  */
2432
2433static rtx
2434clear_by_pieces_1 (data, offset, mode)
2435     PTR data ATTRIBUTE_UNUSED;
2436     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2437     enum machine_mode mode ATTRIBUTE_UNUSED;
2438{
2439  return const0_rtx;
2440}
2441
2442/* Subroutine of clear_by_pieces and store_by_pieces.
2443   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2444   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2445   before calling.  ALIGN is maximum alignment we can assume.  */
2446
2447static void
2448store_by_pieces_1 (data, align)
2449     struct store_by_pieces *data;
2450     unsigned int align;
2451{
2452  rtx to_addr = XEXP (data->to, 0);
2453  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2454  enum machine_mode mode = VOIDmode, tmode;
2455  enum insn_code icode;
2456
2457  data->offset = 0;
2458  data->to_addr = to_addr;
2459  data->autinc_to
2460    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2461       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2462
2463  data->explicit_inc_to = 0;
2464  data->reverse
2465    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2466  if (data->reverse)
2467    data->offset = data->len;
2468
2469  /* If storing requires more than two move insns,
2470     copy addresses to registers (to make displacements shorter)
2471     and use post-increment if available.  */
2472  if (!data->autinc_to
2473      && move_by_pieces_ninsns (data->len, align) > 2)
2474    {
2475      /* Determine the main mode we'll be using.  */
2476      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2477	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2478	if (GET_MODE_SIZE (tmode) < max_size)
2479	  mode = tmode;
2480
2481      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2482	{
2483	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2484	  data->autinc_to = 1;
2485	  data->explicit_inc_to = -1;
2486	}
2487
2488      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2489	  && ! data->autinc_to)
2490	{
2491	  data->to_addr = copy_addr_to_reg (to_addr);
2492	  data->autinc_to = 1;
2493	  data->explicit_inc_to = 1;
2494	}
2495
2496      if ( !data->autinc_to && CONSTANT_P (to_addr))
2497	data->to_addr = copy_addr_to_reg (to_addr);
2498    }
2499
2500  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2501      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2502    align = MOVE_MAX * BITS_PER_UNIT;
2503
2504  /* First store what we can in the largest integer mode, then go to
2505     successively smaller modes.  */
2506
2507  while (max_size > 1)
2508    {
2509      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2510	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2511	if (GET_MODE_SIZE (tmode) < max_size)
2512	  mode = tmode;
2513
2514      if (mode == VOIDmode)
2515	break;
2516
2517      icode = mov_optab->handlers[(int) mode].insn_code;
2518      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2519	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2520
2521      max_size = GET_MODE_SIZE (mode);
2522    }
2523
2524  /* The code above should have handled everything.  */
2525  if (data->len != 0)
2526    abort ();
2527}
2528
2529/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2530   with move instructions for mode MODE.  GENFUN is the gen_... function
2531   to make a move insn for that mode.  DATA has all the other info.  */
2532
2533static void
2534store_by_pieces_2 (genfun, mode, data)
2535     rtx (*genfun) PARAMS ((rtx, ...));
2536     enum machine_mode mode;
2537     struct store_by_pieces *data;
2538{
2539  unsigned int size = GET_MODE_SIZE (mode);
2540  rtx to1, cst;
2541
2542  while (data->len >= size)
2543    {
2544      if (data->reverse)
2545	data->offset -= size;
2546
2547      if (data->autinc_to)
2548	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2549					 data->offset);
2550      else
2551	to1 = adjust_address (data->to, mode, data->offset);
2552
2553      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2554	emit_insn (gen_add2_insn (data->to_addr,
2555				  GEN_INT (-(HOST_WIDE_INT) size)));
2556
2557      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2558      emit_insn ((*genfun) (to1, cst));
2559
2560      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2561	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2562
2563      if (! data->reverse)
2564	data->offset += size;
2565
2566      data->len -= size;
2567    }
2568}
2569
2570/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2571   its length in bytes.  */
2572
2573rtx
2574clear_storage (object, size)
2575     rtx object;
2576     rtx size;
2577{
2578#ifdef TARGET_MEM_FUNCTIONS
2579  static tree fn;
2580  tree call_expr, arg_list;
2581#endif
2582  rtx retval = 0;
2583  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2584			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2585
2586  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2587     just move a zero.  Otherwise, do this a piece at a time.  */
2588  if (GET_MODE (object) != BLKmode
2589      && GET_CODE (size) == CONST_INT
2590      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2591    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2592  else
2593    {
2594      object = protect_from_queue (object, 1);
2595      size = protect_from_queue (size, 0);
2596
2597      if (GET_CODE (size) == CONST_INT
2598	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2599	clear_by_pieces (object, INTVAL (size), align);
2600      else
2601	{
2602	  /* Try the most limited insn first, because there's no point
2603	     including more than one in the machine description unless
2604	     the more limited one has some advantage.  */
2605
2606	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2607	  enum machine_mode mode;
2608
2609	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2610	       mode = GET_MODE_WIDER_MODE (mode))
2611	    {
2612	      enum insn_code code = clrstr_optab[(int) mode];
2613	      insn_operand_predicate_fn pred;
2614
2615	      if (code != CODE_FOR_nothing
2616		  /* We don't need MODE to be narrower than
2617		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2618		     the mode mask, as it is returned by the macro, it will
2619		     definitely be less than the actual mode mask.  */
2620		  && ((GET_CODE (size) == CONST_INT
2621		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2622			   <= (GET_MODE_MASK (mode) >> 1)))
2623		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2624		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2625		      || (*pred) (object, BLKmode))
2626		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2627		      || (*pred) (opalign, VOIDmode)))
2628		{
2629		  rtx op1;
2630		  rtx last = get_last_insn ();
2631		  rtx pat;
2632
2633		  op1 = convert_to_mode (mode, size, 1);
2634		  pred = insn_data[(int) code].operand[1].predicate;
2635		  if (pred != 0 && ! (*pred) (op1, mode))
2636		    op1 = copy_to_mode_reg (mode, op1);
2637
2638		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2639		  if (pat)
2640		    {
2641		      emit_insn (pat);
2642		      return 0;
2643		    }
2644		  else
2645		    delete_insns_since (last);
2646		}
2647	    }
2648
2649	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2650
2651	     It is unsafe to save the value generated by protect_from_queue
2652	     and reuse it later.  Consider what happens if emit_queue is
2653	     called before the return value from protect_from_queue is used.
2654
2655	     Expansion of the CALL_EXPR below will call emit_queue before
2656	     we are finished emitting RTL for argument setup.  So if we are
2657	     not careful we could get the wrong value for an argument.
2658
2659	     To avoid this problem we go ahead and emit code to copy OBJECT
2660	     and SIZE into new pseudos.  We can then place those new pseudos
2661	     into an RTL_EXPR and use them later, even after a call to
2662	     emit_queue.
2663
2664	     Note this is not strictly needed for library calls since they
2665	     do not call emit_queue before loading their arguments.  However,
2666	     we may need to have library calls call emit_queue in the future
2667	     since failing to do so could cause problems for targets which
2668	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2669	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2670
2671#ifdef TARGET_MEM_FUNCTIONS
2672	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2673#else
2674	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2675				  TREE_UNSIGNED (integer_type_node));
2676	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2677#endif
2678
2679#ifdef TARGET_MEM_FUNCTIONS
2680	  /* It is incorrect to use the libcall calling conventions to call
2681	     memset in this context.
2682
2683	     This could be a user call to memset and the user may wish to
2684	     examine the return value from memset.
2685
2686	     For targets where libcalls and normal calls have different
2687	     conventions for returning pointers, we could end up generating
2688	     incorrect code.
2689
2690	     So instead of using a libcall sequence we build up a suitable
2691	     CALL_EXPR and expand the call in the normal fashion.  */
2692	  if (fn == NULL_TREE)
2693	    {
2694	      tree fntype;
2695
2696	      /* This was copied from except.c, I don't know if all this is
2697		 necessary in this context or not.  */
2698	      fn = get_identifier ("memset");
2699	      fntype = build_pointer_type (void_type_node);
2700	      fntype = build_function_type (fntype, NULL_TREE);
2701	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2702	      ggc_add_tree_root (&fn, 1);
2703	      DECL_EXTERNAL (fn) = 1;
2704	      TREE_PUBLIC (fn) = 1;
2705	      DECL_ARTIFICIAL (fn) = 1;
2706	      TREE_NOTHROW (fn) = 1;
2707	      make_decl_rtl (fn, NULL);
2708	      assemble_external (fn);
2709	    }
2710
2711	  /* We need to make an argument list for the function call.
2712
2713	     memset has three arguments, the first is a void * addresses, the
2714	     second an integer with the initialization value, the last is a
2715	     size_t byte count for the copy.  */
2716	  arg_list
2717	    = build_tree_list (NULL_TREE,
2718			       make_tree (build_pointer_type (void_type_node),
2719					  object));
2720	  TREE_CHAIN (arg_list)
2721	    = build_tree_list (NULL_TREE,
2722			       make_tree (integer_type_node, const0_rtx));
2723	  TREE_CHAIN (TREE_CHAIN (arg_list))
2724	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2725	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2726
2727	  /* Now we have to build up the CALL_EXPR itself.  */
2728	  call_expr = build1 (ADDR_EXPR,
2729			      build_pointer_type (TREE_TYPE (fn)), fn);
2730	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2731			     call_expr, arg_list, NULL_TREE);
2732	  TREE_SIDE_EFFECTS (call_expr) = 1;
2733
2734	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2735#else
2736	  emit_library_call (bzero_libfunc, LCT_NORMAL,
2737			     VOIDmode, 2, object, Pmode, size,
2738			     TYPE_MODE (integer_type_node));
2739#endif
2740
2741	  /* If we are initializing a readonly value, show the above call
2742	     clobbered it.  Otherwise, a load from it may erroneously be
2743	     hoisted from a loop.  */
2744	  if (RTX_UNCHANGING_P (object))
2745	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2746	}
2747    }
2748
2749  return retval;
2750}
2751
2752/* Generate code to copy Y into X.
2753   Both Y and X must have the same mode, except that
2754   Y can be a constant with VOIDmode.
2755   This mode cannot be BLKmode; use emit_block_move for that.
2756
2757   Return the last instruction emitted.  */
2758
2759rtx
2760emit_move_insn (x, y)
2761     rtx x, y;
2762{
2763  enum machine_mode mode = GET_MODE (x);
2764  rtx y_cst = NULL_RTX;
2765  rtx last_insn;
2766
2767  x = protect_from_queue (x, 1);
2768  y = protect_from_queue (y, 0);
2769
2770  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2771    abort ();
2772
2773  /* Never force constant_p_rtx to memory.  */
2774  if (GET_CODE (y) == CONSTANT_P_RTX)
2775    ;
2776  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2777    {
2778      y_cst = y;
2779      y = force_const_mem (mode, y);
2780    }
2781
2782  /* If X or Y are memory references, verify that their addresses are valid
2783     for the machine.  */
2784  if (GET_CODE (x) == MEM
2785      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2786	   && ! push_operand (x, GET_MODE (x)))
2787	  || (flag_force_addr
2788	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2789    x = validize_mem (x);
2790
2791  if (GET_CODE (y) == MEM
2792      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2793	  || (flag_force_addr
2794	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2795    y = validize_mem (y);
2796
2797  if (mode == BLKmode)
2798    abort ();
2799
2800  last_insn = emit_move_insn_1 (x, y);
2801
2802  if (y_cst && GET_CODE (x) == REG)
2803    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2804
2805  return last_insn;
2806}
2807
2808/* Low level part of emit_move_insn.
2809   Called just like emit_move_insn, but assumes X and Y
2810   are basically valid.  */
2811
2812rtx
2813emit_move_insn_1 (x, y)
2814     rtx x, y;
2815{
2816  enum machine_mode mode = GET_MODE (x);
2817  enum machine_mode submode;
2818  enum mode_class class = GET_MODE_CLASS (mode);
2819
2820  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2821    abort ();
2822
2823  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2824    return
2825      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2826
2827  /* Expand complex moves by moving real part and imag part, if possible.  */
2828  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2829	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2830						    * BITS_PER_UNIT),
2831						   (class == MODE_COMPLEX_INT
2832						    ? MODE_INT : MODE_FLOAT),
2833						   0))
2834	   && (mov_optab->handlers[(int) submode].insn_code
2835	       != CODE_FOR_nothing))
2836    {
2837      /* Don't split destination if it is a stack push.  */
2838      int stack = push_operand (x, GET_MODE (x));
2839
2840#ifdef PUSH_ROUNDING
2841      /* In case we output to the stack, but the size is smaller machine can
2842	 push exactly, we need to use move instructions.  */
2843      if (stack
2844	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2845	      != GET_MODE_SIZE (submode)))
2846	{
2847	  rtx temp;
2848	  HOST_WIDE_INT offset1, offset2;
2849
2850	  /* Do not use anti_adjust_stack, since we don't want to update
2851	     stack_pointer_delta.  */
2852	  temp = expand_binop (Pmode,
2853#ifdef STACK_GROWS_DOWNWARD
2854			       sub_optab,
2855#else
2856			       add_optab,
2857#endif
2858			       stack_pointer_rtx,
2859			       GEN_INT
2860				 (PUSH_ROUNDING
2861				  (GET_MODE_SIZE (GET_MODE (x)))),
2862			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2863
2864	  if (temp != stack_pointer_rtx)
2865	    emit_move_insn (stack_pointer_rtx, temp);
2866
2867#ifdef STACK_GROWS_DOWNWARD
2868	  offset1 = 0;
2869	  offset2 = GET_MODE_SIZE (submode);
2870#else
2871	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2872	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2873		     + GET_MODE_SIZE (submode));
2874#endif
2875
2876	  emit_move_insn (change_address (x, submode,
2877					  gen_rtx_PLUS (Pmode,
2878						        stack_pointer_rtx,
2879							GEN_INT (offset1))),
2880			  gen_realpart (submode, y));
2881	  emit_move_insn (change_address (x, submode,
2882					  gen_rtx_PLUS (Pmode,
2883						        stack_pointer_rtx,
2884							GEN_INT (offset2))),
2885			  gen_imagpart (submode, y));
2886	}
2887      else
2888#endif
2889      /* If this is a stack, push the highpart first, so it
2890	 will be in the argument order.
2891
2892	 In that case, change_address is used only to convert
2893	 the mode, not to change the address.  */
2894      if (stack)
2895	{
2896	  /* Note that the real part always precedes the imag part in memory
2897	     regardless of machine's endianness.  */
2898#ifdef STACK_GROWS_DOWNWARD
2899	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2900		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2901		      gen_imagpart (submode, y)));
2902	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2903		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2904		      gen_realpart (submode, y)));
2905#else
2906	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2907		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2908		      gen_realpart (submode, y)));
2909	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2910		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2911		      gen_imagpart (submode, y)));
2912#endif
2913	}
2914      else
2915	{
2916	  rtx realpart_x, realpart_y;
2917	  rtx imagpart_x, imagpart_y;
2918
2919	  /* If this is a complex value with each part being smaller than a
2920	     word, the usual calling sequence will likely pack the pieces into
2921	     a single register.  Unfortunately, SUBREG of hard registers only
2922	     deals in terms of words, so we have a problem converting input
2923	     arguments to the CONCAT of two registers that is used elsewhere
2924	     for complex values.  If this is before reload, we can copy it into
2925	     memory and reload.  FIXME, we should see about using extract and
2926	     insert on integer registers, but complex short and complex char
2927	     variables should be rarely used.  */
2928	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2929	      && (reload_in_progress | reload_completed) == 0)
2930	    {
2931	      int packed_dest_p
2932		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2933	      int packed_src_p
2934		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2935
2936	      if (packed_dest_p || packed_src_p)
2937		{
2938		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2939					       ? MODE_FLOAT : MODE_INT);
2940
2941		  enum machine_mode reg_mode
2942		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2943
2944		  if (reg_mode != BLKmode)
2945		    {
2946		      rtx mem = assign_stack_temp (reg_mode,
2947						   GET_MODE_SIZE (mode), 0);
2948		      rtx cmem = adjust_address (mem, mode, 0);
2949
2950		      cfun->cannot_inline
2951			= N_("function using short complex types cannot be inline");
2952
2953		      if (packed_dest_p)
2954			{
2955			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2956
2957			  emit_move_insn_1 (cmem, y);
2958			  return emit_move_insn_1 (sreg, mem);
2959			}
2960		      else
2961			{
2962			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2963
2964			  emit_move_insn_1 (mem, sreg);
2965			  return emit_move_insn_1 (x, cmem);
2966			}
2967		    }
2968		}
2969	    }
2970
2971	  realpart_x = gen_realpart (submode, x);
2972	  realpart_y = gen_realpart (submode, y);
2973	  imagpart_x = gen_imagpart (submode, x);
2974	  imagpart_y = gen_imagpart (submode, y);
2975
2976	  /* Show the output dies here.  This is necessary for SUBREGs
2977	     of pseudos since we cannot track their lifetimes correctly;
2978	     hard regs shouldn't appear here except as return values.
2979	     We never want to emit such a clobber after reload.  */
2980	  if (x != y
2981	      && ! (reload_in_progress || reload_completed)
2982	      && (GET_CODE (realpart_x) == SUBREG
2983		  || GET_CODE (imagpart_x) == SUBREG))
2984	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2985
2986	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2987		     (realpart_x, realpart_y));
2988	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2989		     (imagpart_x, imagpart_y));
2990	}
2991
2992      return get_last_insn ();
2993    }
2994
2995  /* This will handle any multi-word mode that lacks a move_insn pattern.
2996     However, you will get better code if you define such patterns,
2997     even if they must turn into multiple assembler instructions.  */
2998  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2999    {
3000      rtx last_insn = 0;
3001      rtx seq, inner;
3002      int need_clobber;
3003      int i;
3004
3005#ifdef PUSH_ROUNDING
3006
3007      /* If X is a push on the stack, do the push now and replace
3008	 X with a reference to the stack pointer.  */
3009      if (push_operand (x, GET_MODE (x)))
3010	{
3011	  rtx temp;
3012	  enum rtx_code code;
3013
3014	  /* Do not use anti_adjust_stack, since we don't want to update
3015	     stack_pointer_delta.  */
3016	  temp = expand_binop (Pmode,
3017#ifdef STACK_GROWS_DOWNWARD
3018			       sub_optab,
3019#else
3020			       add_optab,
3021#endif
3022			       stack_pointer_rtx,
3023			       GEN_INT
3024				 (PUSH_ROUNDING
3025				  (GET_MODE_SIZE (GET_MODE (x)))),
3026			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3027
3028          if (temp != stack_pointer_rtx)
3029            emit_move_insn (stack_pointer_rtx, temp);
3030
3031	  code = GET_CODE (XEXP (x, 0));
3032
3033	  /* Just hope that small offsets off SP are OK.  */
3034	  if (code == POST_INC)
3035	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3036				GEN_INT (-((HOST_WIDE_INT)
3037					   GET_MODE_SIZE (GET_MODE (x)))));
3038	  else if (code == POST_DEC)
3039	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3040				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3041	  else
3042	    temp = stack_pointer_rtx;
3043
3044	  x = change_address (x, VOIDmode, temp);
3045	}
3046#endif
3047
3048      /* If we are in reload, see if either operand is a MEM whose address
3049	 is scheduled for replacement.  */
3050      if (reload_in_progress && GET_CODE (x) == MEM
3051	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3052	x = replace_equiv_address_nv (x, inner);
3053      if (reload_in_progress && GET_CODE (y) == MEM
3054	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3055	y = replace_equiv_address_nv (y, inner);
3056
3057      start_sequence ();
3058
3059      need_clobber = 0;
3060      for (i = 0;
3061	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3062	   i++)
3063	{
3064	  rtx xpart = operand_subword (x, i, 1, mode);
3065	  rtx ypart = operand_subword (y, i, 1, mode);
3066
3067	  /* If we can't get a part of Y, put Y into memory if it is a
3068	     constant.  Otherwise, force it into a register.  If we still
3069	     can't get a part of Y, abort.  */
3070	  if (ypart == 0 && CONSTANT_P (y))
3071	    {
3072	      y = force_const_mem (mode, y);
3073	      ypart = operand_subword (y, i, 1, mode);
3074	    }
3075	  else if (ypart == 0)
3076	    ypart = operand_subword_force (y, i, mode);
3077
3078	  if (xpart == 0 || ypart == 0)
3079	    abort ();
3080
3081	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3082
3083	  last_insn = emit_move_insn (xpart, ypart);
3084	}
3085
3086      seq = gen_sequence ();
3087      end_sequence ();
3088
3089      /* Show the output dies here.  This is necessary for SUBREGs
3090	 of pseudos since we cannot track their lifetimes correctly;
3091	 hard regs shouldn't appear here except as return values.
3092	 We never want to emit such a clobber after reload.  */
3093      if (x != y
3094	  && ! (reload_in_progress || reload_completed)
3095	  && need_clobber != 0)
3096	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3097
3098      emit_insn (seq);
3099
3100      return last_insn;
3101    }
3102  else
3103    abort ();
3104}
3105
3106/* Pushing data onto the stack.  */
3107
3108/* Push a block of length SIZE (perhaps variable)
3109   and return an rtx to address the beginning of the block.
3110   Note that it is not possible for the value returned to be a QUEUED.
3111   The value may be virtual_outgoing_args_rtx.
3112
3113   EXTRA is the number of bytes of padding to push in addition to SIZE.
3114   BELOW nonzero means this padding comes at low addresses;
3115   otherwise, the padding comes at high addresses.  */
3116
3117rtx
3118push_block (size, extra, below)
3119     rtx size;
3120     int extra, below;
3121{
3122  rtx temp;
3123
3124  size = convert_modes (Pmode, ptr_mode, size, 1);
3125  if (CONSTANT_P (size))
3126    anti_adjust_stack (plus_constant (size, extra));
3127  else if (GET_CODE (size) == REG && extra == 0)
3128    anti_adjust_stack (size);
3129  else
3130    {
3131      temp = copy_to_mode_reg (Pmode, size);
3132      if (extra != 0)
3133	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3134			     temp, 0, OPTAB_LIB_WIDEN);
3135      anti_adjust_stack (temp);
3136    }
3137
3138#ifndef STACK_GROWS_DOWNWARD
3139  if (0)
3140#else
3141  if (1)
3142#endif
3143    {
3144      temp = virtual_outgoing_args_rtx;
3145      if (extra != 0 && below)
3146	temp = plus_constant (temp, extra);
3147    }
3148  else
3149    {
3150      if (GET_CODE (size) == CONST_INT)
3151	temp = plus_constant (virtual_outgoing_args_rtx,
3152			      -INTVAL (size) - (below ? 0 : extra));
3153      else if (extra != 0 && !below)
3154	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3155			     negate_rtx (Pmode, plus_constant (size, extra)));
3156      else
3157	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3158			     negate_rtx (Pmode, size));
3159    }
3160
3161  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3162}
3163
3164#ifdef PUSH_ROUNDING
3165
3166/* Emit single push insn.  */
3167
3168static void
3169emit_single_push_insn (mode, x, type)
3170     rtx x;
3171     enum machine_mode mode;
3172     tree type;
3173{
3174  rtx dest_addr;
3175  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3176  rtx dest;
3177  enum insn_code icode;
3178  insn_operand_predicate_fn pred;
3179
3180  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3181  /* If there is push pattern, use it.  Otherwise try old way of throwing
3182     MEM representing push operation to move expander.  */
3183  icode = push_optab->handlers[(int) mode].insn_code;
3184  if (icode != CODE_FOR_nothing)
3185    {
3186      if (((pred = insn_data[(int) icode].operand[0].predicate)
3187	   && !((*pred) (x, mode))))
3188	x = force_reg (mode, x);
3189      emit_insn (GEN_FCN (icode) (x));
3190      return;
3191    }
3192  if (GET_MODE_SIZE (mode) == rounded_size)
3193    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3194  else
3195    {
3196#ifdef STACK_GROWS_DOWNWARD
3197      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3198				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3199#else
3200      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3201				GEN_INT (rounded_size));
3202#endif
3203      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3204    }
3205
3206  dest = gen_rtx_MEM (mode, dest_addr);
3207
3208  if (type != 0)
3209    {
3210      set_mem_attributes (dest, type, 1);
3211
3212      if (flag_optimize_sibling_calls)
3213	/* Function incoming arguments may overlap with sibling call
3214	   outgoing arguments and we cannot allow reordering of reads
3215	   from function arguments with stores to outgoing arguments
3216	   of sibling calls.  */
3217	set_mem_alias_set (dest, 0);
3218    }
3219  emit_move_insn (dest, x);
3220}
3221#endif
3222
3223/* Generate code to push X onto the stack, assuming it has mode MODE and
3224   type TYPE.
3225   MODE is redundant except when X is a CONST_INT (since they don't
3226   carry mode info).
3227   SIZE is an rtx for the size of data to be copied (in bytes),
3228   needed only if X is BLKmode.
3229
3230   ALIGN (in bits) is maximum alignment we can assume.
3231
3232   If PARTIAL and REG are both nonzero, then copy that many of the first
3233   words of X into registers starting with REG, and push the rest of X.
3234   The amount of space pushed is decreased by PARTIAL words,
3235   rounded *down* to a multiple of PARM_BOUNDARY.
3236   REG must be a hard register in this case.
3237   If REG is zero but PARTIAL is not, take any all others actions for an
3238   argument partially in registers, but do not actually load any
3239   registers.
3240
3241   EXTRA is the amount in bytes of extra space to leave next to this arg.
3242   This is ignored if an argument block has already been allocated.
3243
3244   On a machine that lacks real push insns, ARGS_ADDR is the address of
3245   the bottom of the argument block for this call.  We use indexing off there
3246   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
3247   argument block has not been preallocated.
3248
3249   ARGS_SO_FAR is the size of args previously pushed for this call.
3250
3251   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3252   for arguments passed in registers.  If nonzero, it will be the number
3253   of bytes required.  */
3254
3255void
3256emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3257		args_addr, args_so_far, reg_parm_stack_space,
3258                alignment_pad)
3259     rtx x;
3260     enum machine_mode mode;
3261     tree type;
3262     rtx size;
3263     unsigned int align;
3264     int partial;
3265     rtx reg;
3266     int extra;
3267     rtx args_addr;
3268     rtx args_so_far;
3269     int reg_parm_stack_space;
3270     rtx alignment_pad;
3271{
3272  rtx xinner;
3273  enum direction stack_direction
3274#ifdef STACK_GROWS_DOWNWARD
3275    = downward;
3276#else
3277    = upward;
3278#endif
3279
3280  /* Decide where to pad the argument: `downward' for below,
3281     `upward' for above, or `none' for don't pad it.
3282     Default is below for small data on big-endian machines; else above.  */
3283  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3284
3285  /* Invert direction if stack is post-decrement.
3286     FIXME: why?  */
3287  if (STACK_PUSH_CODE == POST_DEC)
3288    if (where_pad != none)
3289      where_pad = (where_pad == downward ? upward : downward);
3290
3291  xinner = x = protect_from_queue (x, 0);
3292
3293  if (mode == BLKmode)
3294    {
3295      /* Copy a block into the stack, entirely or partially.  */
3296
3297      rtx temp;
3298      int used = partial * UNITS_PER_WORD;
3299      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3300      int skip;
3301
3302      if (size == 0)
3303	abort ();
3304
3305      used -= offset;
3306
3307      /* USED is now the # of bytes we need not copy to the stack
3308	 because registers will take care of them.  */
3309
3310      if (partial != 0)
3311	xinner = adjust_address (xinner, BLKmode, used);
3312
3313      /* If the partial register-part of the arg counts in its stack size,
3314	 skip the part of stack space corresponding to the registers.
3315	 Otherwise, start copying to the beginning of the stack space,
3316	 by setting SKIP to 0.  */
3317      skip = (reg_parm_stack_space == 0) ? 0 : used;
3318
3319#ifdef PUSH_ROUNDING
3320      /* Do it with several push insns if that doesn't take lots of insns
3321	 and if there is no difficulty with push insns that skip bytes
3322	 on the stack for alignment purposes.  */
3323      if (args_addr == 0
3324	  && PUSH_ARGS
3325	  && GET_CODE (size) == CONST_INT
3326	  && skip == 0
3327	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3328	  /* Here we avoid the case of a structure whose weak alignment
3329	     forces many pushes of a small amount of data,
3330	     and such small pushes do rounding that causes trouble.  */
3331	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3332	      || align >= BIGGEST_ALIGNMENT
3333	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3334		  == (align / BITS_PER_UNIT)))
3335	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3336	{
3337	  /* Push padding now if padding above and stack grows down,
3338	     or if padding below and stack grows up.
3339	     But if space already allocated, this has already been done.  */
3340	  if (extra && args_addr == 0
3341	      && where_pad != none && where_pad != stack_direction)
3342	    anti_adjust_stack (GEN_INT (extra));
3343
3344	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3345	}
3346      else
3347#endif /* PUSH_ROUNDING  */
3348	{
3349	  rtx target;
3350
3351	  /* Otherwise make space on the stack and copy the data
3352	     to the address of that space.  */
3353
3354	  /* Deduct words put into registers from the size we must copy.  */
3355	  if (partial != 0)
3356	    {
3357	      if (GET_CODE (size) == CONST_INT)
3358		size = GEN_INT (INTVAL (size) - used);
3359	      else
3360		size = expand_binop (GET_MODE (size), sub_optab, size,
3361				     GEN_INT (used), NULL_RTX, 0,
3362				     OPTAB_LIB_WIDEN);
3363	    }
3364
3365	  /* Get the address of the stack space.
3366	     In this case, we do not deal with EXTRA separately.
3367	     A single stack adjust will do.  */
3368	  if (! args_addr)
3369	    {
3370	      temp = push_block (size, extra, where_pad == downward);
3371	      extra = 0;
3372	    }
3373	  else if (GET_CODE (args_so_far) == CONST_INT)
3374	    temp = memory_address (BLKmode,
3375				   plus_constant (args_addr,
3376						  skip + INTVAL (args_so_far)));
3377	  else
3378	    temp = memory_address (BLKmode,
3379				   plus_constant (gen_rtx_PLUS (Pmode,
3380								args_addr,
3381								args_so_far),
3382						  skip));
3383	  target = gen_rtx_MEM (BLKmode, temp);
3384
3385	  if (type != 0)
3386	    {
3387	      set_mem_attributes (target, type, 1);
3388	      /* Function incoming arguments may overlap with sibling call
3389		 outgoing arguments and we cannot allow reordering of reads
3390		 from function arguments with stores to outgoing arguments
3391		 of sibling calls.  */
3392	      set_mem_alias_set (target, 0);
3393	    }
3394	  else
3395	    set_mem_align (target, align);
3396
3397	  /* TEMP is the address of the block.  Copy the data there.  */
3398	  if (GET_CODE (size) == CONST_INT
3399	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3400	    {
3401	      move_by_pieces (target, xinner, INTVAL (size), align);
3402	      goto ret;
3403	    }
3404	  else
3405	    {
3406	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3407	      enum machine_mode mode;
3408
3409	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3410		   mode != VOIDmode;
3411		   mode = GET_MODE_WIDER_MODE (mode))
3412		{
3413		  enum insn_code code = movstr_optab[(int) mode];
3414		  insn_operand_predicate_fn pred;
3415
3416		  if (code != CODE_FOR_nothing
3417		      && ((GET_CODE (size) == CONST_INT
3418			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3419			       <= (GET_MODE_MASK (mode) >> 1)))
3420			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3421		      && (!(pred = insn_data[(int) code].operand[0].predicate)
3422			  || ((*pred) (target, BLKmode)))
3423		      && (!(pred = insn_data[(int) code].operand[1].predicate)
3424			  || ((*pred) (xinner, BLKmode)))
3425		      && (!(pred = insn_data[(int) code].operand[3].predicate)
3426			  || ((*pred) (opalign, VOIDmode))))
3427		    {
3428		      rtx op2 = convert_to_mode (mode, size, 1);
3429		      rtx last = get_last_insn ();
3430		      rtx pat;
3431
3432		      pred = insn_data[(int) code].operand[2].predicate;
3433		      if (pred != 0 && ! (*pred) (op2, mode))
3434			op2 = copy_to_mode_reg (mode, op2);
3435
3436		      pat = GEN_FCN ((int) code) (target, xinner,
3437						  op2, opalign);
3438		      if (pat)
3439			{
3440			  emit_insn (pat);
3441			  goto ret;
3442			}
3443		      else
3444			delete_insns_since (last);
3445		    }
3446		}
3447	    }
3448
3449	  if (!ACCUMULATE_OUTGOING_ARGS)
3450	    {
3451	      /* If the source is referenced relative to the stack pointer,
3452		 copy it to another register to stabilize it.  We do not need
3453		 to do this if we know that we won't be changing sp.  */
3454
3455	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3456		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3457		temp = copy_to_reg (temp);
3458	    }
3459
3460	  /* Make inhibit_defer_pop nonzero around the library call
3461	     to force it to pop the bcopy-arguments right away.  */
3462	  NO_DEFER_POP;
3463#ifdef TARGET_MEM_FUNCTIONS
3464	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
3465			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3466			     convert_to_mode (TYPE_MODE (sizetype),
3467					      size, TREE_UNSIGNED (sizetype)),
3468			     TYPE_MODE (sizetype));
3469#else
3470	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
3471			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3472			     convert_to_mode (TYPE_MODE (integer_type_node),
3473					      size,
3474					      TREE_UNSIGNED (integer_type_node)),
3475			     TYPE_MODE (integer_type_node));
3476#endif
3477	  OK_DEFER_POP;
3478	}
3479    }
3480  else if (partial > 0)
3481    {
3482      /* Scalar partly in registers.  */
3483
3484      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3485      int i;
3486      int not_stack;
3487      /* # words of start of argument
3488	 that we must make space for but need not store.  */
3489      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3490      int args_offset = INTVAL (args_so_far);
3491      int skip;
3492
3493      /* Push padding now if padding above and stack grows down,
3494	 or if padding below and stack grows up.
3495	 But if space already allocated, this has already been done.  */
3496      if (extra && args_addr == 0
3497	  && where_pad != none && where_pad != stack_direction)
3498	anti_adjust_stack (GEN_INT (extra));
3499
3500      /* If we make space by pushing it, we might as well push
3501	 the real data.  Otherwise, we can leave OFFSET nonzero
3502	 and leave the space uninitialized.  */
3503      if (args_addr == 0)
3504	offset = 0;
3505
3506      /* Now NOT_STACK gets the number of words that we don't need to
3507	 allocate on the stack.  */
3508      not_stack = partial - offset;
3509
3510      /* If the partial register-part of the arg counts in its stack size,
3511	 skip the part of stack space corresponding to the registers.
3512	 Otherwise, start copying to the beginning of the stack space,
3513	 by setting SKIP to 0.  */
3514      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3515
3516      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3517	x = validize_mem (force_const_mem (mode, x));
3518
3519      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3520	 SUBREGs of such registers are not allowed.  */
3521      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3522	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3523	x = copy_to_reg (x);
3524
3525      /* Loop over all the words allocated on the stack for this arg.  */
3526      /* We can do it by words, because any scalar bigger than a word
3527	 has a size a multiple of a word.  */
3528#ifndef PUSH_ARGS_REVERSED
3529      for (i = not_stack; i < size; i++)
3530#else
3531      for (i = size - 1; i >= not_stack; i--)
3532#endif
3533	if (i >= not_stack + offset)
3534	  emit_push_insn (operand_subword_force (x, i, mode),
3535			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3536			  0, args_addr,
3537			  GEN_INT (args_offset + ((i - not_stack + skip)
3538						  * UNITS_PER_WORD)),
3539			  reg_parm_stack_space, alignment_pad);
3540    }
3541  else
3542    {
3543      rtx addr;
3544      rtx target = NULL_RTX;
3545      rtx dest;
3546
3547      /* Push padding now if padding above and stack grows down,
3548	 or if padding below and stack grows up.
3549	 But if space already allocated, this has already been done.  */
3550      if (extra && args_addr == 0
3551	  && where_pad != none && where_pad != stack_direction)
3552	anti_adjust_stack (GEN_INT (extra));
3553
3554#ifdef PUSH_ROUNDING
3555      if (args_addr == 0 && PUSH_ARGS)
3556	emit_single_push_insn (mode, x, type);
3557      else
3558#endif
3559	{
3560	  if (GET_CODE (args_so_far) == CONST_INT)
3561	    addr
3562	      = memory_address (mode,
3563				plus_constant (args_addr,
3564					       INTVAL (args_so_far)));
3565	  else
3566	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3567						       args_so_far));
3568	  target = addr;
3569	  dest = gen_rtx_MEM (mode, addr);
3570	  if (type != 0)
3571	    {
3572	      set_mem_attributes (dest, type, 1);
3573	      /* Function incoming arguments may overlap with sibling call
3574		 outgoing arguments and we cannot allow reordering of reads
3575		 from function arguments with stores to outgoing arguments
3576		 of sibling calls.  */
3577	      set_mem_alias_set (dest, 0);
3578	    }
3579
3580	  emit_move_insn (dest, x);
3581	}
3582
3583    }
3584
3585 ret:
3586  /* If part should go in registers, copy that part
3587     into the appropriate registers.  Do this now, at the end,
3588     since mem-to-mem copies above may do function calls.  */
3589  if (partial > 0 && reg != 0)
3590    {
3591      /* Handle calls that pass values in multiple non-contiguous locations.
3592	 The Irix 6 ABI has examples of this.  */
3593      if (GET_CODE (reg) == PARALLEL)
3594	emit_group_load (reg, x, -1);  /* ??? size? */
3595      else
3596	move_block_to_reg (REGNO (reg), x, partial, mode);
3597    }
3598
3599  if (extra && args_addr == 0 && where_pad == stack_direction)
3600    anti_adjust_stack (GEN_INT (extra));
3601
3602  if (alignment_pad && args_addr == 0)
3603    anti_adjust_stack (alignment_pad);
3604}
3605
3606/* Return X if X can be used as a subtarget in a sequence of arithmetic
3607   operations.  */
3608
3609static rtx
3610get_subtarget (x)
3611     rtx x;
3612{
3613  return ((x == 0
3614	   /* Only registers can be subtargets.  */
3615	   || GET_CODE (x) != REG
3616	   /* If the register is readonly, it can't be set more than once.  */
3617	   || RTX_UNCHANGING_P (x)
3618	   /* Don't use hard regs to avoid extending their life.  */
3619	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3620	   /* Avoid subtargets inside loops,
3621	      since they hide some invariant expressions.  */
3622	   || preserve_subexpressions_p ())
3623	  ? 0 : x);
3624}
3625
3626/* Expand an assignment that stores the value of FROM into TO.
3627   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3628   (This may contain a QUEUED rtx;
3629   if the value is constant, this rtx is a constant.)
3630   Otherwise, the returned value is NULL_RTX.
3631
3632   SUGGEST_REG is no longer actually used.
3633   It used to mean, copy the value through a register
3634   and return that register, if that is possible.
3635   We now use WANT_VALUE to decide whether to do this.  */
3636
3637rtx
3638expand_assignment (to, from, want_value, suggest_reg)
3639     tree to, from;
3640     int want_value;
3641     int suggest_reg ATTRIBUTE_UNUSED;
3642{
3643  rtx to_rtx = 0;
3644  rtx result;
3645
3646  /* Don't crash if the lhs of the assignment was erroneous.  */
3647
3648  if (TREE_CODE (to) == ERROR_MARK)
3649    {
3650      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3651      return want_value ? result : NULL_RTX;
3652    }
3653
3654  /* Assignment of a structure component needs special treatment
3655     if the structure component's rtx is not simply a MEM.
3656     Assignment of an array element at a constant index, and assignment of
3657     an array element in an unaligned packed structure field, has the same
3658     problem.  */
3659
3660  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3661      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3662    {
3663      enum machine_mode mode1;
3664      HOST_WIDE_INT bitsize, bitpos;
3665      rtx orig_to_rtx;
3666      tree offset;
3667      int unsignedp;
3668      int volatilep = 0;
3669      tree tem;
3670
3671      push_temp_slots ();
3672      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3673				 &unsignedp, &volatilep);
3674
3675      /* If we are going to use store_bit_field and extract_bit_field,
3676	 make sure to_rtx will be safe for multiple use.  */
3677
3678      if (mode1 == VOIDmode && want_value)
3679	tem = stabilize_reference (tem);
3680
3681      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3682
3683      if (offset != 0)
3684	{
3685	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3686
3687	  if (GET_CODE (to_rtx) != MEM)
3688	    abort ();
3689
3690#ifdef POINTERS_EXTEND_UNSIGNED
3691	  if (GET_MODE (offset_rtx) != Pmode)
3692	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
3693#else
3694	  if (GET_MODE (offset_rtx) != ptr_mode)
3695	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3696#endif
3697
3698	  /* A constant address in TO_RTX can have VOIDmode, we must not try
3699	     to call force_reg for that case.  Avoid that case.  */
3700	  if (GET_CODE (to_rtx) == MEM
3701	      && GET_MODE (to_rtx) == BLKmode
3702	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3703	      && bitsize > 0
3704	      && (bitpos % bitsize) == 0
3705	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3706	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3707	    {
3708	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3709	      bitpos = 0;
3710	    }
3711
3712	  to_rtx = offset_address (to_rtx, offset_rtx,
3713				   highest_pow2_factor_for_type (TREE_TYPE (to),
3714								 offset));
3715	}
3716
3717      if (GET_CODE (to_rtx) == MEM)
3718	{
3719	  tree old_expr = MEM_EXPR (to_rtx);
3720
3721	  /* If the field is at offset zero, we could have been given the
3722	     DECL_RTX of the parent struct.  Don't munge it.  */
3723	  to_rtx = shallow_copy_rtx (to_rtx);
3724
3725	  set_mem_attributes (to_rtx, to, 0);
3726
3727	  /* If we changed MEM_EXPR, that means we're now referencing
3728	     the COMPONENT_REF, which means that MEM_OFFSET must be
3729	     relative to that field.  But we've not yet reflected BITPOS
3730	     in TO_RTX.  This will be done in store_field.  Adjust for
3731	     that by biasing MEM_OFFSET by -bitpos.  */
3732	  if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3733	      && (bitpos / BITS_PER_UNIT) != 0)
3734	    set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3735					     - (bitpos / BITS_PER_UNIT)));
3736	}
3737
3738      /* Deal with volatile and readonly fields.  The former is only done
3739	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
3740      if (volatilep && GET_CODE (to_rtx) == MEM)
3741	{
3742	  if (to_rtx == orig_to_rtx)
3743	    to_rtx = copy_rtx (to_rtx);
3744	  MEM_VOLATILE_P (to_rtx) = 1;
3745	}
3746
3747      if (TREE_CODE (to) == COMPONENT_REF
3748	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3749	{
3750	  if (to_rtx == orig_to_rtx)
3751	    to_rtx = copy_rtx (to_rtx);
3752	  RTX_UNCHANGING_P (to_rtx) = 1;
3753	}
3754
3755      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3756	{
3757	  if (to_rtx == orig_to_rtx)
3758	    to_rtx = copy_rtx (to_rtx);
3759	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3760	}
3761
3762      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3763			    (want_value
3764			     /* Spurious cast for HPUX compiler.  */
3765			     ? ((enum machine_mode)
3766				TYPE_MODE (TREE_TYPE (to)))
3767			     : VOIDmode),
3768			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3769
3770      preserve_temp_slots (result);
3771      free_temp_slots ();
3772      pop_temp_slots ();
3773
3774      /* If the value is meaningful, convert RESULT to the proper mode.
3775	 Otherwise, return nothing.  */
3776      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3777					  TYPE_MODE (TREE_TYPE (from)),
3778					  result,
3779					  TREE_UNSIGNED (TREE_TYPE (to)))
3780	      : NULL_RTX);
3781    }
3782
3783  /* If the rhs is a function call and its value is not an aggregate,
3784     call the function before we start to compute the lhs.
3785     This is needed for correct code for cases such as
3786     val = setjmp (buf) on machines where reference to val
3787     requires loading up part of an address in a separate insn.
3788
3789     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3790     since it might be a promoted variable where the zero- or sign- extension
3791     needs to be done.  Handling this in the normal way is safe because no
3792     computation is done before the call.  */
3793  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3794      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3795      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3796	    && GET_CODE (DECL_RTL (to)) == REG))
3797    {
3798      rtx value;
3799
3800      push_temp_slots ();
3801      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3802      if (to_rtx == 0)
3803	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3804
3805      /* Handle calls that return values in multiple non-contiguous locations.
3806	 The Irix 6 ABI has examples of this.  */
3807      if (GET_CODE (to_rtx) == PARALLEL)
3808	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3809      else if (GET_MODE (to_rtx) == BLKmode)
3810	emit_block_move (to_rtx, value, expr_size (from));
3811      else
3812	{
3813#ifdef POINTERS_EXTEND_UNSIGNED
3814	  if (POINTER_TYPE_P (TREE_TYPE (to))
3815	      && GET_MODE (to_rtx) != GET_MODE (value))
3816	    value = convert_memory_address (GET_MODE (to_rtx), value);
3817#endif
3818	  emit_move_insn (to_rtx, value);
3819	}
3820      preserve_temp_slots (to_rtx);
3821      free_temp_slots ();
3822      pop_temp_slots ();
3823      return want_value ? to_rtx : NULL_RTX;
3824    }
3825
3826  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3827     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3828
3829  if (to_rtx == 0)
3830    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3831
3832  /* Don't move directly into a return register.  */
3833  if (TREE_CODE (to) == RESULT_DECL
3834      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3835    {
3836      rtx temp;
3837
3838      push_temp_slots ();
3839      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3840
3841      if (GET_CODE (to_rtx) == PARALLEL)
3842	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3843      else
3844	emit_move_insn (to_rtx, temp);
3845
3846      preserve_temp_slots (to_rtx);
3847      free_temp_slots ();
3848      pop_temp_slots ();
3849      return want_value ? to_rtx : NULL_RTX;
3850    }
3851
3852  /* In case we are returning the contents of an object which overlaps
3853     the place the value is being stored, use a safe function when copying
3854     a value through a pointer into a structure value return block.  */
3855  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3856      && current_function_returns_struct
3857      && !current_function_returns_pcc_struct)
3858    {
3859      rtx from_rtx, size;
3860
3861      push_temp_slots ();
3862      size = expr_size (from);
3863      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3864
3865#ifdef TARGET_MEM_FUNCTIONS
3866      emit_library_call (memmove_libfunc, LCT_NORMAL,
3867			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3868			 XEXP (from_rtx, 0), Pmode,
3869			 convert_to_mode (TYPE_MODE (sizetype),
3870					  size, TREE_UNSIGNED (sizetype)),
3871			 TYPE_MODE (sizetype));
3872#else
3873      emit_library_call (bcopy_libfunc, LCT_NORMAL,
3874			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3875			 XEXP (to_rtx, 0), Pmode,
3876			 convert_to_mode (TYPE_MODE (integer_type_node),
3877					  size, TREE_UNSIGNED (integer_type_node)),
3878			 TYPE_MODE (integer_type_node));
3879#endif
3880
3881      preserve_temp_slots (to_rtx);
3882      free_temp_slots ();
3883      pop_temp_slots ();
3884      return want_value ? to_rtx : NULL_RTX;
3885    }
3886
3887  /* Compute FROM and store the value in the rtx we got.  */
3888
3889  push_temp_slots ();
3890  result = store_expr (from, to_rtx, want_value);
3891  preserve_temp_slots (result);
3892  free_temp_slots ();
3893  pop_temp_slots ();
3894  return want_value ? result : NULL_RTX;
3895}
3896
3897/* Generate code for computing expression EXP,
3898   and storing the value into TARGET.
3899   TARGET may contain a QUEUED rtx.
3900
3901   If WANT_VALUE is nonzero, return a copy of the value
3902   not in TARGET, so that we can be sure to use the proper
3903   value in a containing expression even if TARGET has something
3904   else stored in it.  If possible, we copy the value through a pseudo
3905   and return that pseudo.  Or, if the value is constant, we try to
3906   return the constant.  In some cases, we return a pseudo
3907   copied *from* TARGET.
3908
3909   If the mode is BLKmode then we may return TARGET itself.
3910   It turns out that in BLKmode it doesn't cause a problem.
3911   because C has no operators that could combine two different
3912   assignments into the same BLKmode object with different values
3913   with no sequence point.  Will other languages need this to
3914   be more thorough?
3915
3916   If WANT_VALUE is 0, we return NULL, to make sure
3917   to catch quickly any cases where the caller uses the value
3918   and fails to set WANT_VALUE.  */
3919
3920rtx
3921store_expr (exp, target, want_value)
3922     tree exp;
3923     rtx target;
3924     int want_value;
3925{
3926  rtx temp;
3927  int dont_return_target = 0;
3928  int dont_store_target = 0;
3929
3930  if (TREE_CODE (exp) == COMPOUND_EXPR)
3931    {
3932      /* Perform first part of compound expression, then assign from second
3933	 part.  */
3934      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3935      emit_queue ();
3936      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3937    }
3938  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3939    {
3940      /* For conditional expression, get safe form of the target.  Then
3941	 test the condition, doing the appropriate assignment on either
3942	 side.  This avoids the creation of unnecessary temporaries.
3943	 For non-BLKmode, it is more efficient not to do this.  */
3944
3945      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3946
3947      emit_queue ();
3948      target = protect_from_queue (target, 1);
3949
3950      do_pending_stack_adjust ();
3951      NO_DEFER_POP;
3952      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3953      start_cleanup_deferral ();
3954      store_expr (TREE_OPERAND (exp, 1), target, 0);
3955      end_cleanup_deferral ();
3956      emit_queue ();
3957      emit_jump_insn (gen_jump (lab2));
3958      emit_barrier ();
3959      emit_label (lab1);
3960      start_cleanup_deferral ();
3961      store_expr (TREE_OPERAND (exp, 2), target, 0);
3962      end_cleanup_deferral ();
3963      emit_queue ();
3964      emit_label (lab2);
3965      OK_DEFER_POP;
3966
3967      return want_value ? target : NULL_RTX;
3968    }
3969  else if (queued_subexp_p (target))
3970    /* If target contains a postincrement, let's not risk
3971       using it as the place to generate the rhs.  */
3972    {
3973      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3974	{
3975	  /* Expand EXP into a new pseudo.  */
3976	  temp = gen_reg_rtx (GET_MODE (target));
3977	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3978	}
3979      else
3980	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3981
3982      /* If target is volatile, ANSI requires accessing the value
3983	 *from* the target, if it is accessed.  So make that happen.
3984	 In no case return the target itself.  */
3985      if (! MEM_VOLATILE_P (target) && want_value)
3986	dont_return_target = 1;
3987    }
3988  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3989	   && GET_MODE (target) != BLKmode)
3990    /* If target is in memory and caller wants value in a register instead,
3991       arrange that.  Pass TARGET as target for expand_expr so that,
3992       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3993       We know expand_expr will not use the target in that case.
3994       Don't do this if TARGET is volatile because we are supposed
3995       to write it and then read it.  */
3996    {
3997      temp = expand_expr (exp, target, GET_MODE (target), 0);
3998      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3999	{
4000	  /* If TEMP is already in the desired TARGET, only copy it from
4001	     memory and don't store it there again.  */
4002	  if (temp == target
4003	      || (rtx_equal_p (temp, target)
4004		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4005	    dont_store_target = 1;
4006	  temp = copy_to_reg (temp);
4007	}
4008      dont_return_target = 1;
4009    }
4010  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4011    /* If this is an scalar in a register that is stored in a wider mode
4012       than the declared mode, compute the result into its declared mode
4013       and then convert to the wider mode.  Our value is the computed
4014       expression.  */
4015    {
4016      rtx inner_target = 0;
4017
4018      /* If we don't want a value, we can do the conversion inside EXP,
4019	 which will often result in some optimizations.  Do the conversion
4020	 in two steps: first change the signedness, if needed, then
4021	 the extend.  But don't do this if the type of EXP is a subtype
4022	 of something else since then the conversion might involve
4023	 more than just converting modes.  */
4024      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4025	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4026	{
4027	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4028	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4029	    exp
4030	      = convert
4031		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4032					  TREE_TYPE (exp)),
4033		 exp);
4034
4035	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4036					SUBREG_PROMOTED_UNSIGNED_P (target)),
4037			 exp);
4038
4039	  inner_target = SUBREG_REG (target);
4040	}
4041
4042      temp = expand_expr (exp, inner_target, VOIDmode, 0);
4043
4044      /* If TEMP is a volatile MEM and we want a result value, make
4045	 the access now so it gets done only once.  Likewise if
4046	 it contains TARGET.  */
4047      if (GET_CODE (temp) == MEM && want_value
4048	  && (MEM_VOLATILE_P (temp)
4049	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4050	temp = copy_to_reg (temp);
4051
4052      /* If TEMP is a VOIDmode constant, use convert_modes to make
4053	 sure that we properly convert it.  */
4054      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4055	{
4056	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4057				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4058	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4059			        GET_MODE (target), temp,
4060			        SUBREG_PROMOTED_UNSIGNED_P (target));
4061	}
4062
4063      convert_move (SUBREG_REG (target), temp,
4064		    SUBREG_PROMOTED_UNSIGNED_P (target));
4065
4066      /* If we promoted a constant, change the mode back down to match
4067	 target.  Otherwise, the caller might get confused by a result whose
4068	 mode is larger than expected.  */
4069
4070      if (want_value && GET_MODE (temp) != GET_MODE (target))
4071	{
4072	  if (GET_MODE (temp) != VOIDmode)
4073	    {
4074	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4075	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4076	      SUBREG_PROMOTED_UNSIGNED_P (temp)
4077		= SUBREG_PROMOTED_UNSIGNED_P (target);
4078	    }
4079	  else
4080	    temp = convert_modes (GET_MODE (target),
4081				  GET_MODE (SUBREG_REG (target)),
4082				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4083	}
4084
4085      return want_value ? temp : NULL_RTX;
4086    }
4087  else
4088    {
4089      temp = expand_expr (exp, target, GET_MODE (target), 0);
4090      /* Return TARGET if it's a specified hardware register.
4091	 If TARGET is a volatile mem ref, either return TARGET
4092	 or return a reg copied *from* TARGET; ANSI requires this.
4093
4094	 Otherwise, if TEMP is not TARGET, return TEMP
4095	 if it is constant (for efficiency),
4096	 or if we really want the correct value.  */
4097      if (!(target && GET_CODE (target) == REG
4098	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4099	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4100	  && ! rtx_equal_p (temp, target)
4101	  && (CONSTANT_P (temp) || want_value))
4102	dont_return_target = 1;
4103    }
4104
4105  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4106     the same as that of TARGET, adjust the constant.  This is needed, for
4107     example, in case it is a CONST_DOUBLE and we want only a word-sized
4108     value.  */
4109  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4110      && TREE_CODE (exp) != ERROR_MARK
4111      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4112    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4113			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4114
4115  /* If value was not generated in the target, store it there.
4116     Convert the value to TARGET's type first if necessary.
4117     If TEMP and TARGET compare equal according to rtx_equal_p, but
4118     one or both of them are volatile memory refs, we have to distinguish
4119     two cases:
4120     - expand_expr has used TARGET.  In this case, we must not generate
4121       another copy.  This can be detected by TARGET being equal according
4122       to == .
4123     - expand_expr has not used TARGET - that means that the source just
4124       happens to have the same RTX form.  Since temp will have been created
4125       by expand_expr, it will compare unequal according to == .
4126       We must generate a copy in this case, to reach the correct number
4127       of volatile memory references.  */
4128
4129  if ((! rtx_equal_p (temp, target)
4130       || (temp != target && (side_effects_p (temp)
4131			      || side_effects_p (target))))
4132      && TREE_CODE (exp) != ERROR_MARK
4133      && ! dont_store_target
4134	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4135	    but TARGET is not valid memory reference, TEMP will differ
4136	    from TARGET although it is really the same location.  */
4137      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4138	  || target != DECL_RTL_IF_SET (exp)))
4139    {
4140      target = protect_from_queue (target, 1);
4141      if (GET_MODE (temp) != GET_MODE (target)
4142	  && GET_MODE (temp) != VOIDmode)
4143	{
4144	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4145	  if (dont_return_target)
4146	    {
4147	      /* In this case, we will return TEMP,
4148		 so make sure it has the proper mode.
4149		 But don't forget to store the value into TARGET.  */
4150	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4151	      emit_move_insn (target, temp);
4152	    }
4153	  else
4154	    convert_move (target, temp, unsignedp);
4155	}
4156
4157      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4158	{
4159	  /* Handle copying a string constant into an array.  The string
4160	     constant may be shorter than the array.  So copy just the string's
4161	     actual length, and clear the rest.  First get the size of the data
4162	     type of the string, which is actually the size of the target.  */
4163	  rtx size = expr_size (exp);
4164
4165	  if (GET_CODE (size) == CONST_INT
4166	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4167	    emit_block_move (target, temp, size);
4168	  else
4169	    {
4170	      /* Compute the size of the data to copy from the string.  */
4171	      tree copy_size
4172		= size_binop (MIN_EXPR,
4173			      make_tree (sizetype, size),
4174			      size_int (TREE_STRING_LENGTH (exp)));
4175	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4176					       VOIDmode, 0);
4177	      rtx label = 0;
4178
4179	      /* Copy that much.  */
4180	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4181	      emit_block_move (target, temp, copy_size_rtx);
4182
4183	      /* Figure out how much is left in TARGET that we have to clear.
4184		 Do all calculations in ptr_mode.  */
4185	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4186		{
4187		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4188		  target = adjust_address (target, BLKmode,
4189					   INTVAL (copy_size_rtx));
4190		}
4191	      else
4192		{
4193		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4194				       copy_size_rtx, NULL_RTX, 0,
4195				       OPTAB_LIB_WIDEN);
4196
4197#ifdef POINTERS_EXTEND_UNSIGNED
4198		  if (GET_MODE (copy_size_rtx) != Pmode)
4199		    copy_size_rtx = convert_memory_address (Pmode,
4200							    copy_size_rtx);
4201#endif
4202
4203		  target = offset_address (target, copy_size_rtx,
4204					   highest_pow2_factor (copy_size));
4205		  label = gen_label_rtx ();
4206		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4207					   GET_MODE (size), 0, label);
4208		}
4209
4210	      if (size != const0_rtx)
4211		clear_storage (target, size);
4212
4213	      if (label)
4214		emit_label (label);
4215	    }
4216	}
4217      /* Handle calls that return values in multiple non-contiguous locations.
4218	 The Irix 6 ABI has examples of this.  */
4219      else if (GET_CODE (target) == PARALLEL)
4220	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4221      else if (GET_MODE (temp) == BLKmode)
4222	emit_block_move (target, temp, expr_size (exp));
4223      else
4224	emit_move_insn (target, temp);
4225    }
4226
4227  /* If we don't want a value, return NULL_RTX.  */
4228  if (! want_value)
4229    return NULL_RTX;
4230
4231  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4232     ??? The latter test doesn't seem to make sense.  */
4233  else if (dont_return_target && GET_CODE (temp) != MEM)
4234    return temp;
4235
4236  /* Return TARGET itself if it is a hard register.  */
4237  else if (want_value && GET_MODE (target) != BLKmode
4238	   && ! (GET_CODE (target) == REG
4239		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4240    return copy_to_reg (target);
4241
4242  else
4243    return target;
4244}
4245
4246/* Return 1 if EXP just contains zeros.  */
4247
4248static int
4249is_zeros_p (exp)
4250     tree exp;
4251{
4252  tree elt;
4253
4254  switch (TREE_CODE (exp))
4255    {
4256    case CONVERT_EXPR:
4257    case NOP_EXPR:
4258    case NON_LVALUE_EXPR:
4259    case VIEW_CONVERT_EXPR:
4260      return is_zeros_p (TREE_OPERAND (exp, 0));
4261
4262    case INTEGER_CST:
4263      return integer_zerop (exp);
4264
4265    case COMPLEX_CST:
4266      return
4267	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4268
4269    case REAL_CST:
4270      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4271
4272    case VECTOR_CST:
4273      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4274	   elt = TREE_CHAIN (elt))
4275	if (!is_zeros_p (TREE_VALUE (elt)))
4276	  return 0;
4277
4278      return 1;
4279
4280    case CONSTRUCTOR:
4281      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4282	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4283      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4284	if (! is_zeros_p (TREE_VALUE (elt)))
4285	  return 0;
4286
4287      return 1;
4288
4289    default:
4290      return 0;
4291    }
4292}
4293
4294/* Return 1 if EXP contains mostly (3/4)  zeros.  */
4295
4296static int
4297mostly_zeros_p (exp)
4298     tree exp;
4299{
4300  if (TREE_CODE (exp) == CONSTRUCTOR)
4301    {
4302      int elts = 0, zeros = 0;
4303      tree elt = CONSTRUCTOR_ELTS (exp);
4304      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4305	{
4306	  /* If there are no ranges of true bits, it is all zero.  */
4307	  return elt == NULL_TREE;
4308	}
4309      for (; elt; elt = TREE_CHAIN (elt))
4310	{
4311	  /* We do not handle the case where the index is a RANGE_EXPR,
4312	     so the statistic will be somewhat inaccurate.
4313	     We do make a more accurate count in store_constructor itself,
4314	     so since this function is only used for nested array elements,
4315	     this should be close enough.  */
4316	  if (mostly_zeros_p (TREE_VALUE (elt)))
4317	    zeros++;
4318	  elts++;
4319	}
4320
4321      return 4 * zeros >= 3 * elts;
4322    }
4323
4324  return is_zeros_p (exp);
4325}
4326
4327/* Helper function for store_constructor.
4328   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4329   TYPE is the type of the CONSTRUCTOR, not the element type.
4330   CLEARED is as for store_constructor.
4331   ALIAS_SET is the alias set to use for any stores.
4332
4333   This provides a recursive shortcut back to store_constructor when it isn't
4334   necessary to go through store_field.  This is so that we can pass through
4335   the cleared field to let store_constructor know that we may not have to
4336   clear a substructure if the outer structure has already been cleared.  */
4337
4338static void
4339store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4340			 alias_set)
4341     rtx target;
4342     unsigned HOST_WIDE_INT bitsize;
4343     HOST_WIDE_INT bitpos;
4344     enum machine_mode mode;
4345     tree exp, type;
4346     int cleared;
4347     int alias_set;
4348{
4349  if (TREE_CODE (exp) == CONSTRUCTOR
4350      && bitpos % BITS_PER_UNIT == 0
4351      /* If we have a non-zero bitpos for a register target, then we just
4352	 let store_field do the bitfield handling.  This is unlikely to
4353	 generate unnecessary clear instructions anyways.  */
4354      && (bitpos == 0 || GET_CODE (target) == MEM))
4355    {
4356      if (GET_CODE (target) == MEM)
4357	target
4358	  = adjust_address (target,
4359			    GET_MODE (target) == BLKmode
4360			    || 0 != (bitpos
4361				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4362			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4363
4364
4365      /* Update the alias set, if required.  */
4366      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4367	  && MEM_ALIAS_SET (target) != 0)
4368	{
4369	  target = copy_rtx (target);
4370	  set_mem_alias_set (target, alias_set);
4371	}
4372
4373      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4374    }
4375  else
4376    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4377		 alias_set);
4378}
4379
4380/* Store the value of constructor EXP into the rtx TARGET.
4381   TARGET is either a REG or a MEM; we know it cannot conflict, since
4382   safe_from_p has been called.
4383   CLEARED is true if TARGET is known to have been zero'd.
4384   SIZE is the number of bytes of TARGET we are allowed to modify: this
4385   may not be the same as the size of EXP if we are assigning to a field
4386   which has been packed to exclude padding bits.  */
4387
4388static void
4389store_constructor (exp, target, cleared, size)
4390     tree exp;
4391     rtx target;
4392     int cleared;
4393     HOST_WIDE_INT size;
4394{
4395  tree type = TREE_TYPE (exp);
4396#ifdef WORD_REGISTER_OPERATIONS
4397  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4398#endif
4399
4400  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4401      || TREE_CODE (type) == QUAL_UNION_TYPE)
4402    {
4403      tree elt;
4404
4405      /* We either clear the aggregate or indicate the value is dead.  */
4406      if ((TREE_CODE (type) == UNION_TYPE
4407	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4408	  && ! cleared
4409	  && ! CONSTRUCTOR_ELTS (exp))
4410	/* If the constructor is empty, clear the union.  */
4411	{
4412	  clear_storage (target, expr_size (exp));
4413	  cleared = 1;
4414	}
4415
4416      /* If we are building a static constructor into a register,
4417	 set the initial value as zero so we can fold the value into
4418	 a constant.  But if more than one register is involved,
4419	 this probably loses.  */
4420      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4421	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4422	{
4423	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4424	  cleared = 1;
4425	}
4426
4427      /* If the constructor has fewer fields than the structure
4428	 or if we are initializing the structure to mostly zeros,
4429	 clear the whole structure first.  Don't do this if TARGET is a
4430	 register whose mode size isn't equal to SIZE since clear_storage
4431	 can't handle this case.  */
4432      else if (! cleared && size > 0
4433	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4434		    != fields_length (type))
4435		   || mostly_zeros_p (exp))
4436	       && (GET_CODE (target) != REG
4437		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4438		       == size)))
4439	{
4440	  clear_storage (target, GEN_INT (size));
4441	  cleared = 1;
4442	}
4443
4444      if (! cleared)
4445	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4446
4447      /* Store each element of the constructor into
4448	 the corresponding field of TARGET.  */
4449
4450      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4451	{
4452	  tree field = TREE_PURPOSE (elt);
4453	  tree value = TREE_VALUE (elt);
4454	  enum machine_mode mode;
4455	  HOST_WIDE_INT bitsize;
4456	  HOST_WIDE_INT bitpos = 0;
4457	  int unsignedp;
4458	  tree offset;
4459	  rtx to_rtx = target;
4460
4461	  /* Just ignore missing fields.
4462	     We cleared the whole structure, above,
4463	     if any fields are missing.  */
4464	  if (field == 0)
4465	    continue;
4466
4467	  if (cleared && is_zeros_p (value))
4468	    continue;
4469
4470	  if (host_integerp (DECL_SIZE (field), 1))
4471	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4472	  else
4473	    bitsize = -1;
4474
4475	  unsignedp = TREE_UNSIGNED (field);
4476	  mode = DECL_MODE (field);
4477	  if (DECL_BIT_FIELD (field))
4478	    mode = VOIDmode;
4479
4480	  offset = DECL_FIELD_OFFSET (field);
4481	  if (host_integerp (offset, 0)
4482	      && host_integerp (bit_position (field), 0))
4483	    {
4484	      bitpos = int_bit_position (field);
4485	      offset = 0;
4486	    }
4487	  else
4488	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4489
4490	  if (offset)
4491	    {
4492	      rtx offset_rtx;
4493
4494	      if (contains_placeholder_p (offset))
4495		offset = build (WITH_RECORD_EXPR, sizetype,
4496				offset, make_tree (TREE_TYPE (exp), target));
4497
4498	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4499	      if (GET_CODE (to_rtx) != MEM)
4500		abort ();
4501
4502#ifdef POINTERS_EXTEND_UNSIGNED
4503	      if (GET_MODE (offset_rtx) != Pmode)
4504		offset_rtx = convert_memory_address (Pmode, offset_rtx);
4505#else
4506	      if (GET_MODE (offset_rtx) != ptr_mode)
4507		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4508#endif
4509
4510	      to_rtx = offset_address (to_rtx, offset_rtx,
4511				       highest_pow2_factor (offset));
4512	    }
4513
4514	  if (TREE_READONLY (field))
4515	    {
4516	      if (GET_CODE (to_rtx) == MEM)
4517		to_rtx = copy_rtx (to_rtx);
4518
4519	      RTX_UNCHANGING_P (to_rtx) = 1;
4520	    }
4521
4522#ifdef WORD_REGISTER_OPERATIONS
4523	  /* If this initializes a field that is smaller than a word, at the
4524	     start of a word, try to widen it to a full word.
4525	     This special case allows us to output C++ member function
4526	     initializations in a form that the optimizers can understand.  */
4527	  if (GET_CODE (target) == REG
4528	      && bitsize < BITS_PER_WORD
4529	      && bitpos % BITS_PER_WORD == 0
4530	      && GET_MODE_CLASS (mode) == MODE_INT
4531	      && TREE_CODE (value) == INTEGER_CST
4532	      && exp_size >= 0
4533	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4534	    {
4535	      tree type = TREE_TYPE (value);
4536
4537	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4538		{
4539		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4540		  value = convert (type, value);
4541		}
4542
4543	      if (BYTES_BIG_ENDIAN)
4544		value
4545		  = fold (build (LSHIFT_EXPR, type, value,
4546				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4547	      bitsize = BITS_PER_WORD;
4548	      mode = word_mode;
4549	    }
4550#endif
4551
4552	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4553	      && DECL_NONADDRESSABLE_P (field))
4554	    {
4555	      to_rtx = copy_rtx (to_rtx);
4556	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4557	    }
4558
4559	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4560				   value, type, cleared,
4561				   get_alias_set (TREE_TYPE (field)));
4562	}
4563    }
4564  else if (TREE_CODE (type) == ARRAY_TYPE
4565	   || TREE_CODE (type) == VECTOR_TYPE)
4566    {
4567      tree elt;
4568      int i;
4569      int need_to_clear;
4570      tree domain = TYPE_DOMAIN (type);
4571      tree elttype = TREE_TYPE (type);
4572      int const_bounds_p;
4573      HOST_WIDE_INT minelt = 0;
4574      HOST_WIDE_INT maxelt = 0;
4575
4576      /* Vectors are like arrays, but the domain is stored via an array
4577	 type indirectly.  */
4578      if (TREE_CODE (type) == VECTOR_TYPE)
4579	{
4580	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4581	     the same field as TYPE_DOMAIN, we are not guaranteed that
4582	     it always will.  */
4583	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4584	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4585	}
4586
4587      const_bounds_p = (TYPE_MIN_VALUE (domain)
4588			&& TYPE_MAX_VALUE (domain)
4589			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
4590			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
4591
4592      /* If we have constant bounds for the range of the type, get them.  */
4593      if (const_bounds_p)
4594	{
4595	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4596	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4597	}
4598
4599      /* If the constructor has fewer elements than the array,
4600         clear the whole array first.  Similarly if this is
4601         static constructor of a non-BLKmode object.  */
4602      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4603	need_to_clear = 1;
4604      else
4605	{
4606	  HOST_WIDE_INT count = 0, zero_count = 0;
4607	  need_to_clear = ! const_bounds_p;
4608
4609	  /* This loop is a more accurate version of the loop in
4610	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4611	     It is also needed to check for missing elements.  */
4612	  for (elt = CONSTRUCTOR_ELTS (exp);
4613	       elt != NULL_TREE && ! need_to_clear;
4614	       elt = TREE_CHAIN (elt))
4615	    {
4616	      tree index = TREE_PURPOSE (elt);
4617	      HOST_WIDE_INT this_node_count;
4618
4619	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4620		{
4621		  tree lo_index = TREE_OPERAND (index, 0);
4622		  tree hi_index = TREE_OPERAND (index, 1);
4623
4624		  if (! host_integerp (lo_index, 1)
4625		      || ! host_integerp (hi_index, 1))
4626		    {
4627		      need_to_clear = 1;
4628		      break;
4629		    }
4630
4631		  this_node_count = (tree_low_cst (hi_index, 1)
4632				     - tree_low_cst (lo_index, 1) + 1);
4633		}
4634	      else
4635		this_node_count = 1;
4636
4637	      count += this_node_count;
4638	      if (mostly_zeros_p (TREE_VALUE (elt)))
4639		zero_count += this_node_count;
4640	    }
4641
4642	  /* Clear the entire array first if there are any missing elements,
4643	     or if the incidence of zero elements is >= 75%.  */
4644	  if (! need_to_clear
4645	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4646	    need_to_clear = 1;
4647	}
4648
4649      if (need_to_clear && size > 0)
4650	{
4651	  if (! cleared)
4652	    {
4653	      if (REG_P (target))
4654		emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
4655	      else
4656		clear_storage (target, GEN_INT (size));
4657	    }
4658	  cleared = 1;
4659	}
4660      else if (REG_P (target))
4661	/* Inform later passes that the old value is dead.  */
4662	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4663
4664      /* Store each element of the constructor into
4665	 the corresponding element of TARGET, determined
4666	 by counting the elements.  */
4667      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4668	   elt;
4669	   elt = TREE_CHAIN (elt), i++)
4670	{
4671	  enum machine_mode mode;
4672	  HOST_WIDE_INT bitsize;
4673	  HOST_WIDE_INT bitpos;
4674	  int unsignedp;
4675	  tree value = TREE_VALUE (elt);
4676	  tree index = TREE_PURPOSE (elt);
4677	  rtx xtarget = target;
4678
4679	  if (cleared && is_zeros_p (value))
4680	    continue;
4681
4682	  unsignedp = TREE_UNSIGNED (elttype);
4683	  mode = TYPE_MODE (elttype);
4684	  if (mode == BLKmode)
4685	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4686		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
4687		       : -1);
4688	  else
4689	    bitsize = GET_MODE_BITSIZE (mode);
4690
4691	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4692	    {
4693	      tree lo_index = TREE_OPERAND (index, 0);
4694	      tree hi_index = TREE_OPERAND (index, 1);
4695	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4696	      struct nesting *loop;
4697	      HOST_WIDE_INT lo, hi, count;
4698	      tree position;
4699
4700	      /* If the range is constant and "small", unroll the loop.  */
4701	      if (const_bounds_p
4702		  && host_integerp (lo_index, 0)
4703		  && host_integerp (hi_index, 0)
4704		  && (lo = tree_low_cst (lo_index, 0),
4705		      hi = tree_low_cst (hi_index, 0),
4706		      count = hi - lo + 1,
4707		      (GET_CODE (target) != MEM
4708		       || count <= 2
4709		       || (host_integerp (TYPE_SIZE (elttype), 1)
4710			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4711			       <= 40 * 8)))))
4712		{
4713		  lo -= minelt;  hi -= minelt;
4714		  for (; lo <= hi; lo++)
4715		    {
4716		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4717
4718		      if (GET_CODE (target) == MEM
4719			  && !MEM_KEEP_ALIAS_SET_P (target)
4720			  && TREE_CODE (type) == ARRAY_TYPE
4721			  && TYPE_NONALIASED_COMPONENT (type))
4722			{
4723			  target = copy_rtx (target);
4724			  MEM_KEEP_ALIAS_SET_P (target) = 1;
4725			}
4726
4727		      store_constructor_field
4728			(target, bitsize, bitpos, mode, value, type, cleared,
4729			 get_alias_set (elttype));
4730		    }
4731		}
4732	      else
4733		{
4734		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4735		  loop_top = gen_label_rtx ();
4736		  loop_end = gen_label_rtx ();
4737
4738		  unsignedp = TREE_UNSIGNED (domain);
4739
4740		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4741
4742		  index_r
4743		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4744						 &unsignedp, 0));
4745		  SET_DECL_RTL (index, index_r);
4746		  if (TREE_CODE (value) == SAVE_EXPR
4747		      && SAVE_EXPR_RTL (value) == 0)
4748		    {
4749		      /* Make sure value gets expanded once before the
4750                         loop.  */
4751		      expand_expr (value, const0_rtx, VOIDmode, 0);
4752		      emit_queue ();
4753		    }
4754		  store_expr (lo_index, index_r, 0);
4755		  loop = expand_start_loop (0);
4756
4757		  /* Assign value to element index.  */
4758		  position
4759		    = convert (ssizetype,
4760			       fold (build (MINUS_EXPR, TREE_TYPE (index),
4761					    index, TYPE_MIN_VALUE (domain))));
4762		  position = size_binop (MULT_EXPR, position,
4763					 convert (ssizetype,
4764						  TYPE_SIZE_UNIT (elttype)));
4765
4766		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4767		  xtarget = offset_address (target, pos_rtx,
4768					    highest_pow2_factor (position));
4769		  xtarget = adjust_address (xtarget, mode, 0);
4770		  if (TREE_CODE (value) == CONSTRUCTOR)
4771		    store_constructor (value, xtarget, cleared,
4772				       bitsize / BITS_PER_UNIT);
4773		  else
4774		    store_expr (value, xtarget, 0);
4775
4776		  expand_exit_loop_if_false (loop,
4777					     build (LT_EXPR, integer_type_node,
4778						    index, hi_index));
4779
4780		  expand_increment (build (PREINCREMENT_EXPR,
4781					   TREE_TYPE (index),
4782					   index, integer_one_node), 0, 0);
4783		  expand_end_loop ();
4784		  emit_label (loop_end);
4785		}
4786	    }
4787	  else if ((index != 0 && ! host_integerp (index, 0))
4788		   || ! host_integerp (TYPE_SIZE (elttype), 1))
4789	    {
4790	      tree position;
4791
4792	      if (index == 0)
4793		index = ssize_int (1);
4794
4795	      if (minelt)
4796		index = convert (ssizetype,
4797				 fold (build (MINUS_EXPR, index,
4798					      TYPE_MIN_VALUE (domain))));
4799
4800	      position = size_binop (MULT_EXPR, index,
4801				     convert (ssizetype,
4802					      TYPE_SIZE_UNIT (elttype)));
4803	      xtarget = offset_address (target,
4804					expand_expr (position, 0, VOIDmode, 0),
4805					highest_pow2_factor (position));
4806	      xtarget = adjust_address (xtarget, mode, 0);
4807	      store_expr (value, xtarget, 0);
4808	    }
4809	  else
4810	    {
4811	      if (index != 0)
4812		bitpos = ((tree_low_cst (index, 0) - minelt)
4813			  * tree_low_cst (TYPE_SIZE (elttype), 1));
4814	      else
4815		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4816
4817	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4818		  && TREE_CODE (type) == ARRAY_TYPE
4819		  && TYPE_NONALIASED_COMPONENT (type))
4820		{
4821		  target = copy_rtx (target);
4822		  MEM_KEEP_ALIAS_SET_P (target) = 1;
4823		}
4824
4825	      store_constructor_field (target, bitsize, bitpos, mode, value,
4826				       type, cleared, get_alias_set (elttype));
4827
4828	    }
4829	}
4830    }
4831
4832  /* Set constructor assignments.  */
4833  else if (TREE_CODE (type) == SET_TYPE)
4834    {
4835      tree elt = CONSTRUCTOR_ELTS (exp);
4836      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4837      tree domain = TYPE_DOMAIN (type);
4838      tree domain_min, domain_max, bitlength;
4839
4840      /* The default implementation strategy is to extract the constant
4841	 parts of the constructor, use that to initialize the target,
4842	 and then "or" in whatever non-constant ranges we need in addition.
4843
4844	 If a large set is all zero or all ones, it is
4845	 probably better to set it using memset (if available) or bzero.
4846	 Also, if a large set has just a single range, it may also be
4847	 better to first clear all the first clear the set (using
4848	 bzero/memset), and set the bits we want.  */
4849
4850      /* Check for all zeros.  */
4851      if (elt == NULL_TREE && size > 0)
4852	{
4853	  if (!cleared)
4854	    clear_storage (target, GEN_INT (size));
4855	  return;
4856	}
4857
4858      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4859      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4860      bitlength = size_binop (PLUS_EXPR,
4861			      size_diffop (domain_max, domain_min),
4862			      ssize_int (1));
4863
4864      nbits = tree_low_cst (bitlength, 1);
4865
4866      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4867	 are "complicated" (more than one range), initialize (the
4868	 constant parts) by copying from a constant.  */
4869      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4870	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4871	{
4872	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4873	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4874	  char *bit_buffer = (char *) alloca (nbits);
4875	  HOST_WIDE_INT word = 0;
4876	  unsigned int bit_pos = 0;
4877	  unsigned int ibit = 0;
4878	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
4879
4880	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4881	  for (;;)
4882	    {
4883	      if (bit_buffer[ibit])
4884		{
4885		  if (BYTES_BIG_ENDIAN)
4886		    word |= (1 << (set_word_size - 1 - bit_pos));
4887		  else
4888		    word |= 1 << bit_pos;
4889		}
4890
4891	      bit_pos++;  ibit++;
4892	      if (bit_pos >= set_word_size || ibit == nbits)
4893		{
4894		  if (word != 0 || ! cleared)
4895		    {
4896		      rtx datum = GEN_INT (word);
4897		      rtx to_rtx;
4898
4899		      /* The assumption here is that it is safe to use
4900			 XEXP if the set is multi-word, but not if
4901			 it's single-word.  */
4902		      if (GET_CODE (target) == MEM)
4903			to_rtx = adjust_address (target, mode, offset);
4904		      else if (offset == 0)
4905			to_rtx = target;
4906		      else
4907			abort ();
4908		      emit_move_insn (to_rtx, datum);
4909		    }
4910
4911		  if (ibit == nbits)
4912		    break;
4913		  word = 0;
4914		  bit_pos = 0;
4915		  offset += set_word_size / BITS_PER_UNIT;
4916		}
4917	    }
4918	}
4919      else if (!cleared)
4920	/* Don't bother clearing storage if the set is all ones.  */
4921	if (TREE_CHAIN (elt) != NULL_TREE
4922	    || (TREE_PURPOSE (elt) == NULL_TREE
4923		? nbits != 1
4924		: ( ! host_integerp (TREE_VALUE (elt), 0)
4925		   || ! host_integerp (TREE_PURPOSE (elt), 0)
4926		   || (tree_low_cst (TREE_VALUE (elt), 0)
4927		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4928		       != (HOST_WIDE_INT) nbits))))
4929	  clear_storage (target, expr_size (exp));
4930
4931      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4932	{
4933	  /* Start of range of element or NULL.  */
4934	  tree startbit = TREE_PURPOSE (elt);
4935	  /* End of range of element, or element value.  */
4936	  tree endbit   = TREE_VALUE (elt);
4937#ifdef TARGET_MEM_FUNCTIONS
4938	  HOST_WIDE_INT startb, endb;
4939#endif
4940	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4941
4942	  bitlength_rtx = expand_expr (bitlength,
4943				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4944
4945	  /* Handle non-range tuple element like [ expr ].  */
4946	  if (startbit == NULL_TREE)
4947	    {
4948	      startbit = save_expr (endbit);
4949	      endbit = startbit;
4950	    }
4951
4952	  startbit = convert (sizetype, startbit);
4953	  endbit = convert (sizetype, endbit);
4954	  if (! integer_zerop (domain_min))
4955	    {
4956	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4957	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4958	    }
4959	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4960				      EXPAND_CONST_ADDRESS);
4961	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4962				    EXPAND_CONST_ADDRESS);
4963
4964	  if (REG_P (target))
4965	    {
4966	      targetx
4967		= assign_temp
4968		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4969					  TYPE_QUAL_CONST)),
4970		   0, 1, 1);
4971	      emit_move_insn (targetx, target);
4972	    }
4973
4974	  else if (GET_CODE (target) == MEM)
4975	    targetx = target;
4976	  else
4977	    abort ();
4978
4979#ifdef TARGET_MEM_FUNCTIONS
4980	  /* Optimization:  If startbit and endbit are
4981	     constants divisible by BITS_PER_UNIT,
4982	     call memset instead.  */
4983	  if (TREE_CODE (startbit) == INTEGER_CST
4984	      && TREE_CODE (endbit) == INTEGER_CST
4985	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4986	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4987	    {
4988	      emit_library_call (memset_libfunc, LCT_NORMAL,
4989				 VOIDmode, 3,
4990				 plus_constant (XEXP (targetx, 0),
4991						startb / BITS_PER_UNIT),
4992				 Pmode,
4993				 constm1_rtx, TYPE_MODE (integer_type_node),
4994				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4995				 TYPE_MODE (sizetype));
4996	    }
4997	  else
4998#endif
4999	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5000			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5001			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5002			       startbit_rtx, TYPE_MODE (sizetype),
5003			       endbit_rtx, TYPE_MODE (sizetype));
5004
5005	  if (REG_P (target))
5006	    emit_move_insn (target, targetx);
5007	}
5008    }
5009
5010  else
5011    abort ();
5012}
5013
5014/* Store the value of EXP (an expression tree)
5015   into a subfield of TARGET which has mode MODE and occupies
5016   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5017   If MODE is VOIDmode, it means that we are storing into a bit-field.
5018
5019   If VALUE_MODE is VOIDmode, return nothing in particular.
5020   UNSIGNEDP is not used in this case.
5021
5022   Otherwise, return an rtx for the value stored.  This rtx
5023   has mode VALUE_MODE if that is convenient to do.
5024   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5025
5026   TYPE is the type of the underlying object,
5027
5028   ALIAS_SET is the alias set for the destination.  This value will
5029   (in general) be different from that for TARGET, since TARGET is a
5030   reference to the containing structure.  */
5031
5032static rtx
5033store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5034	     alias_set)
5035     rtx target;
5036     HOST_WIDE_INT bitsize;
5037     HOST_WIDE_INT bitpos;
5038     enum machine_mode mode;
5039     tree exp;
5040     enum machine_mode value_mode;
5041     int unsignedp;
5042     tree type;
5043     int alias_set;
5044{
5045  HOST_WIDE_INT width_mask = 0;
5046
5047  if (TREE_CODE (exp) == ERROR_MARK)
5048    return const0_rtx;
5049
5050  /* If we have nothing to store, do nothing unless the expression has
5051     side-effects.  */
5052  if (bitsize == 0)
5053    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5054  else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5055    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5056
5057  /* If we are storing into an unaligned field of an aligned union that is
5058     in a register, we may have the mode of TARGET being an integer mode but
5059     MODE == BLKmode.  In that case, get an aligned object whose size and
5060     alignment are the same as TARGET and store TARGET into it (we can avoid
5061     the store if the field being stored is the entire width of TARGET).  Then
5062     call ourselves recursively to store the field into a BLKmode version of
5063     that object.  Finally, load from the object into TARGET.  This is not
5064     very efficient in general, but should only be slightly more expensive
5065     than the otherwise-required unaligned accesses.  Perhaps this can be
5066     cleaned up later.  */
5067
5068  if (mode == BLKmode
5069      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5070    {
5071      rtx object
5072	= assign_temp
5073	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5074	   0, 1, 1);
5075      rtx blk_object = adjust_address (object, BLKmode, 0);
5076
5077      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5078	emit_move_insn (object, target);
5079
5080      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5081		   alias_set);
5082
5083      emit_move_insn (target, object);
5084
5085      /* We want to return the BLKmode version of the data.  */
5086      return blk_object;
5087    }
5088
5089  if (GET_CODE (target) == CONCAT)
5090    {
5091      /* We're storing into a struct containing a single __complex.  */
5092
5093      if (bitpos != 0)
5094	abort ();
5095      return store_expr (exp, target, 0);
5096    }
5097
5098  /* If the structure is in a register or if the component
5099     is a bit field, we cannot use addressing to access it.
5100     Use bit-field techniques or SUBREG to store in it.  */
5101
5102  if (mode == VOIDmode
5103      || (mode != BLKmode && ! direct_store[(int) mode]
5104	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5105	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5106      || GET_CODE (target) == REG
5107      || GET_CODE (target) == SUBREG
5108      /* If the field isn't aligned enough to store as an ordinary memref,
5109	 store it as a bit field.  */
5110      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5111	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5112	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5113      /* If the RHS and field are a constant size and the size of the
5114	 RHS isn't the same size as the bitfield, we must use bitfield
5115	 operations.  */
5116      || (bitsize >= 0
5117	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5118	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5119    {
5120      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5121
5122      /* If BITSIZE is narrower than the size of the type of EXP
5123	 we will be narrowing TEMP.  Normally, what's wanted are the
5124	 low-order bits.  However, if EXP's type is a record and this is
5125	 big-endian machine, we want the upper BITSIZE bits.  */
5126      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5127	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5128	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5129	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5130			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5131				       - bitsize),
5132			     temp, 1);
5133
5134      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5135	 MODE.  */
5136      if (mode != VOIDmode && mode != BLKmode
5137	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5138	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5139
5140      /* If the modes of TARGET and TEMP are both BLKmode, both
5141	 must be in memory and BITPOS must be aligned on a byte
5142	 boundary.  If so, we simply do a block copy.  */
5143      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5144	{
5145	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5146	      || bitpos % BITS_PER_UNIT != 0)
5147	    abort ();
5148
5149	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5150	  emit_block_move (target, temp,
5151			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5152				    / BITS_PER_UNIT));
5153
5154	  return value_mode == VOIDmode ? const0_rtx : target;
5155	}
5156
5157      /* Store the value in the bitfield.  */
5158      store_bit_field (target, bitsize, bitpos, mode, temp,
5159		       int_size_in_bytes (type));
5160
5161      if (value_mode != VOIDmode)
5162	{
5163	  /* The caller wants an rtx for the value.
5164	     If possible, avoid refetching from the bitfield itself.  */
5165	  if (width_mask != 0
5166	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5167	    {
5168	      tree count;
5169	      enum machine_mode tmode;
5170
5171	      tmode = GET_MODE (temp);
5172	      if (tmode == VOIDmode)
5173		tmode = value_mode;
5174
5175	      if (unsignedp)
5176		return expand_and (tmode, temp,
5177				   GEN_INT (trunc_int_for_mode (width_mask,
5178								tmode)),
5179				   NULL_RTX);
5180
5181	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5182	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5183	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5184	    }
5185
5186	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5187				    NULL_RTX, value_mode, VOIDmode,
5188				    int_size_in_bytes (type));
5189	}
5190      return const0_rtx;
5191    }
5192  else
5193    {
5194      rtx addr = XEXP (target, 0);
5195      rtx to_rtx = target;
5196
5197      /* If a value is wanted, it must be the lhs;
5198	 so make the address stable for multiple use.  */
5199
5200      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5201	  && ! CONSTANT_ADDRESS_P (addr)
5202	  /* A frame-pointer reference is already stable.  */
5203	  && ! (GET_CODE (addr) == PLUS
5204		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5205		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5206		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5207	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5208
5209      /* Now build a reference to just the desired component.  */
5210
5211      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5212
5213      if (to_rtx == target)
5214	to_rtx = copy_rtx (to_rtx);
5215
5216      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5217      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5218	set_mem_alias_set (to_rtx, alias_set);
5219
5220      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5221    }
5222}
5223
5224/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5225   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5226   codes and find the ultimate containing object, which we return.
5227
5228   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5229   bit position, and *PUNSIGNEDP to the signedness of the field.
5230   If the position of the field is variable, we store a tree
5231   giving the variable offset (in units) in *POFFSET.
5232   This offset is in addition to the bit position.
5233   If the position is not variable, we store 0 in *POFFSET.
5234
5235   If any of the extraction expressions is volatile,
5236   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5237
5238   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5239   is a mode that can be used to access the field.  In that case, *PBITSIZE
5240   is redundant.
5241
5242   If the field describes a variable-sized object, *PMODE is set to
5243   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5244   this case, but the address of the object can be found.  */
5245
5246tree
5247get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5248		     punsignedp, pvolatilep)
5249     tree exp;
5250     HOST_WIDE_INT *pbitsize;
5251     HOST_WIDE_INT *pbitpos;
5252     tree *poffset;
5253     enum machine_mode *pmode;
5254     int *punsignedp;
5255     int *pvolatilep;
5256{
5257  tree size_tree = 0;
5258  enum machine_mode mode = VOIDmode;
5259  tree offset = size_zero_node;
5260  tree bit_offset = bitsize_zero_node;
5261  tree placeholder_ptr = 0;
5262  tree tem;
5263
5264  /* First get the mode, signedness, and size.  We do this from just the
5265     outermost expression.  */
5266  if (TREE_CODE (exp) == COMPONENT_REF)
5267    {
5268      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5269      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5270	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5271
5272      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5273    }
5274  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5275    {
5276      size_tree = TREE_OPERAND (exp, 1);
5277      *punsignedp = TREE_UNSIGNED (exp);
5278    }
5279  else
5280    {
5281      mode = TYPE_MODE (TREE_TYPE (exp));
5282      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5283
5284      if (mode == BLKmode)
5285	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5286      else
5287	*pbitsize = GET_MODE_BITSIZE (mode);
5288    }
5289
5290  if (size_tree != 0)
5291    {
5292      if (! host_integerp (size_tree, 1))
5293	mode = BLKmode, *pbitsize = -1;
5294      else
5295	*pbitsize = tree_low_cst (size_tree, 1);
5296    }
5297
5298  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5299     and find the ultimate containing object.  */
5300  while (1)
5301    {
5302      if (TREE_CODE (exp) == BIT_FIELD_REF)
5303	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5304      else if (TREE_CODE (exp) == COMPONENT_REF)
5305	{
5306	  tree field = TREE_OPERAND (exp, 1);
5307	  tree this_offset = DECL_FIELD_OFFSET (field);
5308
5309	  /* If this field hasn't been filled in yet, don't go
5310	     past it.  This should only happen when folding expressions
5311	     made during type construction.  */
5312	  if (this_offset == 0)
5313	    break;
5314	  else if (! TREE_CONSTANT (this_offset)
5315		   && contains_placeholder_p (this_offset))
5316	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5317
5318	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5319	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5320				   DECL_FIELD_BIT_OFFSET (field));
5321
5322	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5323	}
5324
5325      else if (TREE_CODE (exp) == ARRAY_REF
5326	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5327	{
5328	  tree index = TREE_OPERAND (exp, 1);
5329	  tree array = TREE_OPERAND (exp, 0);
5330	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5331	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5332	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5333
5334	  /* We assume all arrays have sizes that are a multiple of a byte.
5335	     First subtract the lower bound, if any, in the type of the
5336	     index, then convert to sizetype and multiply by the size of the
5337	     array element.  */
5338	  if (low_bound != 0 && ! integer_zerop (low_bound))
5339	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5340				 index, low_bound));
5341
5342	  /* If the index has a self-referential type, pass it to a
5343	     WITH_RECORD_EXPR; if the component size is, pass our
5344	     component to one.  */
5345	  if (! TREE_CONSTANT (index)
5346	      && contains_placeholder_p (index))
5347	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5348	  if (! TREE_CONSTANT (unit_size)
5349	      && contains_placeholder_p (unit_size))
5350	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5351
5352	  offset = size_binop (PLUS_EXPR, offset,
5353			       size_binop (MULT_EXPR,
5354					   convert (sizetype, index),
5355					   unit_size));
5356	}
5357
5358      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5359	{
5360	  tree new = find_placeholder (exp, &placeholder_ptr);
5361
5362	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5363	     We might have been called from tree optimization where we
5364	     haven't set up an object yet.  */
5365	  if (new == 0)
5366	    break;
5367	  else
5368	    exp = new;
5369
5370	  continue;
5371	}
5372      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5373	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5374	       && ! ((TREE_CODE (exp) == NOP_EXPR
5375		      || TREE_CODE (exp) == CONVERT_EXPR)
5376		     && (TYPE_MODE (TREE_TYPE (exp))
5377			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5378	break;
5379
5380      /* If any reference in the chain is volatile, the effect is volatile.  */
5381      if (TREE_THIS_VOLATILE (exp))
5382	*pvolatilep = 1;
5383
5384      exp = TREE_OPERAND (exp, 0);
5385    }
5386
5387  /* If OFFSET is constant, see if we can return the whole thing as a
5388     constant bit position.  Otherwise, split it up.  */
5389  if (host_integerp (offset, 0)
5390      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5391				 bitsize_unit_node))
5392      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5393      && host_integerp (tem, 0))
5394    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5395  else
5396    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5397
5398  *pmode = mode;
5399  return exp;
5400}
5401
5402/* Return 1 if T is an expression that get_inner_reference handles.  */
5403
5404int
5405handled_component_p (t)
5406     tree t;
5407{
5408  switch (TREE_CODE (t))
5409    {
5410    case BIT_FIELD_REF:
5411    case COMPONENT_REF:
5412    case ARRAY_REF:
5413    case ARRAY_RANGE_REF:
5414    case NON_LVALUE_EXPR:
5415    case VIEW_CONVERT_EXPR:
5416      return 1;
5417
5418    case NOP_EXPR:
5419    case CONVERT_EXPR:
5420      return (TYPE_MODE (TREE_TYPE (t))
5421	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5422
5423    default:
5424      return 0;
5425    }
5426}
5427
5428/* Given an rtx VALUE that may contain additions and multiplications, return
5429   an equivalent value that just refers to a register, memory, or constant.
5430   This is done by generating instructions to perform the arithmetic and
5431   returning a pseudo-register containing the value.
5432
5433   The returned value may be a REG, SUBREG, MEM or constant.  */
5434
5435rtx
5436force_operand (value, target)
5437     rtx value, target;
5438{
5439  rtx op1, op2;
5440  /* Use subtarget as the target for operand 0 of a binary operation.  */
5441  rtx subtarget = get_subtarget (target);
5442  enum rtx_code code = GET_CODE (value);
5443
5444  /* Check for a PIC address load.  */
5445  if ((code == PLUS || code == MINUS)
5446      && XEXP (value, 0) == pic_offset_table_rtx
5447      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5448	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5449	  || GET_CODE (XEXP (value, 1)) == CONST))
5450    {
5451      if (!subtarget)
5452	subtarget = gen_reg_rtx (GET_MODE (value));
5453      emit_move_insn (subtarget, value);
5454      return subtarget;
5455    }
5456
5457  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5458    {
5459      if (!target)
5460	target = gen_reg_rtx (GET_MODE (value));
5461      convert_move (target, force_operand (XEXP (value, 0), NULL),
5462		    code == ZERO_EXTEND);
5463      return target;
5464    }
5465
5466  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5467    {
5468      op2 = XEXP (value, 1);
5469      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5470	subtarget = 0;
5471      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5472	{
5473	  code = PLUS;
5474	  op2 = negate_rtx (GET_MODE (value), op2);
5475	}
5476
5477      /* Check for an addition with OP2 a constant integer and our first
5478         operand a PLUS of a virtual register and something else.  In that
5479         case, we want to emit the sum of the virtual register and the
5480         constant first and then add the other value.  This allows virtual
5481         register instantiation to simply modify the constant rather than
5482         creating another one around this addition.  */
5483      if (code == PLUS && GET_CODE (op2) == CONST_INT
5484	  && GET_CODE (XEXP (value, 0)) == PLUS
5485	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5486	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5487	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5488	{
5489	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5490					  XEXP (XEXP (value, 0), 0), op2,
5491					  subtarget, 0, OPTAB_LIB_WIDEN);
5492	  return expand_simple_binop (GET_MODE (value), code, temp,
5493				      force_operand (XEXP (XEXP (value,
5494								 0), 1), 0),
5495				      target, 0, OPTAB_LIB_WIDEN);
5496	}
5497
5498      op1 = force_operand (XEXP (value, 0), subtarget);
5499      op2 = force_operand (op2, NULL_RTX);
5500      switch (code)
5501	{
5502	case MULT:
5503	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5504	case DIV:
5505	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5506	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5507					target, 1, OPTAB_LIB_WIDEN);
5508	  else
5509	    return expand_divmod (0,
5510				  FLOAT_MODE_P (GET_MODE (value))
5511				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5512				  GET_MODE (value), op1, op2, target, 0);
5513	  break;
5514	case MOD:
5515	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5516				target, 0);
5517	  break;
5518	case UDIV:
5519	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5520				target, 1);
5521	  break;
5522	case UMOD:
5523	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5524				target, 1);
5525	  break;
5526	case ASHIFTRT:
5527	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5528				      target, 0, OPTAB_LIB_WIDEN);
5529	  break;
5530	default:
5531	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5532				      target, 1, OPTAB_LIB_WIDEN);
5533	}
5534    }
5535  if (GET_RTX_CLASS (code) == '1')
5536    {
5537      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5538      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5539    }
5540
5541#ifdef INSN_SCHEDULING
5542  /* On machines that have insn scheduling, we want all memory reference to be
5543     explicit, so we need to deal with such paradoxical SUBREGs.  */
5544  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5545      && (GET_MODE_SIZE (GET_MODE (value))
5546	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5547    value
5548      = simplify_gen_subreg (GET_MODE (value),
5549			     force_reg (GET_MODE (SUBREG_REG (value)),
5550					force_operand (SUBREG_REG (value),
5551						       NULL_RTX)),
5552			     GET_MODE (SUBREG_REG (value)),
5553			     SUBREG_BYTE (value));
5554#endif
5555
5556  return value;
5557}
5558
5559/* Subroutine of expand_expr: return nonzero iff there is no way that
5560   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5561   call is going to be used to determine whether we need a temporary
5562   for EXP, as opposed to a recursive call to this function.
5563
5564   It is always safe for this routine to return zero since it merely
5565   searches for optimization opportunities.  */
5566
5567int
5568safe_from_p (x, exp, top_p)
5569     rtx x;
5570     tree exp;
5571     int top_p;
5572{
5573  rtx exp_rtl = 0;
5574  int i, nops;
5575  static tree save_expr_list;
5576
5577  if (x == 0
5578      /* If EXP has varying size, we MUST use a target since we currently
5579	 have no way of allocating temporaries of variable size
5580	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5581	 So we assume here that something at a higher level has prevented a
5582	 clash.  This is somewhat bogus, but the best we can do.  Only
5583	 do this when X is BLKmode and when we are at the top level.  */
5584      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5585	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5586	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5587	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5588	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5589	      != INTEGER_CST)
5590	  && GET_MODE (x) == BLKmode)
5591      /* If X is in the outgoing argument area, it is always safe.  */
5592      || (GET_CODE (x) == MEM
5593	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5594	      || (GET_CODE (XEXP (x, 0)) == PLUS
5595		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5596    return 1;
5597
5598  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5599     find the underlying pseudo.  */
5600  if (GET_CODE (x) == SUBREG)
5601    {
5602      x = SUBREG_REG (x);
5603      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5604	return 0;
5605    }
5606
5607  /* A SAVE_EXPR might appear many times in the expression passed to the
5608     top-level safe_from_p call, and if it has a complex subexpression,
5609     examining it multiple times could result in a combinatorial explosion.
5610     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5611     with optimization took about 28 minutes to compile -- even though it was
5612     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5613     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5614     we have processed.  Note that the only test of top_p was above.  */
5615
5616  if (top_p)
5617    {
5618      int rtn;
5619      tree t;
5620
5621      save_expr_list = 0;
5622
5623      rtn = safe_from_p (x, exp, 0);
5624
5625      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5626	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5627
5628      return rtn;
5629    }
5630
5631  /* Now look at our tree code and possibly recurse.  */
5632  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5633    {
5634    case 'd':
5635      exp_rtl = DECL_RTL_IF_SET (exp);
5636      break;
5637
5638    case 'c':
5639      return 1;
5640
5641    case 'x':
5642      if (TREE_CODE (exp) == TREE_LIST)
5643	return ((TREE_VALUE (exp) == 0
5644		 || safe_from_p (x, TREE_VALUE (exp), 0))
5645		&& (TREE_CHAIN (exp) == 0
5646		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5647      else if (TREE_CODE (exp) == ERROR_MARK)
5648	return 1;	/* An already-visited SAVE_EXPR? */
5649      else
5650	return 0;
5651
5652    case '1':
5653      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5654
5655    case '2':
5656    case '<':
5657      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5658	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5659
5660    case 'e':
5661    case 'r':
5662      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5663	 the expression.  If it is set, we conflict iff we are that rtx or
5664	 both are in memory.  Otherwise, we check all operands of the
5665	 expression recursively.  */
5666
5667      switch (TREE_CODE (exp))
5668	{
5669	case ADDR_EXPR:
5670	  /* If the operand is static or we are static, we can't conflict.
5671	     Likewise if we don't conflict with the operand at all.  */
5672	  if (staticp (TREE_OPERAND (exp, 0))
5673	      || TREE_STATIC (exp)
5674	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5675	    return 1;
5676
5677	  /* Otherwise, the only way this can conflict is if we are taking
5678	     the address of a DECL a that address if part of X, which is
5679	     very rare.  */
5680	  exp = TREE_OPERAND (exp, 0);
5681	  if (DECL_P (exp))
5682	    {
5683	      if (!DECL_RTL_SET_P (exp)
5684		  || GET_CODE (DECL_RTL (exp)) != MEM)
5685		return 0;
5686	      else
5687		exp_rtl = XEXP (DECL_RTL (exp), 0);
5688	    }
5689	  break;
5690
5691	case INDIRECT_REF:
5692	  if (GET_CODE (x) == MEM
5693	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5694					get_alias_set (exp)))
5695	    return 0;
5696	  break;
5697
5698	case CALL_EXPR:
5699	  /* Assume that the call will clobber all hard registers and
5700	     all of memory.  */
5701	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5702	      || GET_CODE (x) == MEM)
5703	    return 0;
5704	  break;
5705
5706	case RTL_EXPR:
5707	  /* If a sequence exists, we would have to scan every instruction
5708	     in the sequence to see if it was safe.  This is probably not
5709	     worthwhile.  */
5710	  if (RTL_EXPR_SEQUENCE (exp))
5711	    return 0;
5712
5713	  exp_rtl = RTL_EXPR_RTL (exp);
5714	  break;
5715
5716	case WITH_CLEANUP_EXPR:
5717	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5718	  break;
5719
5720	case CLEANUP_POINT_EXPR:
5721	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5722
5723	case SAVE_EXPR:
5724	  exp_rtl = SAVE_EXPR_RTL (exp);
5725	  if (exp_rtl)
5726	    break;
5727
5728	  /* If we've already scanned this, don't do it again.  Otherwise,
5729	     show we've scanned it and record for clearing the flag if we're
5730	     going on.  */
5731	  if (TREE_PRIVATE (exp))
5732	    return 1;
5733
5734	  TREE_PRIVATE (exp) = 1;
5735	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5736	    {
5737	      TREE_PRIVATE (exp) = 0;
5738	      return 0;
5739	    }
5740
5741	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5742	  return 1;
5743
5744	case BIND_EXPR:
5745	  /* The only operand we look at is operand 1.  The rest aren't
5746	     part of the expression.  */
5747	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5748
5749	case METHOD_CALL_EXPR:
5750	  /* This takes an rtx argument, but shouldn't appear here.  */
5751	  abort ();
5752
5753	default:
5754	  break;
5755	}
5756
5757      /* If we have an rtx, we do not need to scan our operands.  */
5758      if (exp_rtl)
5759	break;
5760
5761      nops = first_rtl_op (TREE_CODE (exp));
5762      for (i = 0; i < nops; i++)
5763	if (TREE_OPERAND (exp, i) != 0
5764	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5765	  return 0;
5766
5767      /* If this is a language-specific tree code, it may require
5768	 special handling.  */
5769      if ((unsigned int) TREE_CODE (exp)
5770	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5771	  && !(*lang_hooks.safe_from_p) (x, exp))
5772	return 0;
5773    }
5774
5775  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5776     with it.  */
5777  if (exp_rtl)
5778    {
5779      if (GET_CODE (exp_rtl) == SUBREG)
5780	{
5781	  exp_rtl = SUBREG_REG (exp_rtl);
5782	  if (GET_CODE (exp_rtl) == REG
5783	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5784	    return 0;
5785	}
5786
5787      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5788	 are memory and they conflict.  */
5789      return ! (rtx_equal_p (x, exp_rtl)
5790		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5791		    && true_dependence (exp_rtl, VOIDmode, x,
5792					rtx_addr_varies_p)));
5793    }
5794
5795  /* If we reach here, it is safe.  */
5796  return 1;
5797}
5798
5799/* Subroutine of expand_expr: return rtx if EXP is a
5800   variable or parameter; else return 0.  */
5801
5802static rtx
5803var_rtx (exp)
5804     tree exp;
5805{
5806  STRIP_NOPS (exp);
5807  switch (TREE_CODE (exp))
5808    {
5809    case PARM_DECL:
5810    case VAR_DECL:
5811      return DECL_RTL (exp);
5812    default:
5813      return 0;
5814    }
5815}
5816
5817#ifdef MAX_INTEGER_COMPUTATION_MODE
5818
5819void
5820check_max_integer_computation_mode (exp)
5821     tree exp;
5822{
5823  enum tree_code code;
5824  enum machine_mode mode;
5825
5826  /* Strip any NOPs that don't change the mode.  */
5827  STRIP_NOPS (exp);
5828  code = TREE_CODE (exp);
5829
5830  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5831  if (code == NOP_EXPR
5832      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5833    return;
5834
5835  /* First check the type of the overall operation.   We need only look at
5836     unary, binary and relational operations.  */
5837  if (TREE_CODE_CLASS (code) == '1'
5838      || TREE_CODE_CLASS (code) == '2'
5839      || TREE_CODE_CLASS (code) == '<')
5840    {
5841      mode = TYPE_MODE (TREE_TYPE (exp));
5842      if (GET_MODE_CLASS (mode) == MODE_INT
5843	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5844	internal_error ("unsupported wide integer operation");
5845    }
5846
5847  /* Check operand of a unary op.  */
5848  if (TREE_CODE_CLASS (code) == '1')
5849    {
5850      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5851      if (GET_MODE_CLASS (mode) == MODE_INT
5852	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5853	internal_error ("unsupported wide integer operation");
5854    }
5855
5856  /* Check operands of a binary/comparison op.  */
5857  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5858    {
5859      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5860      if (GET_MODE_CLASS (mode) == MODE_INT
5861	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5862	internal_error ("unsupported wide integer operation");
5863
5864      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5865      if (GET_MODE_CLASS (mode) == MODE_INT
5866	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5867	internal_error ("unsupported wide integer operation");
5868    }
5869}
5870#endif
5871
5872/* Return the highest power of two that EXP is known to be a multiple of.
5873   This is used in updating alignment of MEMs in array references.  */
5874
5875static HOST_WIDE_INT
5876highest_pow2_factor (exp)
5877     tree exp;
5878{
5879  HOST_WIDE_INT c0, c1;
5880
5881  switch (TREE_CODE (exp))
5882    {
5883    case INTEGER_CST:
5884      /* We can find the lowest bit that's a one.  If the low
5885	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5886	 We need to handle this case since we can find it in a COND_EXPR,
5887	 a MIN_EXPR, or a MAX_EXPR.  If the constant overlows, we have an
5888	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5889	 later ICE.  */
5890      if (TREE_CONSTANT_OVERFLOW (exp))
5891	return BIGGEST_ALIGNMENT;
5892      else
5893	{
5894	  /* Note: tree_low_cst is intentionally not used here,
5895	     we don't care about the upper bits.  */
5896	  c0 = TREE_INT_CST_LOW (exp);
5897	  c0 &= -c0;
5898	  return c0 ? c0 : BIGGEST_ALIGNMENT;
5899	}
5900      break;
5901
5902    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
5903      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5904      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5905      return MIN (c0, c1);
5906
5907    case MULT_EXPR:
5908      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5909      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5910      return c0 * c1;
5911
5912    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
5913    case CEIL_DIV_EXPR:
5914      if (integer_pow2p (TREE_OPERAND (exp, 1))
5915	  && host_integerp (TREE_OPERAND (exp, 1), 1))
5916	{
5917	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5918	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5919	  return MAX (1, c0 / c1);
5920	}
5921      break;
5922
5923    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
5924    case SAVE_EXPR: case WITH_RECORD_EXPR:
5925      return highest_pow2_factor (TREE_OPERAND (exp, 0));
5926
5927    case COMPOUND_EXPR:
5928      return highest_pow2_factor (TREE_OPERAND (exp, 1));
5929
5930    case COND_EXPR:
5931      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5932      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5933      return MIN (c0, c1);
5934
5935    default:
5936      break;
5937    }
5938
5939  return 1;
5940}
5941
5942/* Similar, except that it is known that the expression must be a multiple
5943   of the alignment of TYPE.  */
5944
5945static HOST_WIDE_INT
5946highest_pow2_factor_for_type (type, exp)
5947     tree type;
5948     tree exp;
5949{
5950  HOST_WIDE_INT type_align, factor;
5951
5952  factor = highest_pow2_factor (exp);
5953  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
5954  return MAX (factor, type_align);
5955}
5956
5957/* Return an object on the placeholder list that matches EXP, a
5958   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
5959   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
5960   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
5961   is a location which initially points to a starting location in the
5962   placeholder list (zero means start of the list) and where a pointer into
5963   the placeholder list at which the object is found is placed.  */
5964
5965tree
5966find_placeholder (exp, plist)
5967     tree exp;
5968     tree *plist;
5969{
5970  tree type = TREE_TYPE (exp);
5971  tree placeholder_expr;
5972
5973  for (placeholder_expr
5974       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5975       placeholder_expr != 0;
5976       placeholder_expr = TREE_CHAIN (placeholder_expr))
5977    {
5978      tree need_type = TYPE_MAIN_VARIANT (type);
5979      tree elt;
5980
5981      /* Find the outermost reference that is of the type we want.  If none,
5982	 see if any object has a type that is a pointer to the type we
5983	 want.  */
5984      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5985	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5986		   || TREE_CODE (elt) == COND_EXPR)
5987		  ? TREE_OPERAND (elt, 1)
5988		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5989		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5990		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5991		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5992		  ? TREE_OPERAND (elt, 0) : 0))
5993	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5994	  {
5995	    if (plist)
5996	      *plist = placeholder_expr;
5997	    return elt;
5998	  }
5999
6000      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6001	   elt
6002	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6003	       || TREE_CODE (elt) == COND_EXPR)
6004	      ? TREE_OPERAND (elt, 1)
6005	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6006		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6007		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6008		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6009	      ? TREE_OPERAND (elt, 0) : 0))
6010	if (POINTER_TYPE_P (TREE_TYPE (elt))
6011	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6012		== need_type))
6013	  {
6014	    if (plist)
6015	      *plist = placeholder_expr;
6016	    return build1 (INDIRECT_REF, need_type, elt);
6017	  }
6018    }
6019
6020  return 0;
6021}
6022
6023/* expand_expr: generate code for computing expression EXP.
6024   An rtx for the computed value is returned.  The value is never null.
6025   In the case of a void EXP, const0_rtx is returned.
6026
6027   The value may be stored in TARGET if TARGET is nonzero.
6028   TARGET is just a suggestion; callers must assume that
6029   the rtx returned may not be the same as TARGET.
6030
6031   If TARGET is CONST0_RTX, it means that the value will be ignored.
6032
6033   If TMODE is not VOIDmode, it suggests generating the
6034   result in mode TMODE.  But this is done only when convenient.
6035   Otherwise, TMODE is ignored and the value generated in its natural mode.
6036   TMODE is just a suggestion; callers must assume that
6037   the rtx returned may not have mode TMODE.
6038
6039   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6040   probably will not be used.
6041
6042   If MODIFIER is EXPAND_SUM then when EXP is an addition
6043   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6044   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6045   products as above, or REG or MEM, or constant.
6046   Ordinarily in such cases we would output mul or add instructions
6047   and then return a pseudo reg containing the sum.
6048
6049   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6050   it also marks a label as absolutely required (it can't be dead).
6051   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6052   This is used for outputting expressions used in initializers.
6053
6054   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6055   with a constant address even if that address is not normally legitimate.
6056   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
6057
6058rtx
6059expand_expr (exp, target, tmode, modifier)
6060     tree exp;
6061     rtx target;
6062     enum machine_mode tmode;
6063     enum expand_modifier modifier;
6064{
6065  rtx op0, op1, temp;
6066  tree type = TREE_TYPE (exp);
6067  int unsignedp = TREE_UNSIGNED (type);
6068  enum machine_mode mode;
6069  enum tree_code code = TREE_CODE (exp);
6070  optab this_optab;
6071  rtx subtarget, original_target;
6072  int ignore;
6073  tree context;
6074
6075  /* Handle ERROR_MARK before anybody tries to access its type.  */
6076  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6077    {
6078      op0 = CONST0_RTX (tmode);
6079      if (op0 != 0)
6080	return op0;
6081      return const0_rtx;
6082    }
6083
6084  mode = TYPE_MODE (type);
6085  /* Use subtarget as the target for operand 0 of a binary operation.  */
6086  subtarget = get_subtarget (target);
6087  original_target = target;
6088  ignore = (target == const0_rtx
6089	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6090		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6091		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6092		&& TREE_CODE (type) == VOID_TYPE));
6093
6094  /* If we are going to ignore this result, we need only do something
6095     if there is a side-effect somewhere in the expression.  If there
6096     is, short-circuit the most common cases here.  Note that we must
6097     not call expand_expr with anything but const0_rtx in case this
6098     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6099
6100  if (ignore)
6101    {
6102      if (! TREE_SIDE_EFFECTS (exp))
6103	return const0_rtx;
6104
6105      /* Ensure we reference a volatile object even if value is ignored, but
6106	 don't do this if all we are doing is taking its address.  */
6107      if (TREE_THIS_VOLATILE (exp)
6108	  && TREE_CODE (exp) != FUNCTION_DECL
6109	  && mode != VOIDmode && mode != BLKmode
6110	  && modifier != EXPAND_CONST_ADDRESS)
6111	{
6112	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6113	  if (GET_CODE (temp) == MEM)
6114	    temp = copy_to_reg (temp);
6115	  return const0_rtx;
6116	}
6117
6118      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6119	  || code == INDIRECT_REF || code == BUFFER_REF)
6120	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6121			    modifier);
6122
6123      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6124	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6125	{
6126	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6127	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6128	  return const0_rtx;
6129	}
6130      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6131	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6132	/* If the second operand has no side effects, just evaluate
6133	   the first.  */
6134	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6135			    modifier);
6136      else if (code == BIT_FIELD_REF)
6137	{
6138	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6139	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6140	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6141	  return const0_rtx;
6142	}
6143
6144      target = 0;
6145    }
6146
6147#ifdef MAX_INTEGER_COMPUTATION_MODE
6148  /* Only check stuff here if the mode we want is different from the mode
6149     of the expression; if it's the same, check_max_integer_computiation_mode
6150     will handle it.  Do we really need to check this stuff at all?  */
6151
6152  if (target
6153      && GET_MODE (target) != mode
6154      && TREE_CODE (exp) != INTEGER_CST
6155      && TREE_CODE (exp) != PARM_DECL
6156      && TREE_CODE (exp) != ARRAY_REF
6157      && TREE_CODE (exp) != ARRAY_RANGE_REF
6158      && TREE_CODE (exp) != COMPONENT_REF
6159      && TREE_CODE (exp) != BIT_FIELD_REF
6160      && TREE_CODE (exp) != INDIRECT_REF
6161      && TREE_CODE (exp) != CALL_EXPR
6162      && TREE_CODE (exp) != VAR_DECL
6163      && TREE_CODE (exp) != RTL_EXPR)
6164    {
6165      enum machine_mode mode = GET_MODE (target);
6166
6167      if (GET_MODE_CLASS (mode) == MODE_INT
6168	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6169	internal_error ("unsupported wide integer operation");
6170    }
6171
6172  if (tmode != mode
6173      && TREE_CODE (exp) != INTEGER_CST
6174      && TREE_CODE (exp) != PARM_DECL
6175      && TREE_CODE (exp) != ARRAY_REF
6176      && TREE_CODE (exp) != ARRAY_RANGE_REF
6177      && TREE_CODE (exp) != COMPONENT_REF
6178      && TREE_CODE (exp) != BIT_FIELD_REF
6179      && TREE_CODE (exp) != INDIRECT_REF
6180      && TREE_CODE (exp) != VAR_DECL
6181      && TREE_CODE (exp) != CALL_EXPR
6182      && TREE_CODE (exp) != RTL_EXPR
6183      && GET_MODE_CLASS (tmode) == MODE_INT
6184      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6185    internal_error ("unsupported wide integer operation");
6186
6187  check_max_integer_computation_mode (exp);
6188#endif
6189
6190  /* If will do cse, generate all results into pseudo registers
6191     since 1) that allows cse to find more things
6192     and 2) otherwise cse could produce an insn the machine
6193     cannot support.  And exception is a CONSTRUCTOR into a multi-word
6194     MEM: that's much more likely to be most efficient into the MEM.  */
6195
6196  if (! cse_not_expected && mode != BLKmode && target
6197      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6198      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6199    target = subtarget;
6200
6201  switch (code)
6202    {
6203    case LABEL_DECL:
6204      {
6205	tree function = decl_function_context (exp);
6206	/* Handle using a label in a containing function.  */
6207	if (function != current_function_decl
6208	    && function != inline_function_decl && function != 0)
6209	  {
6210	    struct function *p = find_function_data (function);
6211	    p->expr->x_forced_labels
6212	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6213				   p->expr->x_forced_labels);
6214	  }
6215	else
6216	  {
6217	    if (modifier == EXPAND_INITIALIZER)
6218	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6219						 label_rtx (exp),
6220						 forced_labels);
6221	  }
6222
6223	temp = gen_rtx_MEM (FUNCTION_MODE,
6224			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6225	if (function != current_function_decl
6226	    && function != inline_function_decl && function != 0)
6227	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6228	return temp;
6229      }
6230
6231    case PARM_DECL:
6232      if (! DECL_RTL_SET_P (exp))
6233	{
6234	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6235	  return CONST0_RTX (mode);
6236	}
6237
6238      /* ... fall through ...  */
6239
6240    case VAR_DECL:
6241      /* If a static var's type was incomplete when the decl was written,
6242	 but the type is complete now, lay out the decl now.  */
6243      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6244	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6245	{
6246	  rtx value = DECL_RTL_IF_SET (exp);
6247
6248	  layout_decl (exp, 0);
6249
6250	  /* If the RTL was already set, update its mode and memory
6251	     attributes.  */
6252	  if (value != 0)
6253	    {
6254	      PUT_MODE (value, DECL_MODE (exp));
6255	      SET_DECL_RTL (exp, 0);
6256	      set_mem_attributes (value, exp, 1);
6257	      SET_DECL_RTL (exp, value);
6258	    }
6259	}
6260
6261      /* ... fall through ...  */
6262
6263    case FUNCTION_DECL:
6264    case RESULT_DECL:
6265      if (DECL_RTL (exp) == 0)
6266	abort ();
6267
6268      /* Ensure variable marked as used even if it doesn't go through
6269	 a parser.  If it hasn't be used yet, write out an external
6270	 definition.  */
6271      if (! TREE_USED (exp))
6272	{
6273	  assemble_external (exp);
6274	  TREE_USED (exp) = 1;
6275	}
6276
6277      /* Show we haven't gotten RTL for this yet.  */
6278      temp = 0;
6279
6280      /* Handle variables inherited from containing functions.  */
6281      context = decl_function_context (exp);
6282
6283      /* We treat inline_function_decl as an alias for the current function
6284	 because that is the inline function whose vars, types, etc.
6285	 are being merged into the current function.
6286	 See expand_inline_function.  */
6287
6288      if (context != 0 && context != current_function_decl
6289	  && context != inline_function_decl
6290	  /* If var is static, we don't need a static chain to access it.  */
6291	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6292		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6293	{
6294	  rtx addr;
6295
6296	  /* Mark as non-local and addressable.  */
6297	  DECL_NONLOCAL (exp) = 1;
6298	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6299	    abort ();
6300	  mark_addressable (exp);
6301	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6302	    abort ();
6303	  addr = XEXP (DECL_RTL (exp), 0);
6304	  if (GET_CODE (addr) == MEM)
6305	    addr
6306	      = replace_equiv_address (addr,
6307				       fix_lexical_addr (XEXP (addr, 0), exp));
6308	  else
6309	    addr = fix_lexical_addr (addr, exp);
6310
6311	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6312	}
6313
6314      /* This is the case of an array whose size is to be determined
6315	 from its initializer, while the initializer is still being parsed.
6316	 See expand_decl.  */
6317
6318      else if (GET_CODE (DECL_RTL (exp)) == MEM
6319	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6320	temp = validize_mem (DECL_RTL (exp));
6321
6322      /* If DECL_RTL is memory, we are in the normal case and either
6323	 the address is not valid or it is not a register and -fforce-addr
6324	 is specified, get the address into a register.  */
6325
6326      else if (GET_CODE (DECL_RTL (exp)) == MEM
6327	       && modifier != EXPAND_CONST_ADDRESS
6328	       && modifier != EXPAND_SUM
6329	       && modifier != EXPAND_INITIALIZER
6330	       && (! memory_address_p (DECL_MODE (exp),
6331				       XEXP (DECL_RTL (exp), 0))
6332		   || (flag_force_addr
6333		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6334	temp = replace_equiv_address (DECL_RTL (exp),
6335				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6336
6337      /* If we got something, return it.  But first, set the alignment
6338	 if the address is a register.  */
6339      if (temp != 0)
6340	{
6341	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6342	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6343
6344	  return temp;
6345	}
6346
6347      /* If the mode of DECL_RTL does not match that of the decl, it
6348	 must be a promoted value.  We return a SUBREG of the wanted mode,
6349	 but mark it so that we know that it was already extended.  */
6350
6351      if (GET_CODE (DECL_RTL (exp)) == REG
6352	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6353	{
6354	  /* Get the signedness used for this variable.  Ensure we get the
6355	     same mode we got when the variable was declared.  */
6356	  if (GET_MODE (DECL_RTL (exp))
6357	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6358			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6359	    abort ();
6360
6361	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6362	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6363	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6364	  return temp;
6365	}
6366
6367      return DECL_RTL (exp);
6368
6369    case INTEGER_CST:
6370      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6371				 TREE_INT_CST_HIGH (exp), mode);
6372
6373      /* ??? If overflow is set, fold will have done an incomplete job,
6374	 which can result in (plus xx (const_int 0)), which can get
6375	 simplified by validate_replace_rtx during virtual register
6376	 instantiation, which can result in unrecognizable insns.
6377	 Avoid this by forcing all overflows into registers.  */
6378      if (TREE_CONSTANT_OVERFLOW (exp)
6379	  && modifier != EXPAND_INITIALIZER)
6380	temp = force_reg (mode, temp);
6381
6382      return temp;
6383
6384    case CONST_DECL:
6385      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6386
6387    case REAL_CST:
6388      /* If optimized, generate immediate CONST_DOUBLE
6389	 which will be turned into memory by reload if necessary.
6390
6391	 We used to force a register so that loop.c could see it.  But
6392	 this does not allow gen_* patterns to perform optimizations with
6393	 the constants.  It also produces two insns in cases like "x = 1.0;".
6394	 On most machines, floating-point constants are not permitted in
6395	 many insns, so we'd end up copying it to a register in any case.
6396
6397	 Now, we do the copying in expand_binop, if appropriate.  */
6398      return immed_real_const (exp);
6399
6400    case COMPLEX_CST:
6401    case STRING_CST:
6402      if (! TREE_CST_RTL (exp))
6403	output_constant_def (exp, 1);
6404
6405      /* TREE_CST_RTL probably contains a constant address.
6406	 On RISC machines where a constant address isn't valid,
6407	 make some insns to get that address into a register.  */
6408      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6409	  && modifier != EXPAND_CONST_ADDRESS
6410	  && modifier != EXPAND_INITIALIZER
6411	  && modifier != EXPAND_SUM
6412	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6413	      || (flag_force_addr
6414		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6415	return replace_equiv_address (TREE_CST_RTL (exp),
6416				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6417      return TREE_CST_RTL (exp);
6418
6419    case EXPR_WITH_FILE_LOCATION:
6420      {
6421	rtx to_return;
6422	const char *saved_input_filename = input_filename;
6423	int saved_lineno = lineno;
6424	input_filename = EXPR_WFL_FILENAME (exp);
6425	lineno = EXPR_WFL_LINENO (exp);
6426	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6427	  emit_line_note (input_filename, lineno);
6428	/* Possibly avoid switching back and forth here.  */
6429	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6430	input_filename = saved_input_filename;
6431	lineno = saved_lineno;
6432	return to_return;
6433      }
6434
6435    case SAVE_EXPR:
6436      context = decl_function_context (exp);
6437
6438      /* If this SAVE_EXPR was at global context, assume we are an
6439	 initialization function and move it into our context.  */
6440      if (context == 0)
6441	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6442
6443      /* We treat inline_function_decl as an alias for the current function
6444	 because that is the inline function whose vars, types, etc.
6445	 are being merged into the current function.
6446	 See expand_inline_function.  */
6447      if (context == current_function_decl || context == inline_function_decl)
6448	context = 0;
6449
6450      /* If this is non-local, handle it.  */
6451      if (context)
6452	{
6453	  /* The following call just exists to abort if the context is
6454	     not of a containing function.  */
6455	  find_function_data (context);
6456
6457	  temp = SAVE_EXPR_RTL (exp);
6458	  if (temp && GET_CODE (temp) == REG)
6459	    {
6460	      put_var_into_stack (exp);
6461	      temp = SAVE_EXPR_RTL (exp);
6462	    }
6463	  if (temp == 0 || GET_CODE (temp) != MEM)
6464	    abort ();
6465	  return
6466	    replace_equiv_address (temp,
6467				   fix_lexical_addr (XEXP (temp, 0), exp));
6468	}
6469      if (SAVE_EXPR_RTL (exp) == 0)
6470	{
6471	  if (mode == VOIDmode)
6472	    temp = const0_rtx;
6473	  else
6474	    temp = assign_temp (build_qualified_type (type,
6475						      (TYPE_QUALS (type)
6476						       | TYPE_QUAL_CONST)),
6477				3, 0, 0);
6478
6479	  SAVE_EXPR_RTL (exp) = temp;
6480	  if (!optimize && GET_CODE (temp) == REG)
6481	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6482						save_expr_regs);
6483
6484	  /* If the mode of TEMP does not match that of the expression, it
6485	     must be a promoted value.  We pass store_expr a SUBREG of the
6486	     wanted mode but mark it so that we know that it was already
6487	     extended.  Note that `unsignedp' was modified above in
6488	     this case.  */
6489
6490	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6491	    {
6492	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6493	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6494	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6495	    }
6496
6497	  if (temp == const0_rtx)
6498	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6499	  else
6500	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6501
6502	  TREE_USED (exp) = 1;
6503	}
6504
6505      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6506	 must be a promoted value.  We return a SUBREG of the wanted mode,
6507	 but mark it so that we know that it was already extended.  */
6508
6509      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6510	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6511	{
6512	  /* Compute the signedness and make the proper SUBREG.  */
6513	  promote_mode (type, mode, &unsignedp, 0);
6514	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6515	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6516	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6517	  return temp;
6518	}
6519
6520      return SAVE_EXPR_RTL (exp);
6521
6522    case UNSAVE_EXPR:
6523      {
6524	rtx temp;
6525	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6526	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6527	return temp;
6528      }
6529
6530    case PLACEHOLDER_EXPR:
6531      {
6532	tree old_list = placeholder_list;
6533	tree placeholder_expr = 0;
6534
6535	exp = find_placeholder (exp, &placeholder_expr);
6536	if (exp == 0)
6537	  abort ();
6538
6539	placeholder_list = TREE_CHAIN (placeholder_expr);
6540	temp = expand_expr (exp, original_target, tmode, modifier);
6541	placeholder_list = old_list;
6542	return temp;
6543      }
6544
6545      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6546      abort ();
6547
6548    case WITH_RECORD_EXPR:
6549      /* Put the object on the placeholder list, expand our first operand,
6550	 and pop the list.  */
6551      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6552				    placeholder_list);
6553      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6554			    modifier);
6555      placeholder_list = TREE_CHAIN (placeholder_list);
6556      return target;
6557
6558    case GOTO_EXPR:
6559      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6560	expand_goto (TREE_OPERAND (exp, 0));
6561      else
6562	expand_computed_goto (TREE_OPERAND (exp, 0));
6563      return const0_rtx;
6564
6565    case EXIT_EXPR:
6566      expand_exit_loop_if_false (NULL,
6567				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6568      return const0_rtx;
6569
6570    case LABELED_BLOCK_EXPR:
6571      if (LABELED_BLOCK_BODY (exp))
6572	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6573      /* Should perhaps use expand_label, but this is simpler and safer.  */
6574      do_pending_stack_adjust ();
6575      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6576      return const0_rtx;
6577
6578    case EXIT_BLOCK_EXPR:
6579      if (EXIT_BLOCK_RETURN (exp))
6580	sorry ("returned value in block_exit_expr");
6581      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6582      return const0_rtx;
6583
6584    case LOOP_EXPR:
6585      push_temp_slots ();
6586      expand_start_loop (1);
6587      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6588      expand_end_loop ();
6589      pop_temp_slots ();
6590
6591      return const0_rtx;
6592
6593    case BIND_EXPR:
6594      {
6595	tree vars = TREE_OPERAND (exp, 0);
6596	int vars_need_expansion = 0;
6597
6598	/* Need to open a binding contour here because
6599	   if there are any cleanups they must be contained here.  */
6600	expand_start_bindings (2);
6601
6602	/* Mark the corresponding BLOCK for output in its proper place.  */
6603	if (TREE_OPERAND (exp, 2) != 0
6604	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6605	  insert_block (TREE_OPERAND (exp, 2));
6606
6607	/* If VARS have not yet been expanded, expand them now.  */
6608	while (vars)
6609	  {
6610	    if (!DECL_RTL_SET_P (vars))
6611	      {
6612		vars_need_expansion = 1;
6613		expand_decl (vars);
6614	      }
6615	    expand_decl_init (vars);
6616	    vars = TREE_CHAIN (vars);
6617	  }
6618
6619	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6620
6621	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6622
6623	return temp;
6624      }
6625
6626    case RTL_EXPR:
6627      if (RTL_EXPR_SEQUENCE (exp))
6628	{
6629	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6630	    abort ();
6631	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6632	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6633	}
6634      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6635      free_temps_for_rtl_expr (exp);
6636      return RTL_EXPR_RTL (exp);
6637
6638    case CONSTRUCTOR:
6639      /* If we don't need the result, just ensure we evaluate any
6640	 subexpressions.  */
6641      if (ignore)
6642	{
6643	  tree elt;
6644
6645	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6646	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6647
6648	  return const0_rtx;
6649	}
6650
6651      /* All elts simple constants => refer to a constant in memory.  But
6652	 if this is a non-BLKmode mode, let it store a field at a time
6653	 since that should make a CONST_INT or CONST_DOUBLE when we
6654	 fold.  Likewise, if we have a target we can use, it is best to
6655	 store directly into the target unless the type is large enough
6656	 that memcpy will be used.  If we are making an initializer and
6657	 all operands are constant, put it in memory as well.  */
6658      else if ((TREE_STATIC (exp)
6659		&& ((mode == BLKmode
6660		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6661		    || TREE_ADDRESSABLE (exp)
6662		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6663			&& (! MOVE_BY_PIECES_P
6664			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6665			     TYPE_ALIGN (type)))
6666			&& ! mostly_zeros_p (exp))))
6667	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6668	{
6669	  rtx constructor = output_constant_def (exp, 1);
6670
6671	  if (modifier != EXPAND_CONST_ADDRESS
6672	      && modifier != EXPAND_INITIALIZER
6673	      && modifier != EXPAND_SUM)
6674	    constructor = validize_mem (constructor);
6675
6676	  return constructor;
6677	}
6678      else
6679	{
6680	  /* Handle calls that pass values in multiple non-contiguous
6681	     locations.  The Irix 6 ABI has examples of this.  */
6682	  if (target == 0 || ! safe_from_p (target, exp, 1)
6683	      || GET_CODE (target) == PARALLEL)
6684	    target
6685	      = assign_temp (build_qualified_type (type,
6686						   (TYPE_QUALS (type)
6687						    | (TREE_READONLY (exp)
6688						       * TYPE_QUAL_CONST))),
6689			     0, TREE_ADDRESSABLE (exp), 1);
6690
6691	  store_constructor (exp, target, 0, int_expr_size (exp));
6692	  return target;
6693	}
6694
6695    case INDIRECT_REF:
6696      {
6697	tree exp1 = TREE_OPERAND (exp, 0);
6698	tree index;
6699	tree string = string_constant (exp1, &index);
6700
6701	/* Try to optimize reads from const strings.  */
6702 	if (string
6703 	    && TREE_CODE (string) == STRING_CST
6704 	    && TREE_CODE (index) == INTEGER_CST
6705	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6706 	    && GET_MODE_CLASS (mode) == MODE_INT
6707 	    && GET_MODE_SIZE (mode) == 1
6708	    && modifier != EXPAND_WRITE)
6709 	  return
6710	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (string)
6711					 [TREE_INT_CST_LOW (index)], mode));
6712
6713	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6714	op0 = memory_address (mode, op0);
6715	temp = gen_rtx_MEM (mode, op0);
6716	set_mem_attributes (temp, exp, 0);
6717
6718	/* If we are writing to this object and its type is a record with
6719	   readonly fields, we must mark it as readonly so it will
6720	   conflict with readonly references to those fields.  */
6721	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6722	  RTX_UNCHANGING_P (temp) = 1;
6723
6724	return temp;
6725      }
6726
6727    case ARRAY_REF:
6728      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6729	abort ();
6730
6731      {
6732	tree array = TREE_OPERAND (exp, 0);
6733	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6734	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6735	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6736	HOST_WIDE_INT i;
6737
6738	/* Optimize the special-case of a zero lower bound.
6739
6740	   We convert the low_bound to sizetype to avoid some problems
6741	   with constant folding.  (E.g. suppose the lower bound is 1,
6742	   and its mode is QI.  Without the conversion,  (ARRAY
6743	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6744	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6745
6746	if (! integer_zerop (low_bound))
6747	  index = size_diffop (index, convert (sizetype, low_bound));
6748
6749	/* Fold an expression like: "foo"[2].
6750	   This is not done in fold so it won't happen inside &.
6751	   Don't fold if this is for wide characters since it's too
6752	   difficult to do correctly and this is a very rare case.  */
6753
6754	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6755	    && TREE_CODE (array) == STRING_CST
6756	    && TREE_CODE (index) == INTEGER_CST
6757	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6758	    && GET_MODE_CLASS (mode) == MODE_INT
6759	    && GET_MODE_SIZE (mode) == 1)
6760	  return
6761	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (array)
6762					 [TREE_INT_CST_LOW (index)], mode));
6763
6764	/* If this is a constant index into a constant array,
6765	   just get the value from the array.  Handle both the cases when
6766	   we have an explicit constructor and when our operand is a variable
6767	   that was declared const.  */
6768
6769	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6770	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6771	    && TREE_CODE (index) == INTEGER_CST
6772	    && 0 > compare_tree_int (index,
6773				     list_length (CONSTRUCTOR_ELTS
6774						  (TREE_OPERAND (exp, 0)))))
6775	  {
6776	    tree elem;
6777
6778	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6779		 i = TREE_INT_CST_LOW (index);
6780		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6781	      ;
6782
6783	    if (elem)
6784	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6785				  modifier);
6786	  }
6787
6788	else if (optimize >= 1
6789		 && modifier != EXPAND_CONST_ADDRESS
6790		 && modifier != EXPAND_INITIALIZER
6791		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6792		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6793		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6794	  {
6795	    if (TREE_CODE (index) == INTEGER_CST)
6796	      {
6797		tree init = DECL_INITIAL (array);
6798
6799		if (TREE_CODE (init) == CONSTRUCTOR)
6800		  {
6801		    tree elem;
6802
6803		    for (elem = CONSTRUCTOR_ELTS (init);
6804			 (elem
6805			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6806			 elem = TREE_CHAIN (elem))
6807		      ;
6808
6809		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6810		      return expand_expr (fold (TREE_VALUE (elem)), target,
6811					  tmode, modifier);
6812		  }
6813		else if (TREE_CODE (init) == STRING_CST
6814			 && 0 > compare_tree_int (index,
6815						  TREE_STRING_LENGTH (init)))
6816		  {
6817		    tree type = TREE_TYPE (TREE_TYPE (init));
6818		    enum machine_mode mode = TYPE_MODE (type);
6819
6820		    if (GET_MODE_CLASS (mode) == MODE_INT
6821			&& GET_MODE_SIZE (mode) == 1)
6822		      return GEN_INT (trunc_int_for_mode
6823				      (TREE_STRING_POINTER (init)
6824				       [TREE_INT_CST_LOW (index)], mode));
6825		  }
6826	      }
6827	  }
6828      }
6829      /* Fall through.  */
6830
6831    case COMPONENT_REF:
6832    case BIT_FIELD_REF:
6833    case ARRAY_RANGE_REF:
6834      /* If the operand is a CONSTRUCTOR, we can just extract the
6835	 appropriate field if it is present.  Don't do this if we have
6836	 already written the data since we want to refer to that copy
6837	 and varasm.c assumes that's what we'll do.  */
6838      if (code == COMPONENT_REF
6839	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6840	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6841	{
6842	  tree elt;
6843
6844	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6845	       elt = TREE_CHAIN (elt))
6846	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6847		/* We can normally use the value of the field in the
6848		   CONSTRUCTOR.  However, if this is a bitfield in
6849		   an integral mode that we can fit in a HOST_WIDE_INT,
6850		   we must mask only the number of bits in the bitfield,
6851		   since this is done implicitly by the constructor.  If
6852		   the bitfield does not meet either of those conditions,
6853		   we can't do this optimization.  */
6854		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6855		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6856			 == MODE_INT)
6857			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6858			    <= HOST_BITS_PER_WIDE_INT))))
6859	      {
6860		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6861		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6862		  {
6863		    HOST_WIDE_INT bitsize
6864		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6865		    enum machine_mode imode
6866		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6867
6868		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6869		      {
6870			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6871			op0 = expand_and (imode, op0, op1, target);
6872		      }
6873		    else
6874		      {
6875			tree count
6876			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6877					 0);
6878
6879			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6880					    target, 0);
6881			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6882					    target, 0);
6883		      }
6884		  }
6885
6886		return op0;
6887	      }
6888	}
6889
6890      {
6891	enum machine_mode mode1;
6892	HOST_WIDE_INT bitsize, bitpos;
6893	tree offset;
6894	int volatilep = 0;
6895	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6896					&mode1, &unsignedp, &volatilep);
6897	rtx orig_op0;
6898
6899	/* If we got back the original object, something is wrong.  Perhaps
6900	   we are evaluating an expression too early.  In any event, don't
6901	   infinitely recurse.  */
6902	if (tem == exp)
6903	  abort ();
6904
6905	/* If TEM's type is a union of variable size, pass TARGET to the inner
6906	   computation, since it will need a temporary and TARGET is known
6907	   to have to do.  This occurs in unchecked conversion in Ada.  */
6908
6909	orig_op0 = op0
6910	  = expand_expr (tem,
6911			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6912			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6913			      != INTEGER_CST)
6914			  ? target : NULL_RTX),
6915			 VOIDmode,
6916			 (modifier == EXPAND_INITIALIZER
6917			  || modifier == EXPAND_CONST_ADDRESS)
6918			 ? modifier : EXPAND_NORMAL);
6919
6920	/* If this is a constant, put it into a register if it is a
6921	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
6922	if (CONSTANT_P (op0))
6923	  {
6924	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6925	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6926		&& offset == 0)
6927	      op0 = force_reg (mode, op0);
6928	    else
6929	      op0 = validize_mem (force_const_mem (mode, op0));
6930	  }
6931
6932	if (offset != 0)
6933	  {
6934	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6935
6936	    /* If this object is in a register, put it into memory.
6937	       This case can't occur in C, but can in Ada if we have
6938	       unchecked conversion of an expression from a scalar type to
6939	       an array or record type.  */
6940	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6941		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6942	      {
6943		/* If the operand is a SAVE_EXPR, we can deal with this by
6944		   forcing the SAVE_EXPR into memory.  */
6945		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6946		  {
6947		    put_var_into_stack (TREE_OPERAND (exp, 0));
6948		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6949		  }
6950		else
6951		  {
6952		    tree nt
6953		      = build_qualified_type (TREE_TYPE (tem),
6954					      (TYPE_QUALS (TREE_TYPE (tem))
6955					       | TYPE_QUAL_CONST));
6956		    rtx memloc = assign_temp (nt, 1, 1, 1);
6957
6958		    emit_move_insn (memloc, op0);
6959		    op0 = memloc;
6960		  }
6961	      }
6962
6963	    if (GET_CODE (op0) != MEM)
6964	      abort ();
6965
6966#ifdef POINTERS_EXTEND_UNSIGNED
6967	    if (GET_MODE (offset_rtx) != Pmode)
6968	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
6969#else
6970	    if (GET_MODE (offset_rtx) != ptr_mode)
6971	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6972#endif
6973
6974	    /* A constant address in OP0 can have VOIDmode, we must not try
6975	       to call force_reg for that case.  Avoid that case.  */
6976	    if (GET_CODE (op0) == MEM
6977		&& GET_MODE (op0) == BLKmode
6978		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6979		&& bitsize != 0
6980		&& (bitpos % bitsize) == 0
6981		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6982		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6983	      {
6984		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6985		bitpos = 0;
6986	      }
6987
6988	    op0 = offset_address (op0, offset_rtx,
6989				  highest_pow2_factor (offset));
6990	  }
6991
6992	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6993	   record its alignment as BIGGEST_ALIGNMENT.  */
6994	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
6995	    && is_aligning_offset (offset, tem))
6996	  set_mem_align (op0, BIGGEST_ALIGNMENT);
6997
6998	/* Don't forget about volatility even if this is a bitfield.  */
6999	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7000	  {
7001	    if (op0 == orig_op0)
7002	      op0 = copy_rtx (op0);
7003
7004	    MEM_VOLATILE_P (op0) = 1;
7005	  }
7006
7007	/* The following code doesn't handle CONCAT.
7008	   Assume only bitpos == 0 can be used for CONCAT, due to
7009	   one element arrays having the same mode as its element.  */
7010	if (GET_CODE (op0) == CONCAT)
7011	  {
7012	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7013	      abort ();
7014	    return op0;
7015	  }
7016
7017	/* In cases where an aligned union has an unaligned object
7018	   as a field, we might be extracting a BLKmode value from
7019	   an integer-mode (e.g., SImode) object.  Handle this case
7020	   by doing the extract into an object as wide as the field
7021	   (which we know to be the width of a basic mode), then
7022	   storing into memory, and changing the mode to BLKmode.  */
7023	if (mode1 == VOIDmode
7024	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7025	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7026		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7027		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7028		&& modifier != EXPAND_CONST_ADDRESS
7029		&& modifier != EXPAND_INITIALIZER)
7030	    /* If the field isn't aligned enough to fetch as a memref,
7031	       fetch it as a bit field.  */
7032	    || (mode1 != BLKmode
7033		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7034		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7035		     < GET_MODE_ALIGNMENT (mode))
7036		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7037	    /* If the type and the field are a constant size and the
7038	       size of the type isn't the same size as the bitfield,
7039	       we must use bitfield operations.  */
7040	    || (bitsize >= 0
7041		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7042		    == INTEGER_CST)
7043		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7044					  bitsize)))
7045	  {
7046	    enum machine_mode ext_mode = mode;
7047
7048	    if (ext_mode == BLKmode
7049		&& ! (target != 0 && GET_CODE (op0) == MEM
7050		      && GET_CODE (target) == MEM
7051		      && bitpos % BITS_PER_UNIT == 0))
7052	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7053
7054	    if (ext_mode == BLKmode)
7055	      {
7056		/* In this case, BITPOS must start at a byte boundary and
7057		   TARGET, if specified, must be a MEM.  */
7058		if (GET_CODE (op0) != MEM
7059		    || (target != 0 && GET_CODE (target) != MEM)
7060		    || bitpos % BITS_PER_UNIT != 0)
7061		  abort ();
7062
7063		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7064		if (target == 0)
7065		  target = assign_temp (type, 0, 1, 1);
7066
7067		emit_block_move (target, op0,
7068				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7069					  / BITS_PER_UNIT));
7070
7071		return target;
7072	      }
7073
7074	    op0 = validize_mem (op0);
7075
7076	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7077	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7078
7079	    op0 = extract_bit_field (op0, bitsize, bitpos,
7080				     unsignedp, target, ext_mode, ext_mode,
7081				     int_size_in_bytes (TREE_TYPE (tem)));
7082
7083	    /* If the result is a record type and BITSIZE is narrower than
7084	       the mode of OP0, an integral mode, and this is a big endian
7085	       machine, we must put the field into the high-order bits.  */
7086	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7087		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7088		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7089	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7090				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7091					    - bitsize),
7092				  op0, 1);
7093
7094	    if (mode == BLKmode)
7095	      {
7096		rtx new = assign_temp (build_qualified_type
7097				       (type_for_mode (ext_mode, 0),
7098					TYPE_QUAL_CONST), 0, 1, 1);
7099
7100		emit_move_insn (new, op0);
7101		op0 = copy_rtx (new);
7102		PUT_MODE (op0, BLKmode);
7103		set_mem_attributes (op0, exp, 1);
7104	      }
7105
7106	    return op0;
7107	  }
7108
7109	/* If the result is BLKmode, use that to access the object
7110	   now as well.  */
7111	if (mode == BLKmode)
7112	  mode1 = BLKmode;
7113
7114	/* Get a reference to just this component.  */
7115	if (modifier == EXPAND_CONST_ADDRESS
7116	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7117	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7118	else
7119	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7120
7121	if (op0 == orig_op0)
7122	  op0 = copy_rtx (op0);
7123
7124	set_mem_attributes (op0, exp, 0);
7125	if (GET_CODE (XEXP (op0, 0)) == REG)
7126	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7127
7128	MEM_VOLATILE_P (op0) |= volatilep;
7129	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7130	    || modifier == EXPAND_CONST_ADDRESS
7131	    || modifier == EXPAND_INITIALIZER)
7132	  return op0;
7133	else if (target == 0)
7134	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7135
7136	convert_move (target, op0, unsignedp);
7137	return target;
7138      }
7139
7140    case VTABLE_REF:
7141      {
7142	rtx insn, before = get_last_insn (), vtbl_ref;
7143
7144	/* Evaluate the interior expression.  */
7145	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7146				 tmode, modifier);
7147
7148	/* Get or create an instruction off which to hang a note.  */
7149	if (REG_P (subtarget))
7150	  {
7151	    target = subtarget;
7152	    insn = get_last_insn ();
7153	    if (insn == before)
7154	      abort ();
7155	    if (! INSN_P (insn))
7156	      insn = prev_nonnote_insn (insn);
7157	  }
7158	else
7159	  {
7160	    target = gen_reg_rtx (GET_MODE (subtarget));
7161	    insn = emit_move_insn (target, subtarget);
7162	  }
7163
7164	/* Collect the data for the note.  */
7165	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7166	vtbl_ref = plus_constant (vtbl_ref,
7167				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7168	/* Discard the initial CONST that was added.  */
7169	vtbl_ref = XEXP (vtbl_ref, 0);
7170
7171	REG_NOTES (insn)
7172	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7173
7174	return target;
7175      }
7176
7177      /* Intended for a reference to a buffer of a file-object in Pascal.
7178	 But it's not certain that a special tree code will really be
7179	 necessary for these.  INDIRECT_REF might work for them.  */
7180    case BUFFER_REF:
7181      abort ();
7182
7183    case IN_EXPR:
7184      {
7185	/* Pascal set IN expression.
7186
7187	   Algorithm:
7188	       rlo       = set_low - (set_low%bits_per_word);
7189	       the_word  = set [ (index - rlo)/bits_per_word ];
7190	       bit_index = index % bits_per_word;
7191	       bitmask   = 1 << bit_index;
7192	       return !!(the_word & bitmask);  */
7193
7194	tree set = TREE_OPERAND (exp, 0);
7195	tree index = TREE_OPERAND (exp, 1);
7196	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7197	tree set_type = TREE_TYPE (set);
7198	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7199	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7200	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7201	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7202	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7203	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7204	rtx setaddr = XEXP (setval, 0);
7205	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7206	rtx rlow;
7207	rtx diff, quo, rem, addr, bit, result;
7208
7209	/* If domain is empty, answer is no.  Likewise if index is constant
7210	   and out of bounds.  */
7211	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7212	     && TREE_CODE (set_low_bound) == INTEGER_CST
7213	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7214	     || (TREE_CODE (index) == INTEGER_CST
7215		 && TREE_CODE (set_low_bound) == INTEGER_CST
7216		 && tree_int_cst_lt (index, set_low_bound))
7217	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7218		 && TREE_CODE (index) == INTEGER_CST
7219		 && tree_int_cst_lt (set_high_bound, index))))
7220	  return const0_rtx;
7221
7222	if (target == 0)
7223	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7224
7225	/* If we get here, we have to generate the code for both cases
7226	   (in range and out of range).  */
7227
7228	op0 = gen_label_rtx ();
7229	op1 = gen_label_rtx ();
7230
7231	if (! (GET_CODE (index_val) == CONST_INT
7232	       && GET_CODE (lo_r) == CONST_INT))
7233	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7234				   GET_MODE (index_val), iunsignedp, op1);
7235
7236	if (! (GET_CODE (index_val) == CONST_INT
7237	       && GET_CODE (hi_r) == CONST_INT))
7238	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7239				   GET_MODE (index_val), iunsignedp, op1);
7240
7241	/* Calculate the element number of bit zero in the first word
7242	   of the set.  */
7243	if (GET_CODE (lo_r) == CONST_INT)
7244	  rlow = GEN_INT (INTVAL (lo_r)
7245			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7246	else
7247	  rlow = expand_binop (index_mode, and_optab, lo_r,
7248			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7249			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7250
7251	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7252			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7253
7254	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7255			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7256	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7257			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7258
7259	addr = memory_address (byte_mode,
7260			       expand_binop (index_mode, add_optab, diff,
7261					     setaddr, NULL_RTX, iunsignedp,
7262					     OPTAB_LIB_WIDEN));
7263
7264	/* Extract the bit we want to examine.  */
7265	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7266			    gen_rtx_MEM (byte_mode, addr),
7267			    make_tree (TREE_TYPE (index), rem),
7268			    NULL_RTX, 1);
7269	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7270			       GET_MODE (target) == byte_mode ? target : 0,
7271			       1, OPTAB_LIB_WIDEN);
7272
7273	if (result != target)
7274	  convert_move (target, result, 1);
7275
7276	/* Output the code to handle the out-of-range case.  */
7277	emit_jump (op0);
7278	emit_label (op1);
7279	emit_move_insn (target, const0_rtx);
7280	emit_label (op0);
7281	return target;
7282      }
7283
7284    case WITH_CLEANUP_EXPR:
7285      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7286	{
7287	  WITH_CLEANUP_EXPR_RTL (exp)
7288	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7289	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7290
7291	  /* That's it for this cleanup.  */
7292	  TREE_OPERAND (exp, 1) = 0;
7293	}
7294      return WITH_CLEANUP_EXPR_RTL (exp);
7295
7296    case CLEANUP_POINT_EXPR:
7297      {
7298	/* Start a new binding layer that will keep track of all cleanup
7299	   actions to be performed.  */
7300	expand_start_bindings (2);
7301
7302	target_temp_slot_level = temp_slot_level;
7303
7304	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7305	/* If we're going to use this value, load it up now.  */
7306	if (! ignore)
7307	  op0 = force_not_mem (op0);
7308	preserve_temp_slots (op0);
7309	expand_end_bindings (NULL_TREE, 0, 0);
7310      }
7311      return op0;
7312
7313    case CALL_EXPR:
7314      /* Check for a built-in function.  */
7315      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7316	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7317	      == FUNCTION_DECL)
7318	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7319        {
7320	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7321	      == BUILT_IN_FRONTEND)
7322	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7323	  else
7324	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7325	}
7326
7327      return expand_call (exp, target, ignore);
7328
7329    case NON_LVALUE_EXPR:
7330    case NOP_EXPR:
7331    case CONVERT_EXPR:
7332    case REFERENCE_EXPR:
7333      if (TREE_OPERAND (exp, 0) == error_mark_node)
7334	return const0_rtx;
7335
7336      if (TREE_CODE (type) == UNION_TYPE)
7337	{
7338	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7339
7340	  /* If both input and output are BLKmode, this conversion isn't doing
7341	     anything except possibly changing memory attribute.  */
7342	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7343	    {
7344	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7345					modifier);
7346
7347	      result = copy_rtx (result);
7348	      set_mem_attributes (result, exp, 0);
7349	      return result;
7350	    }
7351
7352	  if (target == 0)
7353	    target = assign_temp (type, 0, 1, 1);
7354
7355	  if (GET_CODE (target) == MEM)
7356	    /* Store data into beginning of memory target.  */
7357	    store_expr (TREE_OPERAND (exp, 0),
7358			adjust_address (target, TYPE_MODE (valtype), 0), 0);
7359
7360	  else if (GET_CODE (target) == REG)
7361	    /* Store this field into a union of the proper type.  */
7362	    store_field (target,
7363			 MIN ((int_size_in_bytes (TREE_TYPE
7364						  (TREE_OPERAND (exp, 0)))
7365			       * BITS_PER_UNIT),
7366			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7367			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7368			 VOIDmode, 0, type, 0);
7369	  else
7370	    abort ();
7371
7372	  /* Return the entire union.  */
7373	  return target;
7374	}
7375
7376      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7377	{
7378	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7379			     modifier);
7380
7381	  /* If the signedness of the conversion differs and OP0 is
7382	     a promoted SUBREG, clear that indication since we now
7383	     have to do the proper extension.  */
7384	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7385	      && GET_CODE (op0) == SUBREG)
7386	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7387
7388	  return op0;
7389	}
7390
7391      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7392      if (GET_MODE (op0) == mode)
7393	return op0;
7394
7395      /* If OP0 is a constant, just convert it into the proper mode.  */
7396      if (CONSTANT_P (op0))
7397	{
7398	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7399	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7400
7401          if (modifier == EXPAND_INITIALIZER)
7402	    return simplify_gen_subreg (mode, op0, inner_mode,
7403					subreg_lowpart_offset (mode,
7404							       inner_mode));
7405	  else
7406	    return convert_modes (mode, inner_mode, op0,
7407				  TREE_UNSIGNED (inner_type));
7408	}
7409
7410      if (modifier == EXPAND_INITIALIZER)
7411	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7412
7413      if (target == 0)
7414	return
7415	  convert_to_mode (mode, op0,
7416			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7417      else
7418	convert_move (target, op0,
7419		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7420      return target;
7421
7422    case VIEW_CONVERT_EXPR:
7423      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7424
7425      /* If the input and output modes are both the same, we are done.
7426	 Otherwise, if neither mode is BLKmode and both are within a word, we
7427	 can use gen_lowpart.  If neither is true, make sure the operand is
7428	 in memory and convert the MEM to the new mode.  */
7429      if (TYPE_MODE (type) == GET_MODE (op0))
7430	;
7431      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7432	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7433	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7434	op0 = gen_lowpart (TYPE_MODE (type), op0);
7435      else if (GET_CODE (op0) != MEM)
7436	{
7437	  /* If the operand is not a MEM, force it into memory.  Since we
7438	     are going to be be changing the mode of the MEM, don't call
7439	     force_const_mem for constants because we don't allow pool
7440	     constants to change mode.  */
7441	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7442
7443	  if (TREE_ADDRESSABLE (exp))
7444	    abort ();
7445
7446	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7447	    target
7448	      = assign_stack_temp_for_type
7449		(TYPE_MODE (inner_type),
7450		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7451
7452	  emit_move_insn (target, op0);
7453	  op0 = target;
7454	}
7455
7456      /* At this point, OP0 is in the correct mode.  If the output type is such
7457	 that the operand is known to be aligned, indicate that it is.
7458	 Otherwise, we need only be concerned about alignment for non-BLKmode
7459	 results.  */
7460      if (GET_CODE (op0) == MEM)
7461	{
7462	  op0 = copy_rtx (op0);
7463
7464	  if (TYPE_ALIGN_OK (type))
7465	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7466	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7467		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7468	    {
7469	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7470	      HOST_WIDE_INT temp_size
7471		= MAX (int_size_in_bytes (inner_type),
7472		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7473	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7474						    temp_size, 0, type);
7475	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7476
7477	      if (TREE_ADDRESSABLE (exp))
7478		abort ();
7479
7480	      if (GET_MODE (op0) == BLKmode)
7481		emit_block_move (new_with_op0_mode, op0,
7482				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7483	      else
7484		emit_move_insn (new_with_op0_mode, op0);
7485
7486	      op0 = new;
7487	    }
7488
7489	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7490	}
7491
7492      return op0;
7493
7494    case PLUS_EXPR:
7495      /* We come here from MINUS_EXPR when the second operand is a
7496         constant.  */
7497    plus_expr:
7498      this_optab = ! unsignedp && flag_trapv
7499                   && (GET_MODE_CLASS (mode) == MODE_INT)
7500                   ? addv_optab : add_optab;
7501
7502      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7503	 something else, make sure we add the register to the constant and
7504	 then to the other thing.  This case can occur during strength
7505	 reduction and doing it this way will produce better code if the
7506	 frame pointer or argument pointer is eliminated.
7507
7508	 fold-const.c will ensure that the constant is always in the inner
7509	 PLUS_EXPR, so the only case we need to do anything about is if
7510	 sp, ap, or fp is our second argument, in which case we must swap
7511	 the innermost first argument and our second argument.  */
7512
7513      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7514	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7515	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7516	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7517	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7518	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7519	{
7520	  tree t = TREE_OPERAND (exp, 1);
7521
7522	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7523	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7524	}
7525
7526      /* If the result is to be ptr_mode and we are adding an integer to
7527	 something, we might be forming a constant.  So try to use
7528	 plus_constant.  If it produces a sum and we can't accept it,
7529	 use force_operand.  This allows P = &ARR[const] to generate
7530	 efficient code on machines where a SYMBOL_REF is not a valid
7531	 address.
7532
7533	 If this is an EXPAND_SUM call, always return the sum.  */
7534      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7535          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7536	{
7537	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7538	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7539	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7540	    {
7541	      rtx constant_part;
7542
7543	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7544				 EXPAND_SUM);
7545	      /* Use immed_double_const to ensure that the constant is
7546		 truncated according to the mode of OP1, then sign extended
7547		 to a HOST_WIDE_INT.  Using the constant directly can result
7548		 in non-canonical RTL in a 64x32 cross compile.  */
7549	      constant_part
7550		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7551				      (HOST_WIDE_INT) 0,
7552				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7553	      op1 = plus_constant (op1, INTVAL (constant_part));
7554	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7555		op1 = force_operand (op1, target);
7556	      return op1;
7557	    }
7558
7559	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7560		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7561		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7562	    {
7563	      rtx constant_part;
7564
7565	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7566				 (modifier == EXPAND_INITIALIZER
7567				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7568	      if (! CONSTANT_P (op0))
7569		{
7570		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7571				     VOIDmode, modifier);
7572		  /* Don't go to both_summands if modifier
7573		     says it's not right to return a PLUS.  */
7574		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7575		    goto binop2;
7576		  goto both_summands;
7577		}
7578	      /* Use immed_double_const to ensure that the constant is
7579		 truncated according to the mode of OP1, then sign extended
7580		 to a HOST_WIDE_INT.  Using the constant directly can result
7581		 in non-canonical RTL in a 64x32 cross compile.  */
7582	      constant_part
7583		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7584				      (HOST_WIDE_INT) 0,
7585				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7586	      op0 = plus_constant (op0, INTVAL (constant_part));
7587	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7588		op0 = force_operand (op0, target);
7589	      return op0;
7590	    }
7591	}
7592
7593      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7594	subtarget = 0;
7595
7596      /* No sense saving up arithmetic to be done
7597	 if it's all in the wrong mode to form part of an address.
7598	 And force_operand won't know whether to sign-extend or
7599	 zero-extend.  */
7600      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7601	  || mode != ptr_mode)
7602	{
7603	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7604	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7605	  if (op0 == const0_rtx)
7606	    return op1;
7607	  if (op1 == const0_rtx)
7608	    return op0;
7609	  goto binop2;
7610	}
7611
7612      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7613      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7614
7615    both_summands:
7616      /* Make sure any term that's a sum with a constant comes last.  */
7617      if (GET_CODE (op0) == PLUS
7618	  && CONSTANT_P (XEXP (op0, 1)))
7619	{
7620	  temp = op0;
7621	  op0 = op1;
7622	  op1 = temp;
7623	}
7624      /* If adding to a sum including a constant,
7625	 associate it to put the constant outside.  */
7626      if (GET_CODE (op1) == PLUS
7627	  && CONSTANT_P (XEXP (op1, 1)))
7628	{
7629	  rtx constant_term = const0_rtx;
7630
7631	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7632	  if (temp != 0)
7633	    op0 = temp;
7634	  /* Ensure that MULT comes first if there is one.  */
7635	  else if (GET_CODE (op0) == MULT)
7636	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7637	  else
7638	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7639
7640	  /* Let's also eliminate constants from op0 if possible.  */
7641	  op0 = eliminate_constant_term (op0, &constant_term);
7642
7643	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7644	     their sum should be a constant.  Form it into OP1, since the
7645	     result we want will then be OP0 + OP1.  */
7646
7647	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7648					    XEXP (op1, 1));
7649	  if (temp != 0)
7650	    op1 = temp;
7651	  else
7652	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7653	}
7654
7655      /* Put a constant term last and put a multiplication first.  */
7656      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7657	temp = op1, op1 = op0, op0 = temp;
7658
7659      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7660      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7661
7662    case MINUS_EXPR:
7663      /* For initializers, we are allowed to return a MINUS of two
7664	 symbolic constants.  Here we handle all cases when both operands
7665	 are constant.  */
7666      /* Handle difference of two symbolic constants,
7667	 for the sake of an initializer.  */
7668      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7669	  && really_constant_p (TREE_OPERAND (exp, 0))
7670	  && really_constant_p (TREE_OPERAND (exp, 1)))
7671	{
7672	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7673				 modifier);
7674	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7675				 modifier);
7676
7677	  /* If the last operand is a CONST_INT, use plus_constant of
7678	     the negated constant.  Else make the MINUS.  */
7679	  if (GET_CODE (op1) == CONST_INT)
7680	    return plus_constant (op0, - INTVAL (op1));
7681	  else
7682	    return gen_rtx_MINUS (mode, op0, op1);
7683	}
7684      /* Convert A - const to A + (-const).  */
7685      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7686	{
7687	  tree negated = fold (build1 (NEGATE_EXPR, type,
7688				       TREE_OPERAND (exp, 1)));
7689
7690	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7691	    /* If we can't negate the constant in TYPE, leave it alone and
7692	       expand_binop will negate it for us.  We used to try to do it
7693	       here in the signed version of TYPE, but that doesn't work
7694	       on POINTER_TYPEs.  */;
7695	  else
7696	    {
7697	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7698	      goto plus_expr;
7699	    }
7700	}
7701      this_optab = ! unsignedp && flag_trapv
7702                   && (GET_MODE_CLASS(mode) == MODE_INT)
7703                   ? subv_optab : sub_optab;
7704      goto binop;
7705
7706    case MULT_EXPR:
7707      /* If first operand is constant, swap them.
7708	 Thus the following special case checks need only
7709	 check the second operand.  */
7710      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7711	{
7712	  tree t1 = TREE_OPERAND (exp, 0);
7713	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7714	  TREE_OPERAND (exp, 1) = t1;
7715	}
7716
7717      /* Attempt to return something suitable for generating an
7718	 indexed address, for machines that support that.  */
7719
7720      if (modifier == EXPAND_SUM && mode == ptr_mode
7721	  && host_integerp (TREE_OPERAND (exp, 1), 0))
7722	{
7723	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7724			     EXPAND_SUM);
7725
7726	  /* If we knew for certain that this is arithmetic for an array
7727	     reference, and we knew the bounds of the array, then we could
7728	     apply the distributive law across (PLUS X C) for constant C.
7729	     Without such knowledge, we risk overflowing the computation
7730	     when both X and C are large, but X+C isn't.  */
7731	  /* ??? Could perhaps special-case EXP being unsigned and C being
7732	     positive.  In that case we are certain that X+C is no smaller
7733	     than X and so the transformed expression will overflow iff the
7734	     original would have.  */
7735
7736	  if (GET_CODE (op0) != REG)
7737	    op0 = force_operand (op0, NULL_RTX);
7738	  if (GET_CODE (op0) != REG)
7739	    op0 = copy_to_mode_reg (mode, op0);
7740
7741	  return
7742	    gen_rtx_MULT (mode, op0,
7743			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7744	}
7745
7746      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7747	subtarget = 0;
7748
7749      /* Check for multiplying things that have been extended
7750	 from a narrower type.  If this machine supports multiplying
7751	 in that narrower type with a result in the desired type,
7752	 do it that way, and avoid the explicit type-conversion.  */
7753      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7754	  && TREE_CODE (type) == INTEGER_TYPE
7755	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7756	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7757	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7758	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7759				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7760	       /* Don't use a widening multiply if a shift will do.  */
7761	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7762		    > HOST_BITS_PER_WIDE_INT)
7763		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7764	      ||
7765	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7766	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7767		   ==
7768		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7769	       /* If both operands are extended, they must either both
7770		  be zero-extended or both be sign-extended.  */
7771	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7772		   ==
7773		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7774	{
7775	  enum machine_mode innermode
7776	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7777	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7778			? smul_widen_optab : umul_widen_optab);
7779	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7780			? umul_widen_optab : smul_widen_optab);
7781	  if (mode == GET_MODE_WIDER_MODE (innermode))
7782	    {
7783	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7784		{
7785		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7786				     NULL_RTX, VOIDmode, 0);
7787		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7788		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7789				       VOIDmode, 0);
7790		  else
7791		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7792				       NULL_RTX, VOIDmode, 0);
7793		  goto binop2;
7794		}
7795	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7796		       && innermode == word_mode)
7797		{
7798		  rtx htem;
7799		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7800				     NULL_RTX, VOIDmode, 0);
7801		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7802		    op1 = convert_modes (innermode, mode,
7803					 expand_expr (TREE_OPERAND (exp, 1),
7804						      NULL_RTX, VOIDmode, 0),
7805					 unsignedp);
7806		  else
7807		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7808				       NULL_RTX, VOIDmode, 0);
7809		  temp = expand_binop (mode, other_optab, op0, op1, target,
7810				       unsignedp, OPTAB_LIB_WIDEN);
7811		  htem = expand_mult_highpart_adjust (innermode,
7812						      gen_highpart (innermode, temp),
7813						      op0, op1,
7814						      gen_highpart (innermode, temp),
7815						      unsignedp);
7816		  emit_move_insn (gen_highpart (innermode, temp), htem);
7817		  return temp;
7818		}
7819	    }
7820	}
7821      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7822      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7823      return expand_mult (mode, op0, op1, target, unsignedp);
7824
7825    case TRUNC_DIV_EXPR:
7826    case FLOOR_DIV_EXPR:
7827    case CEIL_DIV_EXPR:
7828    case ROUND_DIV_EXPR:
7829    case EXACT_DIV_EXPR:
7830      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7831	subtarget = 0;
7832      /* Possible optimization: compute the dividend with EXPAND_SUM
7833	 then if the divisor is constant can optimize the case
7834	 where some terms of the dividend have coeffs divisible by it.  */
7835      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7836      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7837      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7838
7839    case RDIV_EXPR:
7840      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
7841         expensive divide.  If not, combine will rebuild the original
7842         computation.  */
7843      if (flag_unsafe_math_optimizations && optimize && !optimize_size
7844	  && TREE_CODE (type) == REAL_TYPE
7845	  && !real_onep (TREE_OPERAND (exp, 0)))
7846        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7847				   build (RDIV_EXPR, type,
7848					  build_real (type, dconst1),
7849					  TREE_OPERAND (exp, 1))),
7850			    target, tmode, unsignedp);
7851      this_optab = sdiv_optab;
7852      goto binop;
7853
7854    case TRUNC_MOD_EXPR:
7855    case FLOOR_MOD_EXPR:
7856    case CEIL_MOD_EXPR:
7857    case ROUND_MOD_EXPR:
7858      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7859	subtarget = 0;
7860      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7861      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7862      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7863
7864    case FIX_ROUND_EXPR:
7865    case FIX_FLOOR_EXPR:
7866    case FIX_CEIL_EXPR:
7867      abort ();			/* Not used for C.  */
7868
7869    case FIX_TRUNC_EXPR:
7870      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7871      if (target == 0)
7872	target = gen_reg_rtx (mode);
7873      expand_fix (target, op0, unsignedp);
7874      return target;
7875
7876    case FLOAT_EXPR:
7877      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7878      if (target == 0)
7879	target = gen_reg_rtx (mode);
7880      /* expand_float can't figure out what to do if FROM has VOIDmode.
7881	 So give it the correct mode.  With -O, cse will optimize this.  */
7882      if (GET_MODE (op0) == VOIDmode)
7883	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7884				op0);
7885      expand_float (target, op0,
7886		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7887      return target;
7888
7889    case NEGATE_EXPR:
7890      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7891      temp = expand_unop (mode,
7892                          ! unsignedp && flag_trapv
7893                          && (GET_MODE_CLASS(mode) == MODE_INT)
7894                          ? negv_optab : neg_optab, op0, target, 0);
7895      if (temp == 0)
7896	abort ();
7897      return temp;
7898
7899    case ABS_EXPR:
7900      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7901
7902      /* Handle complex values specially.  */
7903      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7904	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7905	return expand_complex_abs (mode, op0, target, unsignedp);
7906
7907      /* Unsigned abs is simply the operand.  Testing here means we don't
7908	 risk generating incorrect code below.  */
7909      if (TREE_UNSIGNED (type))
7910	return op0;
7911
7912      return expand_abs (mode, op0, target, unsignedp,
7913			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7914
7915    case MAX_EXPR:
7916    case MIN_EXPR:
7917      target = original_target;
7918      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7919	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7920	  || GET_MODE (target) != mode
7921	  || (GET_CODE (target) == REG
7922	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7923	target = gen_reg_rtx (mode);
7924      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7925      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7926
7927      /* First try to do it with a special MIN or MAX instruction.
7928	 If that does not win, use a conditional jump to select the proper
7929	 value.  */
7930      this_optab = (TREE_UNSIGNED (type)
7931		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7932		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7933
7934      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7935			   OPTAB_WIDEN);
7936      if (temp != 0)
7937	return temp;
7938
7939      /* At this point, a MEM target is no longer useful; we will get better
7940	 code without it.  */
7941
7942      if (GET_CODE (target) == MEM)
7943	target = gen_reg_rtx (mode);
7944
7945      if (target != op0)
7946	emit_move_insn (target, op0);
7947
7948      op0 = gen_label_rtx ();
7949
7950      /* If this mode is an integer too wide to compare properly,
7951	 compare word by word.  Rely on cse to optimize constant cases.  */
7952      if (GET_MODE_CLASS (mode) == MODE_INT
7953	  && ! can_compare_p (GE, mode, ccp_jump))
7954	{
7955	  if (code == MAX_EXPR)
7956	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7957					  target, op1, NULL_RTX, op0);
7958	  else
7959	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7960					  op1, target, NULL_RTX, op0);
7961	}
7962      else
7963	{
7964	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7965	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7966				   unsignedp, mode, NULL_RTX, NULL_RTX,
7967				   op0);
7968	}
7969      emit_move_insn (target, op1);
7970      emit_label (op0);
7971      return target;
7972
7973    case BIT_NOT_EXPR:
7974      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7975      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7976      if (temp == 0)
7977	abort ();
7978      return temp;
7979
7980    case FFS_EXPR:
7981      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7982      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7983      if (temp == 0)
7984	abort ();
7985      return temp;
7986
7987      /* ??? Can optimize bitwise operations with one arg constant.
7988	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7989	 and (a bitwise1 b) bitwise2 b (etc)
7990	 but that is probably not worth while.  */
7991
7992      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7993	 boolean values when we want in all cases to compute both of them.  In
7994	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7995	 as actual zero-or-1 values and then bitwise anding.  In cases where
7996	 there cannot be any side effects, better code would be made by
7997	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7998	 how to recognize those cases.  */
7999
8000    case TRUTH_AND_EXPR:
8001    case BIT_AND_EXPR:
8002      this_optab = and_optab;
8003      goto binop;
8004
8005    case TRUTH_OR_EXPR:
8006    case BIT_IOR_EXPR:
8007      this_optab = ior_optab;
8008      goto binop;
8009
8010    case TRUTH_XOR_EXPR:
8011    case BIT_XOR_EXPR:
8012      this_optab = xor_optab;
8013      goto binop;
8014
8015    case LSHIFT_EXPR:
8016    case RSHIFT_EXPR:
8017    case LROTATE_EXPR:
8018    case RROTATE_EXPR:
8019      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8020	subtarget = 0;
8021      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8022      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8023			   unsignedp);
8024
8025      /* Could determine the answer when only additive constants differ.  Also,
8026	 the addition of one can be handled by changing the condition.  */
8027    case LT_EXPR:
8028    case LE_EXPR:
8029    case GT_EXPR:
8030    case GE_EXPR:
8031    case EQ_EXPR:
8032    case NE_EXPR:
8033    case UNORDERED_EXPR:
8034    case ORDERED_EXPR:
8035    case UNLT_EXPR:
8036    case UNLE_EXPR:
8037    case UNGT_EXPR:
8038    case UNGE_EXPR:
8039    case UNEQ_EXPR:
8040      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8041      if (temp != 0)
8042	return temp;
8043
8044      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8045      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8046	  && original_target
8047	  && GET_CODE (original_target) == REG
8048	  && (GET_MODE (original_target)
8049	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8050	{
8051	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8052			      VOIDmode, 0);
8053
8054	  /* If temp is constant, we can just compute the result.  */
8055	  if (GET_CODE (temp) == CONST_INT)
8056	    {
8057	      if (INTVAL (temp) != 0)
8058	        emit_move_insn (target, const1_rtx);
8059	      else
8060	        emit_move_insn (target, const0_rtx);
8061
8062	      return target;
8063	    }
8064
8065	  if (temp != original_target)
8066	    {
8067	      enum machine_mode mode1 = GET_MODE (temp);
8068	      if (mode1 == VOIDmode)
8069		mode1 = tmode != VOIDmode ? tmode : mode;
8070
8071	      temp = copy_to_mode_reg (mode1, temp);
8072	    }
8073
8074	  op1 = gen_label_rtx ();
8075	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8076				   GET_MODE (temp), unsignedp, op1);
8077	  emit_move_insn (temp, const1_rtx);
8078	  emit_label (op1);
8079	  return temp;
8080	}
8081
8082      /* If no set-flag instruction, must generate a conditional
8083	 store into a temporary variable.  Drop through
8084	 and handle this like && and ||.  */
8085
8086    case TRUTH_ANDIF_EXPR:
8087    case TRUTH_ORIF_EXPR:
8088      if (! ignore
8089	  && (target == 0 || ! safe_from_p (target, exp, 1)
8090	      /* Make sure we don't have a hard reg (such as function's return
8091		 value) live across basic blocks, if not optimizing.  */
8092	      || (!optimize && GET_CODE (target) == REG
8093		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8094	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8095
8096      if (target)
8097	emit_clr_insn (target);
8098
8099      op1 = gen_label_rtx ();
8100      jumpifnot (exp, op1);
8101
8102      if (target)
8103	emit_0_to_1_insn (target);
8104
8105      emit_label (op1);
8106      return ignore ? const0_rtx : target;
8107
8108    case TRUTH_NOT_EXPR:
8109      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8110      /* The parser is careful to generate TRUTH_NOT_EXPR
8111	 only with operands that are always zero or one.  */
8112      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8113			   target, 1, OPTAB_LIB_WIDEN);
8114      if (temp == 0)
8115	abort ();
8116      return temp;
8117
8118    case COMPOUND_EXPR:
8119      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8120      emit_queue ();
8121      return expand_expr (TREE_OPERAND (exp, 1),
8122			  (ignore ? const0_rtx : target),
8123			  VOIDmode, 0);
8124
8125    case COND_EXPR:
8126      /* If we would have a "singleton" (see below) were it not for a
8127	 conversion in each arm, bring that conversion back out.  */
8128      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8129	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8130	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8131	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8132	{
8133	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8134	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8135
8136	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8137	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8138	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8139		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8140	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8141		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8142	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8143		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8144	    return expand_expr (build1 (NOP_EXPR, type,
8145					build (COND_EXPR, TREE_TYPE (iftrue),
8146					       TREE_OPERAND (exp, 0),
8147					       iftrue, iffalse)),
8148				target, tmode, modifier);
8149	}
8150
8151      {
8152	/* Note that COND_EXPRs whose type is a structure or union
8153	   are required to be constructed to contain assignments of
8154	   a temporary variable, so that we can evaluate them here
8155	   for side effect only.  If type is void, we must do likewise.  */
8156
8157	/* If an arm of the branch requires a cleanup,
8158	   only that cleanup is performed.  */
8159
8160	tree singleton = 0;
8161	tree binary_op = 0, unary_op = 0;
8162
8163	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8164	   convert it to our mode, if necessary.  */
8165	if (integer_onep (TREE_OPERAND (exp, 1))
8166	    && integer_zerop (TREE_OPERAND (exp, 2))
8167	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8168	  {
8169	    if (ignore)
8170	      {
8171		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8172			     modifier);
8173		return const0_rtx;
8174	      }
8175
8176	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8177	    if (GET_MODE (op0) == mode)
8178	      return op0;
8179
8180	    if (target == 0)
8181	      target = gen_reg_rtx (mode);
8182	    convert_move (target, op0, unsignedp);
8183	    return target;
8184	  }
8185
8186	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8187	   output and conditionally add B.  Similarly for unary operations.
8188	   Don't do this if X has side-effects because those side effects
8189	   might affect A or B and the "?" operation is a sequence point in
8190	   ANSI.  (operand_equal_p tests for side effects.)  */
8191
8192	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8193	    && operand_equal_p (TREE_OPERAND (exp, 2),
8194				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8195	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8196	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8197		 && operand_equal_p (TREE_OPERAND (exp, 1),
8198				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8199	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8200	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8201		 && operand_equal_p (TREE_OPERAND (exp, 2),
8202				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8203	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8204	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8205		 && operand_equal_p (TREE_OPERAND (exp, 1),
8206				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8207	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8208
8209	/* If we are not to produce a result, we have no target.  Otherwise,
8210	   if a target was specified use it; it will not be used as an
8211	   intermediate target unless it is safe.  If no target, use a
8212	   temporary.  */
8213
8214	if (ignore)
8215	  temp = 0;
8216	else if (original_target
8217		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8218		     || (singleton && GET_CODE (original_target) == REG
8219			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8220			 && original_target == var_rtx (singleton)))
8221		 && GET_MODE (original_target) == mode
8222#ifdef HAVE_conditional_move
8223		 && (! can_conditionally_move_p (mode)
8224		     || GET_CODE (original_target) == REG
8225		     || TREE_ADDRESSABLE (type))
8226#endif
8227		 && (GET_CODE (original_target) != MEM
8228		     || TREE_ADDRESSABLE (type)))
8229	  temp = original_target;
8230	else if (TREE_ADDRESSABLE (type))
8231	  abort ();
8232	else
8233	  temp = assign_temp (type, 0, 0, 1);
8234
8235	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8236	   do the test of X as a store-flag operation, do this as
8237	   A + ((X != 0) << log C).  Similarly for other simple binary
8238	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
8239	if (temp && singleton && binary_op
8240	    && (TREE_CODE (binary_op) == PLUS_EXPR
8241		|| TREE_CODE (binary_op) == MINUS_EXPR
8242		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8243		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8244	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8245		: integer_onep (TREE_OPERAND (binary_op, 1)))
8246	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8247	  {
8248	    rtx result;
8249	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8250                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8251                               ? addv_optab : add_optab)
8252                            : TREE_CODE (binary_op) == MINUS_EXPR
8253                              ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8254                                 ? subv_optab : sub_optab)
8255                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8256                            : xor_optab);
8257
8258	    /* If we had X ? A : A + 1, do this as A + (X == 0).
8259
8260	       We have to invert the truth value here and then put it
8261	       back later if do_store_flag fails.  We cannot simply copy
8262	       TREE_OPERAND (exp, 0) to another variable and modify that
8263	       because invert_truthvalue can modify the tree pointed to
8264	       by its argument.  */
8265	    if (singleton == TREE_OPERAND (exp, 1))
8266	      TREE_OPERAND (exp, 0)
8267		= invert_truthvalue (TREE_OPERAND (exp, 0));
8268
8269	    result = do_store_flag (TREE_OPERAND (exp, 0),
8270				    (safe_from_p (temp, singleton, 1)
8271				     ? temp : NULL_RTX),
8272				    mode, BRANCH_COST <= 1);
8273
8274	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8275	      result = expand_shift (LSHIFT_EXPR, mode, result,
8276				     build_int_2 (tree_log2
8277						  (TREE_OPERAND
8278						   (binary_op, 1)),
8279						  0),
8280				     (safe_from_p (temp, singleton, 1)
8281				      ? temp : NULL_RTX), 0);
8282
8283	    if (result)
8284	      {
8285		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8286		return expand_binop (mode, boptab, op1, result, temp,
8287				     unsignedp, OPTAB_LIB_WIDEN);
8288	      }
8289	    else if (singleton == TREE_OPERAND (exp, 1))
8290	      TREE_OPERAND (exp, 0)
8291		= invert_truthvalue (TREE_OPERAND (exp, 0));
8292	  }
8293
8294	do_pending_stack_adjust ();
8295	NO_DEFER_POP;
8296	op0 = gen_label_rtx ();
8297
8298	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8299	  {
8300	    if (temp != 0)
8301	      {
8302		/* If the target conflicts with the other operand of the
8303		   binary op, we can't use it.  Also, we can't use the target
8304		   if it is a hard register, because evaluating the condition
8305		   might clobber it.  */
8306		if ((binary_op
8307		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8308		    || (GET_CODE (temp) == REG
8309			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8310		  temp = gen_reg_rtx (mode);
8311		store_expr (singleton, temp, 0);
8312	      }
8313	    else
8314	      expand_expr (singleton,
8315			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8316	    if (singleton == TREE_OPERAND (exp, 1))
8317	      jumpif (TREE_OPERAND (exp, 0), op0);
8318	    else
8319	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8320
8321	    start_cleanup_deferral ();
8322	    if (binary_op && temp == 0)
8323	      /* Just touch the other operand.  */
8324	      expand_expr (TREE_OPERAND (binary_op, 1),
8325			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8326	    else if (binary_op)
8327	      store_expr (build (TREE_CODE (binary_op), type,
8328				 make_tree (type, temp),
8329				 TREE_OPERAND (binary_op, 1)),
8330			  temp, 0);
8331	    else
8332	      store_expr (build1 (TREE_CODE (unary_op), type,
8333				  make_tree (type, temp)),
8334			  temp, 0);
8335	    op1 = op0;
8336	  }
8337	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8338	   comparison operator.  If we have one of these cases, set the
8339	   output to A, branch on A (cse will merge these two references),
8340	   then set the output to FOO.  */
8341	else if (temp
8342		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8343		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8344		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8345				     TREE_OPERAND (exp, 1), 0)
8346		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8347		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8348		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8349	  {
8350	    if (GET_CODE (temp) == REG
8351		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8352	      temp = gen_reg_rtx (mode);
8353	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8354	    jumpif (TREE_OPERAND (exp, 0), op0);
8355
8356	    start_cleanup_deferral ();
8357	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8358	    op1 = op0;
8359	  }
8360	else if (temp
8361		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8362		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8363		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8364				     TREE_OPERAND (exp, 2), 0)
8365		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8366		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8367		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8368	  {
8369	    if (GET_CODE (temp) == REG
8370		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8371	      temp = gen_reg_rtx (mode);
8372	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8373	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8374
8375	    start_cleanup_deferral ();
8376	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8377	    op1 = op0;
8378	  }
8379	else
8380	  {
8381	    op1 = gen_label_rtx ();
8382	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8383
8384	    start_cleanup_deferral ();
8385
8386	    /* One branch of the cond can be void, if it never returns. For
8387	       example A ? throw : E  */
8388	    if (temp != 0
8389		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8390	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
8391	    else
8392	      expand_expr (TREE_OPERAND (exp, 1),
8393			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8394	    end_cleanup_deferral ();
8395	    emit_queue ();
8396	    emit_jump_insn (gen_jump (op1));
8397	    emit_barrier ();
8398	    emit_label (op0);
8399	    start_cleanup_deferral ();
8400	    if (temp != 0
8401		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8402	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
8403	    else
8404	      expand_expr (TREE_OPERAND (exp, 2),
8405			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8406	  }
8407
8408	end_cleanup_deferral ();
8409
8410	emit_queue ();
8411	emit_label (op1);
8412	OK_DEFER_POP;
8413
8414	return temp;
8415      }
8416
8417    case TARGET_EXPR:
8418      {
8419	/* Something needs to be initialized, but we didn't know
8420	   where that thing was when building the tree.  For example,
8421	   it could be the return value of a function, or a parameter
8422	   to a function which lays down in the stack, or a temporary
8423	   variable which must be passed by reference.
8424
8425	   We guarantee that the expression will either be constructed
8426	   or copied into our original target.  */
8427
8428	tree slot = TREE_OPERAND (exp, 0);
8429	tree cleanups = NULL_TREE;
8430	tree exp1;
8431
8432	if (TREE_CODE (slot) != VAR_DECL)
8433	  abort ();
8434
8435	if (! ignore)
8436	  target = original_target;
8437
8438	/* Set this here so that if we get a target that refers to a
8439	   register variable that's already been used, put_reg_into_stack
8440	   knows that it should fix up those uses.  */
8441	TREE_USED (slot) = 1;
8442
8443	if (target == 0)
8444	  {
8445	    if (DECL_RTL_SET_P (slot))
8446	      {
8447		target = DECL_RTL (slot);
8448		/* If we have already expanded the slot, so don't do
8449		   it again.  (mrs)  */
8450		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8451		  return target;
8452	      }
8453	    else
8454	      {
8455		target = assign_temp (type, 2, 0, 1);
8456		/* All temp slots at this level must not conflict.  */
8457		preserve_temp_slots (target);
8458		SET_DECL_RTL (slot, target);
8459		if (TREE_ADDRESSABLE (slot))
8460		  put_var_into_stack (slot);
8461
8462		/* Since SLOT is not known to the called function
8463		   to belong to its stack frame, we must build an explicit
8464		   cleanup.  This case occurs when we must build up a reference
8465		   to pass the reference as an argument.  In this case,
8466		   it is very likely that such a reference need not be
8467		   built here.  */
8468
8469		if (TREE_OPERAND (exp, 2) == 0)
8470		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8471		cleanups = TREE_OPERAND (exp, 2);
8472	      }
8473	  }
8474	else
8475	  {
8476	    /* This case does occur, when expanding a parameter which
8477	       needs to be constructed on the stack.  The target
8478	       is the actual stack address that we want to initialize.
8479	       The function we call will perform the cleanup in this case.  */
8480
8481	    /* If we have already assigned it space, use that space,
8482	       not target that we were passed in, as our target
8483	       parameter is only a hint.  */
8484	    if (DECL_RTL_SET_P (slot))
8485	      {
8486		target = DECL_RTL (slot);
8487		/* If we have already expanded the slot, so don't do
8488                   it again.  (mrs)  */
8489		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8490		  return target;
8491	      }
8492	    else
8493	      {
8494		SET_DECL_RTL (slot, target);
8495		/* If we must have an addressable slot, then make sure that
8496		   the RTL that we just stored in slot is OK.  */
8497		if (TREE_ADDRESSABLE (slot))
8498		  put_var_into_stack (slot);
8499	      }
8500	  }
8501
8502	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8503	/* Mark it as expanded.  */
8504	TREE_OPERAND (exp, 1) = NULL_TREE;
8505
8506	store_expr (exp1, target, 0);
8507
8508	expand_decl_cleanup (NULL_TREE, cleanups);
8509
8510	return target;
8511      }
8512
8513    case INIT_EXPR:
8514      {
8515	tree lhs = TREE_OPERAND (exp, 0);
8516	tree rhs = TREE_OPERAND (exp, 1);
8517
8518	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8519	return temp;
8520      }
8521
8522    case MODIFY_EXPR:
8523      {
8524	/* If lhs is complex, expand calls in rhs before computing it.
8525	   That's so we don't compute a pointer and save it over a
8526	   call.  If lhs is simple, compute it first so we can give it
8527	   as a target if the rhs is just a call.  This avoids an
8528	   extra temp and copy and that prevents a partial-subsumption
8529	   which makes bad code.  Actually we could treat
8530	   component_ref's of vars like vars.  */
8531
8532	tree lhs = TREE_OPERAND (exp, 0);
8533	tree rhs = TREE_OPERAND (exp, 1);
8534
8535	temp = 0;
8536
8537	/* Check for |= or &= of a bitfield of size one into another bitfield
8538	   of size 1.  In this case, (unless we need the result of the
8539	   assignment) we can do this more efficiently with a
8540	   test followed by an assignment, if necessary.
8541
8542	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8543	   things change so we do, this code should be enhanced to
8544	   support it.  */
8545	if (ignore
8546	    && TREE_CODE (lhs) == COMPONENT_REF
8547	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8548		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8549	    && TREE_OPERAND (rhs, 0) == lhs
8550	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8551	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8552	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8553	  {
8554	    rtx label = gen_label_rtx ();
8555
8556	    do_jump (TREE_OPERAND (rhs, 1),
8557		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8558		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8559	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8560					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8561					      ? integer_one_node
8562					      : integer_zero_node)),
8563			       0, 0);
8564	    do_pending_stack_adjust ();
8565	    emit_label (label);
8566	    return const0_rtx;
8567	  }
8568
8569	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8570
8571	return temp;
8572      }
8573
8574    case RETURN_EXPR:
8575      if (!TREE_OPERAND (exp, 0))
8576	expand_null_return ();
8577      else
8578	expand_return (TREE_OPERAND (exp, 0));
8579      return const0_rtx;
8580
8581    case PREINCREMENT_EXPR:
8582    case PREDECREMENT_EXPR:
8583      return expand_increment (exp, 0, ignore);
8584
8585    case POSTINCREMENT_EXPR:
8586    case POSTDECREMENT_EXPR:
8587      /* Faster to treat as pre-increment if result is not used.  */
8588      return expand_increment (exp, ! ignore, ignore);
8589
8590    case ADDR_EXPR:
8591      /* Are we taking the address of a nested function?  */
8592      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8593	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8594	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8595	  && ! TREE_STATIC (exp))
8596	{
8597	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8598	  op0 = force_operand (op0, target);
8599	}
8600      /* If we are taking the address of something erroneous, just
8601	 return a zero.  */
8602      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8603	return const0_rtx;
8604      /* If we are taking the address of a constant and are at the
8605	 top level, we have to use output_constant_def since we can't
8606	 call force_const_mem at top level.  */
8607      else if (cfun == 0
8608	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8609		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8610		       == 'c')))
8611	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8612      else
8613	{
8614	  /* We make sure to pass const0_rtx down if we came in with
8615	     ignore set, to avoid doing the cleanups twice for something.  */
8616	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8617			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8618			     (modifier == EXPAND_INITIALIZER
8619			      ? modifier : EXPAND_CONST_ADDRESS));
8620
8621	  /* If we are going to ignore the result, OP0 will have been set
8622	     to const0_rtx, so just return it.  Don't get confused and
8623	     think we are taking the address of the constant.  */
8624	  if (ignore)
8625	    return op0;
8626
8627	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8628	     clever and returns a REG when given a MEM.  */
8629	  op0 = protect_from_queue (op0, 1);
8630
8631	  /* We would like the object in memory.  If it is a constant, we can
8632	     have it be statically allocated into memory.  For a non-constant,
8633	     we need to allocate some memory and store the value into it.  */
8634
8635	  if (CONSTANT_P (op0))
8636	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8637				   op0);
8638	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8639		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8640		   || GET_CODE (op0) == PARALLEL)
8641	    {
8642	      /* If the operand is a SAVE_EXPR, we can deal with this by
8643		 forcing the SAVE_EXPR into memory.  */
8644	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8645		{
8646		  put_var_into_stack (TREE_OPERAND (exp, 0));
8647		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8648		}
8649	      else
8650		{
8651		  /* If this object is in a register, it can't be BLKmode.  */
8652		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8653		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
8654
8655		  if (GET_CODE (op0) == PARALLEL)
8656		    /* Handle calls that pass values in multiple
8657		       non-contiguous locations.  The Irix 6 ABI has examples
8658		       of this.  */
8659		    emit_group_store (memloc, op0,
8660				      int_size_in_bytes (inner_type));
8661		  else
8662		    emit_move_insn (memloc, op0);
8663
8664		  op0 = memloc;
8665		}
8666	    }
8667
8668	  if (GET_CODE (op0) != MEM)
8669	    abort ();
8670
8671	  mark_temp_addr_taken (op0);
8672	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8673	    {
8674	      op0 = XEXP (op0, 0);
8675#ifdef POINTERS_EXTEND_UNSIGNED
8676	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8677		  && mode == ptr_mode)
8678		op0 = convert_memory_address (ptr_mode, op0);
8679#endif
8680	      return op0;
8681	    }
8682
8683	  /* If OP0 is not aligned as least as much as the type requires, we
8684	     need to make a temporary, copy OP0 to it, and take the address of
8685	     the temporary.  We want to use the alignment of the type, not of
8686	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
8687	     the test for BLKmode means that can't happen.  The test for
8688	     BLKmode is because we never make mis-aligned MEMs with
8689	     non-BLKmode.
8690
8691	     We don't need to do this at all if the machine doesn't have
8692	     strict alignment.  */
8693	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8694	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8695		  > MEM_ALIGN (op0))
8696	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8697	    {
8698	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8699	      rtx new
8700		= assign_stack_temp_for_type
8701		  (TYPE_MODE (inner_type),
8702		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8703		   : int_size_in_bytes (inner_type),
8704		   1, build_qualified_type (inner_type,
8705					    (TYPE_QUALS (inner_type)
8706					     | TYPE_QUAL_CONST)));
8707
8708	      if (TYPE_ALIGN_OK (inner_type))
8709		abort ();
8710
8711	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8712	      op0 = new;
8713	    }
8714
8715	  op0 = force_operand (XEXP (op0, 0), target);
8716	}
8717
8718      if (flag_force_addr
8719	  && GET_CODE (op0) != REG
8720	  && modifier != EXPAND_CONST_ADDRESS
8721	  && modifier != EXPAND_INITIALIZER
8722	  && modifier != EXPAND_SUM)
8723	op0 = force_reg (Pmode, op0);
8724
8725      if (GET_CODE (op0) == REG
8726	  && ! REG_USERVAR_P (op0))
8727	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8728
8729#ifdef POINTERS_EXTEND_UNSIGNED
8730      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8731	  && mode == ptr_mode)
8732	op0 = convert_memory_address (ptr_mode, op0);
8733#endif
8734
8735      return op0;
8736
8737    case ENTRY_VALUE_EXPR:
8738      abort ();
8739
8740    /* COMPLEX type for Extended Pascal & Fortran  */
8741    case COMPLEX_EXPR:
8742      {
8743	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8744	rtx insns;
8745
8746	/* Get the rtx code of the operands.  */
8747	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8748	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8749
8750	if (! target)
8751	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8752
8753	start_sequence ();
8754
8755	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8756	emit_move_insn (gen_realpart (mode, target), op0);
8757	emit_move_insn (gen_imagpart (mode, target), op1);
8758
8759	insns = get_insns ();
8760	end_sequence ();
8761
8762	/* Complex construction should appear as a single unit.  */
8763	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8764	   each with a separate pseudo as destination.
8765	   It's not correct for flow to treat them as a unit.  */
8766	if (GET_CODE (target) != CONCAT)
8767	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8768	else
8769	  emit_insns (insns);
8770
8771	return target;
8772      }
8773
8774    case REALPART_EXPR:
8775      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8776      return gen_realpart (mode, op0);
8777
8778    case IMAGPART_EXPR:
8779      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8780      return gen_imagpart (mode, op0);
8781
8782    case CONJ_EXPR:
8783      {
8784	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8785	rtx imag_t;
8786	rtx insns;
8787
8788	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8789
8790	if (! target)
8791	  target = gen_reg_rtx (mode);
8792
8793	start_sequence ();
8794
8795	/* Store the realpart and the negated imagpart to target.  */
8796	emit_move_insn (gen_realpart (partmode, target),
8797			gen_realpart (partmode, op0));
8798
8799	imag_t = gen_imagpart (partmode, target);
8800	temp = expand_unop (partmode,
8801                            ! unsignedp && flag_trapv
8802                            && (GET_MODE_CLASS(partmode) == MODE_INT)
8803                            ? negv_optab : neg_optab,
8804			    gen_imagpart (partmode, op0), imag_t, 0);
8805	if (temp != imag_t)
8806	  emit_move_insn (imag_t, temp);
8807
8808	insns = get_insns ();
8809	end_sequence ();
8810
8811	/* Conjugate should appear as a single unit
8812	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8813	   each with a separate pseudo as destination.
8814	   It's not correct for flow to treat them as a unit.  */
8815	if (GET_CODE (target) != CONCAT)
8816	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8817	else
8818	  emit_insns (insns);
8819
8820	return target;
8821      }
8822
8823    case TRY_CATCH_EXPR:
8824      {
8825	tree handler = TREE_OPERAND (exp, 1);
8826
8827	expand_eh_region_start ();
8828
8829	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8830
8831	expand_eh_region_end_cleanup (handler);
8832
8833	return op0;
8834      }
8835
8836    case TRY_FINALLY_EXPR:
8837      {
8838	tree try_block = TREE_OPERAND (exp, 0);
8839	tree finally_block = TREE_OPERAND (exp, 1);
8840	rtx finally_label = gen_label_rtx ();
8841	rtx done_label = gen_label_rtx ();
8842	rtx return_link = gen_reg_rtx (Pmode);
8843	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8844			      (tree) finally_label, (tree) return_link);
8845	TREE_SIDE_EFFECTS (cleanup) = 1;
8846
8847	/* Start a new binding layer that will keep track of all cleanup
8848	   actions to be performed.  */
8849	expand_start_bindings (2);
8850
8851	target_temp_slot_level = temp_slot_level;
8852
8853	expand_decl_cleanup (NULL_TREE, cleanup);
8854	op0 = expand_expr (try_block, target, tmode, modifier);
8855
8856	preserve_temp_slots (op0);
8857	expand_end_bindings (NULL_TREE, 0, 0);
8858	emit_jump (done_label);
8859	emit_label (finally_label);
8860	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8861	emit_indirect_jump (return_link);
8862	emit_label (done_label);
8863	return op0;
8864      }
8865
8866    case GOTO_SUBROUTINE_EXPR:
8867      {
8868	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8869	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8870	rtx return_address = gen_label_rtx ();
8871	emit_move_insn (return_link,
8872			gen_rtx_LABEL_REF (Pmode, return_address));
8873	emit_jump (subr);
8874	emit_label (return_address);
8875	return const0_rtx;
8876      }
8877
8878    case VA_ARG_EXPR:
8879      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8880
8881    case EXC_PTR_EXPR:
8882      return get_exception_pointer (cfun);
8883
8884    case FDESC_EXPR:
8885      /* Function descriptors are not valid except for as
8886	 initialization constants, and should not be expanded.  */
8887      abort ();
8888
8889    default:
8890      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8891    }
8892
8893  /* Here to do an ordinary binary operator, generating an instruction
8894     from the optab already placed in `this_optab'.  */
8895 binop:
8896  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8897    subtarget = 0;
8898  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8899  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8900 binop2:
8901  temp = expand_binop (mode, this_optab, op0, op1, target,
8902		       unsignedp, OPTAB_LIB_WIDEN);
8903  if (temp == 0)
8904    abort ();
8905  return temp;
8906}
8907
8908/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8909   when applied to the address of EXP produces an address known to be
8910   aligned more than BIGGEST_ALIGNMENT.  */
8911
8912static int
8913is_aligning_offset (offset, exp)
8914     tree offset;
8915     tree exp;
8916{
8917  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
8918  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8919	 || TREE_CODE (offset) == NOP_EXPR
8920	 || TREE_CODE (offset) == CONVERT_EXPR
8921	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
8922    offset = TREE_OPERAND (offset, 0);
8923
8924  /* We must now have a BIT_AND_EXPR with a constant that is one less than
8925     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8926  if (TREE_CODE (offset) != BIT_AND_EXPR
8927      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8928      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
8929      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8930    return 0;
8931
8932  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8933     It must be NEGATE_EXPR.  Then strip any more conversions.  */
8934  offset = TREE_OPERAND (offset, 0);
8935  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8936	 || TREE_CODE (offset) == NOP_EXPR
8937	 || TREE_CODE (offset) == CONVERT_EXPR)
8938    offset = TREE_OPERAND (offset, 0);
8939
8940  if (TREE_CODE (offset) != NEGATE_EXPR)
8941    return 0;
8942
8943  offset = TREE_OPERAND (offset, 0);
8944  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8945	 || TREE_CODE (offset) == NOP_EXPR
8946	 || TREE_CODE (offset) == CONVERT_EXPR)
8947    offset = TREE_OPERAND (offset, 0);
8948
8949  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
8950     whose type is the same as EXP.  */
8951  return (TREE_CODE (offset) == ADDR_EXPR
8952	  && (TREE_OPERAND (offset, 0) == exp
8953	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
8954		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
8955		      == TREE_TYPE (exp)))));
8956}
8957
8958/* Return the tree node if a ARG corresponds to a string constant or zero
8959   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
8960   in bytes within the string that ARG is accessing.  The type of the
8961   offset will be `sizetype'.  */
8962
8963tree
8964string_constant (arg, ptr_offset)
8965     tree arg;
8966     tree *ptr_offset;
8967{
8968  STRIP_NOPS (arg);
8969
8970  if (TREE_CODE (arg) == ADDR_EXPR
8971      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8972    {
8973      *ptr_offset = size_zero_node;
8974      return TREE_OPERAND (arg, 0);
8975    }
8976  else if (TREE_CODE (arg) == PLUS_EXPR)
8977    {
8978      tree arg0 = TREE_OPERAND (arg, 0);
8979      tree arg1 = TREE_OPERAND (arg, 1);
8980
8981      STRIP_NOPS (arg0);
8982      STRIP_NOPS (arg1);
8983
8984      if (TREE_CODE (arg0) == ADDR_EXPR
8985	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8986	{
8987	  *ptr_offset = convert (sizetype, arg1);
8988	  return TREE_OPERAND (arg0, 0);
8989	}
8990      else if (TREE_CODE (arg1) == ADDR_EXPR
8991	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8992	{
8993	  *ptr_offset = convert (sizetype, arg0);
8994	  return TREE_OPERAND (arg1, 0);
8995	}
8996    }
8997
8998  return 0;
8999}
9000
9001/* Expand code for a post- or pre- increment or decrement
9002   and return the RTX for the result.
9003   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
9004
9005static rtx
9006expand_increment (exp, post, ignore)
9007     tree exp;
9008     int post, ignore;
9009{
9010  rtx op0, op1;
9011  rtx temp, value;
9012  tree incremented = TREE_OPERAND (exp, 0);
9013  optab this_optab = add_optab;
9014  int icode;
9015  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9016  int op0_is_copy = 0;
9017  int single_insn = 0;
9018  /* 1 means we can't store into OP0 directly,
9019     because it is a subreg narrower than a word,
9020     and we don't dare clobber the rest of the word.  */
9021  int bad_subreg = 0;
9022
9023  /* Stabilize any component ref that might need to be
9024     evaluated more than once below.  */
9025  if (!post
9026      || TREE_CODE (incremented) == BIT_FIELD_REF
9027      || (TREE_CODE (incremented) == COMPONENT_REF
9028	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9029	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9030    incremented = stabilize_reference (incremented);
9031  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9032     ones into save exprs so that they don't accidentally get evaluated
9033     more than once by the code below.  */
9034  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9035      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9036    incremented = save_expr (incremented);
9037
9038  /* Compute the operands as RTX.
9039     Note whether OP0 is the actual lvalue or a copy of it:
9040     I believe it is a copy iff it is a register or subreg
9041     and insns were generated in computing it.  */
9042
9043  temp = get_last_insn ();
9044  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9045
9046  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9047     in place but instead must do sign- or zero-extension during assignment,
9048     so we copy it into a new register and let the code below use it as
9049     a copy.
9050
9051     Note that we can safely modify this SUBREG since it is know not to be
9052     shared (it was made by the expand_expr call above).  */
9053
9054  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9055    {
9056      if (post)
9057	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9058      else
9059	bad_subreg = 1;
9060    }
9061  else if (GET_CODE (op0) == SUBREG
9062	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9063    {
9064      /* We cannot increment this SUBREG in place.  If we are
9065	 post-incrementing, get a copy of the old value.  Otherwise,
9066	 just mark that we cannot increment in place.  */
9067      if (post)
9068	op0 = copy_to_reg (op0);
9069      else
9070	bad_subreg = 1;
9071    }
9072
9073  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9074		 && temp != get_last_insn ());
9075  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9076
9077  /* Decide whether incrementing or decrementing.  */
9078  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9079      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9080    this_optab = sub_optab;
9081
9082  /* Convert decrement by a constant into a negative increment.  */
9083  if (this_optab == sub_optab
9084      && GET_CODE (op1) == CONST_INT)
9085    {
9086      op1 = GEN_INT (-INTVAL (op1));
9087      this_optab = add_optab;
9088    }
9089
9090  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9091    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9092
9093  /* For a preincrement, see if we can do this with a single instruction.  */
9094  if (!post)
9095    {
9096      icode = (int) this_optab->handlers[(int) mode].insn_code;
9097      if (icode != (int) CODE_FOR_nothing
9098	  /* Make sure that OP0 is valid for operands 0 and 1
9099	     of the insn we want to queue.  */
9100	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9101	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9102	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9103	single_insn = 1;
9104    }
9105
9106  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9107     then we cannot just increment OP0.  We must therefore contrive to
9108     increment the original value.  Then, for postincrement, we can return
9109     OP0 since it is a copy of the old value.  For preincrement, expand here
9110     unless we can do it with a single insn.
9111
9112     Likewise if storing directly into OP0 would clobber high bits
9113     we need to preserve (bad_subreg).  */
9114  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9115    {
9116      /* This is the easiest way to increment the value wherever it is.
9117	 Problems with multiple evaluation of INCREMENTED are prevented
9118	 because either (1) it is a component_ref or preincrement,
9119	 in which case it was stabilized above, or (2) it is an array_ref
9120	 with constant index in an array in a register, which is
9121	 safe to reevaluate.  */
9122      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9123			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9124			    ? MINUS_EXPR : PLUS_EXPR),
9125			   TREE_TYPE (exp),
9126			   incremented,
9127			   TREE_OPERAND (exp, 1));
9128
9129      while (TREE_CODE (incremented) == NOP_EXPR
9130	     || TREE_CODE (incremented) == CONVERT_EXPR)
9131	{
9132	  newexp = convert (TREE_TYPE (incremented), newexp);
9133	  incremented = TREE_OPERAND (incremented, 0);
9134	}
9135
9136      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9137      return post ? op0 : temp;
9138    }
9139
9140  if (post)
9141    {
9142      /* We have a true reference to the value in OP0.
9143	 If there is an insn to add or subtract in this mode, queue it.
9144	 Queueing the increment insn avoids the register shuffling
9145	 that often results if we must increment now and first save
9146	 the old value for subsequent use.  */
9147
9148#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9149      op0 = stabilize (op0);
9150#endif
9151
9152      icode = (int) this_optab->handlers[(int) mode].insn_code;
9153      if (icode != (int) CODE_FOR_nothing
9154	  /* Make sure that OP0 is valid for operands 0 and 1
9155	     of the insn we want to queue.  */
9156	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9157	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9158	{
9159	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9160	    op1 = force_reg (mode, op1);
9161
9162	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9163	}
9164      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9165	{
9166	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9167		      ? force_reg (Pmode, XEXP (op0, 0))
9168		      : copy_to_reg (XEXP (op0, 0)));
9169	  rtx temp, result;
9170
9171	  op0 = replace_equiv_address (op0, addr);
9172	  temp = force_reg (GET_MODE (op0), op0);
9173	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9174	    op1 = force_reg (mode, op1);
9175
9176	  /* The increment queue is LIFO, thus we have to `queue'
9177	     the instructions in reverse order.  */
9178	  enqueue_insn (op0, gen_move_insn (op0, temp));
9179	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9180	  return result;
9181	}
9182    }
9183
9184  /* Preincrement, or we can't increment with one simple insn.  */
9185  if (post)
9186    /* Save a copy of the value before inc or dec, to return it later.  */
9187    temp = value = copy_to_reg (op0);
9188  else
9189    /* Arrange to return the incremented value.  */
9190    /* Copy the rtx because expand_binop will protect from the queue,
9191       and the results of that would be invalid for us to return
9192       if our caller does emit_queue before using our result.  */
9193    temp = copy_rtx (value = op0);
9194
9195  /* Increment however we can.  */
9196  op1 = expand_binop (mode, this_optab, value, op1, op0,
9197		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9198
9199  /* Make sure the value is stored into OP0.  */
9200  if (op1 != op0)
9201    emit_move_insn (op0, op1);
9202
9203  return temp;
9204}
9205
9206/* At the start of a function, record that we have no previously-pushed
9207   arguments waiting to be popped.  */
9208
9209void
9210init_pending_stack_adjust ()
9211{
9212  pending_stack_adjust = 0;
9213}
9214
9215/* When exiting from function, if safe, clear out any pending stack adjust
9216   so the adjustment won't get done.
9217
9218   Note, if the current function calls alloca, then it must have a
9219   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9220
9221void
9222clear_pending_stack_adjust ()
9223{
9224#ifdef EXIT_IGNORE_STACK
9225  if (optimize > 0
9226      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9227      && EXIT_IGNORE_STACK
9228      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9229      && ! flag_inline_functions)
9230    {
9231      stack_pointer_delta -= pending_stack_adjust,
9232      pending_stack_adjust = 0;
9233    }
9234#endif
9235}
9236
9237/* Pop any previously-pushed arguments that have not been popped yet.  */
9238
9239void
9240do_pending_stack_adjust ()
9241{
9242  if (inhibit_defer_pop == 0)
9243    {
9244      if (pending_stack_adjust != 0)
9245	adjust_stack (GEN_INT (pending_stack_adjust));
9246      pending_stack_adjust = 0;
9247    }
9248}
9249
9250/* Expand conditional expressions.  */
9251
9252/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9253   LABEL is an rtx of code CODE_LABEL, in this function and all the
9254   functions here.  */
9255
9256void
9257jumpifnot (exp, label)
9258     tree exp;
9259     rtx label;
9260{
9261  do_jump (exp, label, NULL_RTX);
9262}
9263
9264/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9265
9266void
9267jumpif (exp, label)
9268     tree exp;
9269     rtx label;
9270{
9271  do_jump (exp, NULL_RTX, label);
9272}
9273
9274/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9275   the result is zero, or IF_TRUE_LABEL if the result is one.
9276   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9277   meaning fall through in that case.
9278
9279   do_jump always does any pending stack adjust except when it does not
9280   actually perform a jump.  An example where there is no jump
9281   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9282
9283   This function is responsible for optimizing cases such as
9284   &&, || and comparison operators in EXP.  */
9285
9286void
9287do_jump (exp, if_false_label, if_true_label)
9288     tree exp;
9289     rtx if_false_label, if_true_label;
9290{
9291  enum tree_code code = TREE_CODE (exp);
9292  /* Some cases need to create a label to jump to
9293     in order to properly fall through.
9294     These cases set DROP_THROUGH_LABEL nonzero.  */
9295  rtx drop_through_label = 0;
9296  rtx temp;
9297  int i;
9298  tree type;
9299  enum machine_mode mode;
9300
9301#ifdef MAX_INTEGER_COMPUTATION_MODE
9302  check_max_integer_computation_mode (exp);
9303#endif
9304
9305  emit_queue ();
9306
9307  switch (code)
9308    {
9309    case ERROR_MARK:
9310      break;
9311
9312    case INTEGER_CST:
9313      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9314      if (temp)
9315	emit_jump (temp);
9316      break;
9317
9318#if 0
9319      /* This is not true with #pragma weak  */
9320    case ADDR_EXPR:
9321      /* The address of something can never be zero.  */
9322      if (if_true_label)
9323	emit_jump (if_true_label);
9324      break;
9325#endif
9326
9327    case NOP_EXPR:
9328      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9329	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9330	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9331	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9332	goto normal;
9333    case CONVERT_EXPR:
9334      /* If we are narrowing the operand, we have to do the compare in the
9335	 narrower mode.  */
9336      if ((TYPE_PRECISION (TREE_TYPE (exp))
9337	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9338	goto normal;
9339    case NON_LVALUE_EXPR:
9340    case REFERENCE_EXPR:
9341    case ABS_EXPR:
9342    case NEGATE_EXPR:
9343    case LROTATE_EXPR:
9344    case RROTATE_EXPR:
9345      /* These cannot change zero->non-zero or vice versa.  */
9346      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9347      break;
9348
9349    case WITH_RECORD_EXPR:
9350      /* Put the object on the placeholder list, recurse through our first
9351	 operand, and pop the list.  */
9352      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9353				    placeholder_list);
9354      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9355      placeholder_list = TREE_CHAIN (placeholder_list);
9356      break;
9357
9358#if 0
9359      /* This is never less insns than evaluating the PLUS_EXPR followed by
9360	 a test and can be longer if the test is eliminated.  */
9361    case PLUS_EXPR:
9362      /* Reduce to minus.  */
9363      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9364		   TREE_OPERAND (exp, 0),
9365		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9366				 TREE_OPERAND (exp, 1))));
9367      /* Process as MINUS.  */
9368#endif
9369
9370    case MINUS_EXPR:
9371      /* Non-zero iff operands of minus differ.  */
9372      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9373				  TREE_OPERAND (exp, 0),
9374				  TREE_OPERAND (exp, 1)),
9375			   NE, NE, if_false_label, if_true_label);
9376      break;
9377
9378    case BIT_AND_EXPR:
9379      /* If we are AND'ing with a small constant, do this comparison in the
9380	 smallest type that fits.  If the machine doesn't have comparisons
9381	 that small, it will be converted back to the wider comparison.
9382	 This helps if we are testing the sign bit of a narrower object.
9383	 combine can't do this for us because it can't know whether a
9384	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
9385
9386      if (! SLOW_BYTE_ACCESS
9387	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9388	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9389	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9390	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9391	  && (type = type_for_mode (mode, 1)) != 0
9392	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9393	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9394	      != CODE_FOR_nothing))
9395	{
9396	  do_jump (convert (type, exp), if_false_label, if_true_label);
9397	  break;
9398	}
9399      goto normal;
9400
9401    case TRUTH_NOT_EXPR:
9402      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9403      break;
9404
9405    case TRUTH_ANDIF_EXPR:
9406      if (if_false_label == 0)
9407	if_false_label = drop_through_label = gen_label_rtx ();
9408      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9409      start_cleanup_deferral ();
9410      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9411      end_cleanup_deferral ();
9412      break;
9413
9414    case TRUTH_ORIF_EXPR:
9415      if (if_true_label == 0)
9416	if_true_label = drop_through_label = gen_label_rtx ();
9417      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9418      start_cleanup_deferral ();
9419      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9420      end_cleanup_deferral ();
9421      break;
9422
9423    case COMPOUND_EXPR:
9424      push_temp_slots ();
9425      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9426      preserve_temp_slots (NULL_RTX);
9427      free_temp_slots ();
9428      pop_temp_slots ();
9429      emit_queue ();
9430      do_pending_stack_adjust ();
9431      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9432      break;
9433
9434    case COMPONENT_REF:
9435    case BIT_FIELD_REF:
9436    case ARRAY_REF:
9437    case ARRAY_RANGE_REF:
9438      {
9439	HOST_WIDE_INT bitsize, bitpos;
9440	int unsignedp;
9441	enum machine_mode mode;
9442	tree type;
9443	tree offset;
9444	int volatilep = 0;
9445
9446	/* Get description of this reference.  We don't actually care
9447	   about the underlying object here.  */
9448	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9449			     &unsignedp, &volatilep);
9450
9451	type = type_for_size (bitsize, unsignedp);
9452	if (! SLOW_BYTE_ACCESS
9453	    && type != 0 && bitsize >= 0
9454	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9455	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9456		!= CODE_FOR_nothing))
9457	  {
9458	    do_jump (convert (type, exp), if_false_label, if_true_label);
9459	    break;
9460	  }
9461	goto normal;
9462      }
9463
9464    case COND_EXPR:
9465      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9466      if (integer_onep (TREE_OPERAND (exp, 1))
9467	  && integer_zerop (TREE_OPERAND (exp, 2)))
9468	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9469
9470      else if (integer_zerop (TREE_OPERAND (exp, 1))
9471	       && integer_onep (TREE_OPERAND (exp, 2)))
9472	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9473
9474      else
9475	{
9476	  rtx label1 = gen_label_rtx ();
9477	  drop_through_label = gen_label_rtx ();
9478
9479	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9480
9481	  start_cleanup_deferral ();
9482	  /* Now the THEN-expression.  */
9483	  do_jump (TREE_OPERAND (exp, 1),
9484		   if_false_label ? if_false_label : drop_through_label,
9485		   if_true_label ? if_true_label : drop_through_label);
9486	  /* In case the do_jump just above never jumps.  */
9487	  do_pending_stack_adjust ();
9488	  emit_label (label1);
9489
9490	  /* Now the ELSE-expression.  */
9491	  do_jump (TREE_OPERAND (exp, 2),
9492		   if_false_label ? if_false_label : drop_through_label,
9493		   if_true_label ? if_true_label : drop_through_label);
9494	  end_cleanup_deferral ();
9495	}
9496      break;
9497
9498    case EQ_EXPR:
9499      {
9500	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9501
9502	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9503	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9504	  {
9505	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9506	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9507	    do_jump
9508	      (fold
9509	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9510		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9511				    fold (build1 (REALPART_EXPR,
9512						  TREE_TYPE (inner_type),
9513						  exp0)),
9514				    fold (build1 (REALPART_EXPR,
9515						  TREE_TYPE (inner_type),
9516						  exp1)))),
9517		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9518				    fold (build1 (IMAGPART_EXPR,
9519						  TREE_TYPE (inner_type),
9520						  exp0)),
9521				    fold (build1 (IMAGPART_EXPR,
9522						  TREE_TYPE (inner_type),
9523						  exp1)))))),
9524	       if_false_label, if_true_label);
9525	  }
9526
9527	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9528	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9529
9530	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9531		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9532	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9533	else
9534	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9535	break;
9536      }
9537
9538    case NE_EXPR:
9539      {
9540	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9541
9542	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9543	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9544	  {
9545	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9546	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9547	    do_jump
9548	      (fold
9549	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9550		       fold (build (NE_EXPR, TREE_TYPE (exp),
9551				    fold (build1 (REALPART_EXPR,
9552						  TREE_TYPE (inner_type),
9553						  exp0)),
9554				    fold (build1 (REALPART_EXPR,
9555						  TREE_TYPE (inner_type),
9556						  exp1)))),
9557		       fold (build (NE_EXPR, TREE_TYPE (exp),
9558				    fold (build1 (IMAGPART_EXPR,
9559						  TREE_TYPE (inner_type),
9560						  exp0)),
9561				    fold (build1 (IMAGPART_EXPR,
9562						  TREE_TYPE (inner_type),
9563						  exp1)))))),
9564	       if_false_label, if_true_label);
9565	  }
9566
9567	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9568	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9569
9570	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9571		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9572	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9573	else
9574	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9575	break;
9576      }
9577
9578    case LT_EXPR:
9579      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9580      if (GET_MODE_CLASS (mode) == MODE_INT
9581	  && ! can_compare_p (LT, mode, ccp_jump))
9582	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9583      else
9584	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9585      break;
9586
9587    case LE_EXPR:
9588      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9589      if (GET_MODE_CLASS (mode) == MODE_INT
9590	  && ! can_compare_p (LE, mode, ccp_jump))
9591	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9592      else
9593	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9594      break;
9595
9596    case GT_EXPR:
9597      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9598      if (GET_MODE_CLASS (mode) == MODE_INT
9599	  && ! can_compare_p (GT, mode, ccp_jump))
9600	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9601      else
9602	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9603      break;
9604
9605    case GE_EXPR:
9606      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9607      if (GET_MODE_CLASS (mode) == MODE_INT
9608	  && ! can_compare_p (GE, mode, ccp_jump))
9609	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9610      else
9611	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9612      break;
9613
9614    case UNORDERED_EXPR:
9615    case ORDERED_EXPR:
9616      {
9617	enum rtx_code cmp, rcmp;
9618	int do_rev;
9619
9620	if (code == UNORDERED_EXPR)
9621	  cmp = UNORDERED, rcmp = ORDERED;
9622	else
9623	  cmp = ORDERED, rcmp = UNORDERED;
9624	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9625
9626	do_rev = 0;
9627	if (! can_compare_p (cmp, mode, ccp_jump)
9628	    && (can_compare_p (rcmp, mode, ccp_jump)
9629		/* If the target doesn't provide either UNORDERED or ORDERED
9630		   comparisons, canonicalize on UNORDERED for the library.  */
9631		|| rcmp == UNORDERED))
9632	  do_rev = 1;
9633
9634        if (! do_rev)
9635	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9636	else
9637	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9638      }
9639      break;
9640
9641    {
9642      enum rtx_code rcode1;
9643      enum tree_code tcode2;
9644
9645      case UNLT_EXPR:
9646	rcode1 = UNLT;
9647	tcode2 = LT_EXPR;
9648	goto unordered_bcc;
9649      case UNLE_EXPR:
9650	rcode1 = UNLE;
9651	tcode2 = LE_EXPR;
9652	goto unordered_bcc;
9653      case UNGT_EXPR:
9654	rcode1 = UNGT;
9655	tcode2 = GT_EXPR;
9656	goto unordered_bcc;
9657      case UNGE_EXPR:
9658	rcode1 = UNGE;
9659	tcode2 = GE_EXPR;
9660	goto unordered_bcc;
9661      case UNEQ_EXPR:
9662	rcode1 = UNEQ;
9663	tcode2 = EQ_EXPR;
9664	goto unordered_bcc;
9665
9666      unordered_bcc:
9667        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9668	if (can_compare_p (rcode1, mode, ccp_jump))
9669	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9670			       if_true_label);
9671	else
9672	  {
9673	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
9674	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
9675	    tree cmp0, cmp1;
9676
9677	    /* If the target doesn't support combined unordered
9678	       compares, decompose into UNORDERED + comparison.  */
9679	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9680	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9681	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9682	    do_jump (exp, if_false_label, if_true_label);
9683	  }
9684      }
9685      break;
9686
9687      /* Special case:
9688		__builtin_expect (<test>, 0)	and
9689		__builtin_expect (<test>, 1)
9690
9691	 We need to do this here, so that <test> is not converted to a SCC
9692	 operation on machines that use condition code registers and COMPARE
9693	 like the PowerPC, and then the jump is done based on whether the SCC
9694	 operation produced a 1 or 0.  */
9695    case CALL_EXPR:
9696      /* Check for a built-in function.  */
9697      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9698	{
9699	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9700	  tree arglist = TREE_OPERAND (exp, 1);
9701
9702	  if (TREE_CODE (fndecl) == FUNCTION_DECL
9703	      && DECL_BUILT_IN (fndecl)
9704	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9705	      && arglist != NULL_TREE
9706	      && TREE_CHAIN (arglist) != NULL_TREE)
9707	    {
9708	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9709						    if_true_label);
9710
9711	      if (seq != NULL_RTX)
9712		{
9713		  emit_insn (seq);
9714		  return;
9715		}
9716	    }
9717	}
9718      /* fall through and generate the normal code.  */
9719
9720    default:
9721    normal:
9722      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9723#if 0
9724      /* This is not needed any more and causes poor code since it causes
9725	 comparisons and tests from non-SI objects to have different code
9726	 sequences.  */
9727      /* Copy to register to avoid generating bad insns by cse
9728	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
9729      if (!cse_not_expected && GET_CODE (temp) == MEM)
9730	temp = copy_to_reg (temp);
9731#endif
9732      do_pending_stack_adjust ();
9733      /* Do any postincrements in the expression that was tested.  */
9734      emit_queue ();
9735
9736      if (GET_CODE (temp) == CONST_INT
9737	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9738	  || GET_CODE (temp) == LABEL_REF)
9739	{
9740	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9741	  if (target)
9742	    emit_jump (target);
9743	}
9744      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9745	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9746	/* Note swapping the labels gives us not-equal.  */
9747	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9748      else if (GET_MODE (temp) != VOIDmode)
9749	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9750				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9751				 GET_MODE (temp), NULL_RTX,
9752				 if_false_label, if_true_label);
9753      else
9754	abort ();
9755    }
9756
9757  if (drop_through_label)
9758    {
9759      /* If do_jump produces code that might be jumped around,
9760	 do any stack adjusts from that code, before the place
9761	 where control merges in.  */
9762      do_pending_stack_adjust ();
9763      emit_label (drop_through_label);
9764    }
9765}
9766
9767/* Given a comparison expression EXP for values too wide to be compared
9768   with one insn, test the comparison and jump to the appropriate label.
9769   The code of EXP is ignored; we always test GT if SWAP is 0,
9770   and LT if SWAP is 1.  */
9771
9772static void
9773do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9774     tree exp;
9775     int swap;
9776     rtx if_false_label, if_true_label;
9777{
9778  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9779  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9780  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9781  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9782
9783  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9784}
9785
9786/* Compare OP0 with OP1, word at a time, in mode MODE.
9787   UNSIGNEDP says to do unsigned comparison.
9788   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
9789
9790void
9791do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9792     enum machine_mode mode;
9793     int unsignedp;
9794     rtx op0, op1;
9795     rtx if_false_label, if_true_label;
9796{
9797  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9798  rtx drop_through_label = 0;
9799  int i;
9800
9801  if (! if_true_label || ! if_false_label)
9802    drop_through_label = gen_label_rtx ();
9803  if (! if_true_label)
9804    if_true_label = drop_through_label;
9805  if (! if_false_label)
9806    if_false_label = drop_through_label;
9807
9808  /* Compare a word at a time, high order first.  */
9809  for (i = 0; i < nwords; i++)
9810    {
9811      rtx op0_word, op1_word;
9812
9813      if (WORDS_BIG_ENDIAN)
9814	{
9815	  op0_word = operand_subword_force (op0, i, mode);
9816	  op1_word = operand_subword_force (op1, i, mode);
9817	}
9818      else
9819	{
9820	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9821	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9822	}
9823
9824      /* All but high-order word must be compared as unsigned.  */
9825      do_compare_rtx_and_jump (op0_word, op1_word, GT,
9826			       (unsignedp || i > 0), word_mode, NULL_RTX,
9827			       NULL_RTX, if_true_label);
9828
9829      /* Consider lower words only if these are equal.  */
9830      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9831			       NULL_RTX, NULL_RTX, if_false_label);
9832    }
9833
9834  if (if_false_label)
9835    emit_jump (if_false_label);
9836  if (drop_through_label)
9837    emit_label (drop_through_label);
9838}
9839
9840/* Given an EQ_EXPR expression EXP for values too wide to be compared
9841   with one insn, test the comparison and jump to the appropriate label.  */
9842
9843static void
9844do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9845     tree exp;
9846     rtx if_false_label, if_true_label;
9847{
9848  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9849  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9850  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9851  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9852  int i;
9853  rtx drop_through_label = 0;
9854
9855  if (! if_false_label)
9856    drop_through_label = if_false_label = gen_label_rtx ();
9857
9858  for (i = 0; i < nwords; i++)
9859    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9860			     operand_subword_force (op1, i, mode),
9861			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9862			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
9863
9864  if (if_true_label)
9865    emit_jump (if_true_label);
9866  if (drop_through_label)
9867    emit_label (drop_through_label);
9868}
9869
9870/* Jump according to whether OP0 is 0.
9871   We assume that OP0 has an integer mode that is too wide
9872   for the available compare insns.  */
9873
9874void
9875do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9876     rtx op0;
9877     rtx if_false_label, if_true_label;
9878{
9879  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9880  rtx part;
9881  int i;
9882  rtx drop_through_label = 0;
9883
9884  /* The fastest way of doing this comparison on almost any machine is to
9885     "or" all the words and compare the result.  If all have to be loaded
9886     from memory and this is a very wide item, it's possible this may
9887     be slower, but that's highly unlikely.  */
9888
9889  part = gen_reg_rtx (word_mode);
9890  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9891  for (i = 1; i < nwords && part != 0; i++)
9892    part = expand_binop (word_mode, ior_optab, part,
9893			 operand_subword_force (op0, i, GET_MODE (op0)),
9894			 part, 1, OPTAB_WIDEN);
9895
9896  if (part != 0)
9897    {
9898      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9899			       NULL_RTX, if_false_label, if_true_label);
9900
9901      return;
9902    }
9903
9904  /* If we couldn't do the "or" simply, do this with a series of compares.  */
9905  if (! if_false_label)
9906    drop_through_label = if_false_label = gen_label_rtx ();
9907
9908  for (i = 0; i < nwords; i++)
9909    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9910			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
9911			     if_false_label, NULL_RTX);
9912
9913  if (if_true_label)
9914    emit_jump (if_true_label);
9915
9916  if (drop_through_label)
9917    emit_label (drop_through_label);
9918}
9919
9920/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9921   (including code to compute the values to be compared)
9922   and set (CC0) according to the result.
9923   The decision as to signed or unsigned comparison must be made by the caller.
9924
9925   We force a stack adjustment unless there are currently
9926   things pushed on the stack that aren't yet used.
9927
9928   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9929   compared.  */
9930
9931rtx
9932compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9933     rtx op0, op1;
9934     enum rtx_code code;
9935     int unsignedp;
9936     enum machine_mode mode;
9937     rtx size;
9938{
9939  rtx tem;
9940
9941  /* If one operand is constant, make it the second one.  Only do this
9942     if the other operand is not constant as well.  */
9943
9944  if (swap_commutative_operands_p (op0, op1))
9945    {
9946      tem = op0;
9947      op0 = op1;
9948      op1 = tem;
9949      code = swap_condition (code);
9950    }
9951
9952  if (flag_force_mem)
9953    {
9954      op0 = force_not_mem (op0);
9955      op1 = force_not_mem (op1);
9956    }
9957
9958  do_pending_stack_adjust ();
9959
9960  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9961      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9962    return tem;
9963
9964#if 0
9965  /* There's no need to do this now that combine.c can eliminate lots of
9966     sign extensions.  This can be less efficient in certain cases on other
9967     machines.  */
9968
9969  /* If this is a signed equality comparison, we can do it as an
9970     unsigned comparison since zero-extension is cheaper than sign
9971     extension and comparisons with zero are done as unsigned.  This is
9972     the case even on machines that can do fast sign extension, since
9973     zero-extension is easier to combine with other operations than
9974     sign-extension is.  If we are comparing against a constant, we must
9975     convert it to what it would look like unsigned.  */
9976  if ((code == EQ || code == NE) && ! unsignedp
9977      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9978    {
9979      if (GET_CODE (op1) == CONST_INT
9980	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9981	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9982      unsignedp = 1;
9983    }
9984#endif
9985
9986  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9987
9988  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9989}
9990
9991/* Like do_compare_and_jump but expects the values to compare as two rtx's.
9992   The decision as to signed or unsigned comparison must be made by the caller.
9993
9994   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9995   compared.  */
9996
9997void
9998do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9999			 if_false_label, if_true_label)
10000     rtx op0, op1;
10001     enum rtx_code code;
10002     int unsignedp;
10003     enum machine_mode mode;
10004     rtx size;
10005     rtx if_false_label, if_true_label;
10006{
10007  rtx tem;
10008  int dummy_true_label = 0;
10009
10010  /* Reverse the comparison if that is safe and we want to jump if it is
10011     false.  */
10012  if (! if_true_label && ! FLOAT_MODE_P (mode))
10013    {
10014      if_true_label = if_false_label;
10015      if_false_label = 0;
10016      code = reverse_condition (code);
10017    }
10018
10019  /* If one operand is constant, make it the second one.  Only do this
10020     if the other operand is not constant as well.  */
10021
10022  if (swap_commutative_operands_p (op0, op1))
10023    {
10024      tem = op0;
10025      op0 = op1;
10026      op1 = tem;
10027      code = swap_condition (code);
10028    }
10029
10030  if (flag_force_mem)
10031    {
10032      op0 = force_not_mem (op0);
10033      op1 = force_not_mem (op1);
10034    }
10035
10036  do_pending_stack_adjust ();
10037
10038  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10039      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10040    {
10041      if (tem == const_true_rtx)
10042	{
10043	  if (if_true_label)
10044	    emit_jump (if_true_label);
10045	}
10046      else
10047	{
10048	  if (if_false_label)
10049	    emit_jump (if_false_label);
10050	}
10051      return;
10052    }
10053
10054#if 0
10055  /* There's no need to do this now that combine.c can eliminate lots of
10056     sign extensions.  This can be less efficient in certain cases on other
10057     machines.  */
10058
10059  /* If this is a signed equality comparison, we can do it as an
10060     unsigned comparison since zero-extension is cheaper than sign
10061     extension and comparisons with zero are done as unsigned.  This is
10062     the case even on machines that can do fast sign extension, since
10063     zero-extension is easier to combine with other operations than
10064     sign-extension is.  If we are comparing against a constant, we must
10065     convert it to what it would look like unsigned.  */
10066  if ((code == EQ || code == NE) && ! unsignedp
10067      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10068    {
10069      if (GET_CODE (op1) == CONST_INT
10070	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10071	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10072      unsignedp = 1;
10073    }
10074#endif
10075
10076  if (! if_true_label)
10077    {
10078      dummy_true_label = 1;
10079      if_true_label = gen_label_rtx ();
10080    }
10081
10082  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10083			   if_true_label);
10084
10085  if (if_false_label)
10086    emit_jump (if_false_label);
10087  if (dummy_true_label)
10088    emit_label (if_true_label);
10089}
10090
10091/* Generate code for a comparison expression EXP (including code to compute
10092   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10093   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10094   generated code will drop through.
10095   SIGNED_CODE should be the rtx operation for this comparison for
10096   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10097
10098   We force a stack adjustment unless there are currently
10099   things pushed on the stack that aren't yet used.  */
10100
10101static void
10102do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10103		     if_true_label)
10104     tree exp;
10105     enum rtx_code signed_code, unsigned_code;
10106     rtx if_false_label, if_true_label;
10107{
10108  rtx op0, op1;
10109  tree type;
10110  enum machine_mode mode;
10111  int unsignedp;
10112  enum rtx_code code;
10113
10114  /* Don't crash if the comparison was erroneous.  */
10115  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10116  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10117    return;
10118
10119  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10120  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10121    return;
10122
10123  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10124  mode = TYPE_MODE (type);
10125  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10126      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10127	  || (GET_MODE_BITSIZE (mode)
10128	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10129								      1)))))))
10130    {
10131      /* op0 might have been replaced by promoted constant, in which
10132	 case the type of second argument should be used.  */
10133      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10134      mode = TYPE_MODE (type);
10135    }
10136  unsignedp = TREE_UNSIGNED (type);
10137  code = unsignedp ? unsigned_code : signed_code;
10138
10139#ifdef HAVE_canonicalize_funcptr_for_compare
10140  /* If function pointers need to be "canonicalized" before they can
10141     be reliably compared, then canonicalize them.  */
10142  if (HAVE_canonicalize_funcptr_for_compare
10143      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10144      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10145	  == FUNCTION_TYPE))
10146    {
10147      rtx new_op0 = gen_reg_rtx (mode);
10148
10149      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10150      op0 = new_op0;
10151    }
10152
10153  if (HAVE_canonicalize_funcptr_for_compare
10154      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10155      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10156	  == FUNCTION_TYPE))
10157    {
10158      rtx new_op1 = gen_reg_rtx (mode);
10159
10160      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10161      op1 = new_op1;
10162    }
10163#endif
10164
10165  /* Do any postincrements in the expression that was tested.  */
10166  emit_queue ();
10167
10168  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10169			   ((mode == BLKmode)
10170			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10171			   if_false_label, if_true_label);
10172}
10173
10174/* Generate code to calculate EXP using a store-flag instruction
10175   and return an rtx for the result.  EXP is either a comparison
10176   or a TRUTH_NOT_EXPR whose operand is a comparison.
10177
10178   If TARGET is nonzero, store the result there if convenient.
10179
10180   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10181   cheap.
10182
10183   Return zero if there is no suitable set-flag instruction
10184   available on this machine.
10185
10186   Once expand_expr has been called on the arguments of the comparison,
10187   we are committed to doing the store flag, since it is not safe to
10188   re-evaluate the expression.  We emit the store-flag insn by calling
10189   emit_store_flag, but only expand the arguments if we have a reason
10190   to believe that emit_store_flag will be successful.  If we think that
10191   it will, but it isn't, we have to simulate the store-flag with a
10192   set/jump/set sequence.  */
10193
10194static rtx
10195do_store_flag (exp, target, mode, only_cheap)
10196     tree exp;
10197     rtx target;
10198     enum machine_mode mode;
10199     int only_cheap;
10200{
10201  enum rtx_code code;
10202  tree arg0, arg1, type;
10203  tree tem;
10204  enum machine_mode operand_mode;
10205  int invert = 0;
10206  int unsignedp;
10207  rtx op0, op1;
10208  enum insn_code icode;
10209  rtx subtarget = target;
10210  rtx result, label;
10211
10212  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10213     result at the end.  We can't simply invert the test since it would
10214     have already been inverted if it were valid.  This case occurs for
10215     some floating-point comparisons.  */
10216
10217  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10218    invert = 1, exp = TREE_OPERAND (exp, 0);
10219
10220  arg0 = TREE_OPERAND (exp, 0);
10221  arg1 = TREE_OPERAND (exp, 1);
10222
10223  /* Don't crash if the comparison was erroneous.  */
10224  if (arg0 == error_mark_node || arg1 == error_mark_node)
10225    return const0_rtx;
10226
10227  type = TREE_TYPE (arg0);
10228  operand_mode = TYPE_MODE (type);
10229  unsignedp = TREE_UNSIGNED (type);
10230
10231  /* We won't bother with BLKmode store-flag operations because it would mean
10232     passing a lot of information to emit_store_flag.  */
10233  if (operand_mode == BLKmode)
10234    return 0;
10235
10236  /* We won't bother with store-flag operations involving function pointers
10237     when function pointers must be canonicalized before comparisons.  */
10238#ifdef HAVE_canonicalize_funcptr_for_compare
10239  if (HAVE_canonicalize_funcptr_for_compare
10240      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10241	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10242	       == FUNCTION_TYPE))
10243	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10244	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10245		  == FUNCTION_TYPE))))
10246    return 0;
10247#endif
10248
10249  STRIP_NOPS (arg0);
10250  STRIP_NOPS (arg1);
10251
10252  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10253     operation of some type.  Some comparisons against 1 and -1 can be
10254     converted to comparisons with zero.  Do so here so that the tests
10255     below will be aware that we have a comparison with zero.   These
10256     tests will not catch constants in the first operand, but constants
10257     are rarely passed as the first operand.  */
10258
10259  switch (TREE_CODE (exp))
10260    {
10261    case EQ_EXPR:
10262      code = EQ;
10263      break;
10264    case NE_EXPR:
10265      code = NE;
10266      break;
10267    case LT_EXPR:
10268      if (integer_onep (arg1))
10269	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10270      else
10271	code = unsignedp ? LTU : LT;
10272      break;
10273    case LE_EXPR:
10274      if (! unsignedp && integer_all_onesp (arg1))
10275	arg1 = integer_zero_node, code = LT;
10276      else
10277	code = unsignedp ? LEU : LE;
10278      break;
10279    case GT_EXPR:
10280      if (! unsignedp && integer_all_onesp (arg1))
10281	arg1 = integer_zero_node, code = GE;
10282      else
10283	code = unsignedp ? GTU : GT;
10284      break;
10285    case GE_EXPR:
10286      if (integer_onep (arg1))
10287	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10288      else
10289	code = unsignedp ? GEU : GE;
10290      break;
10291
10292    case UNORDERED_EXPR:
10293      code = UNORDERED;
10294      break;
10295    case ORDERED_EXPR:
10296      code = ORDERED;
10297      break;
10298    case UNLT_EXPR:
10299      code = UNLT;
10300      break;
10301    case UNLE_EXPR:
10302      code = UNLE;
10303      break;
10304    case UNGT_EXPR:
10305      code = UNGT;
10306      break;
10307    case UNGE_EXPR:
10308      code = UNGE;
10309      break;
10310    case UNEQ_EXPR:
10311      code = UNEQ;
10312      break;
10313
10314    default:
10315      abort ();
10316    }
10317
10318  /* Put a constant second.  */
10319  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10320    {
10321      tem = arg0; arg0 = arg1; arg1 = tem;
10322      code = swap_condition (code);
10323    }
10324
10325  /* If this is an equality or inequality test of a single bit, we can
10326     do this by shifting the bit being tested to the low-order bit and
10327     masking the result with the constant 1.  If the condition was EQ,
10328     we xor it with 1.  This does not require an scc insn and is faster
10329     than an scc insn even if we have it.  */
10330
10331  if ((code == NE || code == EQ)
10332      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10333      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10334    {
10335      tree inner = TREE_OPERAND (arg0, 0);
10336      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10337      int ops_unsignedp;
10338
10339      /* If INNER is a right shift of a constant and it plus BITNUM does
10340	 not overflow, adjust BITNUM and INNER.  */
10341
10342      if (TREE_CODE (inner) == RSHIFT_EXPR
10343	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10344	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10345	  && bitnum < TYPE_PRECISION (type)
10346	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10347				   bitnum - TYPE_PRECISION (type)))
10348	{
10349	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10350	  inner = TREE_OPERAND (inner, 0);
10351	}
10352
10353      /* If we are going to be able to omit the AND below, we must do our
10354	 operations as unsigned.  If we must use the AND, we have a choice.
10355	 Normally unsigned is faster, but for some machines signed is.  */
10356      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10357#ifdef LOAD_EXTEND_OP
10358		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10359#else
10360		       : 1
10361#endif
10362		       );
10363
10364      if (! get_subtarget (subtarget)
10365	  || GET_MODE (subtarget) != operand_mode
10366	  || ! safe_from_p (subtarget, inner, 1))
10367	subtarget = 0;
10368
10369      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10370
10371      if (bitnum != 0)
10372	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10373			    size_int (bitnum), subtarget, ops_unsignedp);
10374
10375      if (GET_MODE (op0) != mode)
10376	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10377
10378      if ((code == EQ && ! invert) || (code == NE && invert))
10379	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10380			    ops_unsignedp, OPTAB_LIB_WIDEN);
10381
10382      /* Put the AND last so it can combine with more things.  */
10383      if (bitnum != TYPE_PRECISION (type) - 1)
10384	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10385
10386      return op0;
10387    }
10388
10389  /* Now see if we are likely to be able to do this.  Return if not.  */
10390  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10391    return 0;
10392
10393  icode = setcc_gen_code[(int) code];
10394  if (icode == CODE_FOR_nothing
10395      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10396    {
10397      /* We can only do this if it is one of the special cases that
10398	 can be handled without an scc insn.  */
10399      if ((code == LT && integer_zerop (arg1))
10400	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10401	;
10402      else if (BRANCH_COST >= 0
10403	       && ! only_cheap && (code == NE || code == EQ)
10404	       && TREE_CODE (type) != REAL_TYPE
10405	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10406		    != CODE_FOR_nothing)
10407		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10408		       != CODE_FOR_nothing)))
10409	;
10410      else
10411	return 0;
10412    }
10413
10414  if (! get_subtarget (target)
10415      || GET_MODE (subtarget) != operand_mode
10416      || ! safe_from_p (subtarget, arg1, 1))
10417    subtarget = 0;
10418
10419  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10420  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10421
10422  if (target == 0)
10423    target = gen_reg_rtx (mode);
10424
10425  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10426     because, if the emit_store_flag does anything it will succeed and
10427     OP0 and OP1 will not be used subsequently.  */
10428
10429  result = emit_store_flag (target, code,
10430			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10431			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10432			    operand_mode, unsignedp, 1);
10433
10434  if (result)
10435    {
10436      if (invert)
10437	result = expand_binop (mode, xor_optab, result, const1_rtx,
10438			       result, 0, OPTAB_LIB_WIDEN);
10439      return result;
10440    }
10441
10442  /* If this failed, we have to do this with set/compare/jump/set code.  */
10443  if (GET_CODE (target) != REG
10444      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10445    target = gen_reg_rtx (GET_MODE (target));
10446
10447  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10448  result = compare_from_rtx (op0, op1, code, unsignedp,
10449			     operand_mode, NULL_RTX);
10450  if (GET_CODE (result) == CONST_INT)
10451    return (((result == const0_rtx && ! invert)
10452	     || (result != const0_rtx && invert))
10453	    ? const0_rtx : const1_rtx);
10454
10455  /* The code of RESULT may not match CODE if compare_from_rtx
10456     decided to swap its operands and reverse the original code.
10457
10458     We know that compare_from_rtx returns either a CONST_INT or
10459     a new comparison code, so it is safe to just extract the
10460     code from RESULT.  */
10461  code = GET_CODE (result);
10462
10463  label = gen_label_rtx ();
10464  if (bcc_gen_fctn[(int) code] == 0)
10465    abort ();
10466
10467  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10468  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10469  emit_label (label);
10470
10471  return target;
10472}
10473
10474
10475/* Stubs in case we haven't got a casesi insn.  */
10476#ifndef HAVE_casesi
10477# define HAVE_casesi 0
10478# define gen_casesi(a, b, c, d, e) (0)
10479# define CODE_FOR_casesi CODE_FOR_nothing
10480#endif
10481
10482/* If the machine does not have a case insn that compares the bounds,
10483   this means extra overhead for dispatch tables, which raises the
10484   threshold for using them.  */
10485#ifndef CASE_VALUES_THRESHOLD
10486#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10487#endif /* CASE_VALUES_THRESHOLD */
10488
10489unsigned int
10490case_values_threshold ()
10491{
10492  return CASE_VALUES_THRESHOLD;
10493}
10494
10495/* Attempt to generate a casesi instruction.  Returns 1 if successful,
10496   0 otherwise (i.e. if there is no casesi instruction).  */
10497int
10498try_casesi (index_type, index_expr, minval, range,
10499	    table_label, default_label)
10500     tree index_type, index_expr, minval, range;
10501     rtx table_label ATTRIBUTE_UNUSED;
10502     rtx default_label;
10503{
10504  enum machine_mode index_mode = SImode;
10505  int index_bits = GET_MODE_BITSIZE (index_mode);
10506  rtx op1, op2, index;
10507  enum machine_mode op_mode;
10508
10509  if (! HAVE_casesi)
10510    return 0;
10511
10512  /* Convert the index to SImode.  */
10513  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10514    {
10515      enum machine_mode omode = TYPE_MODE (index_type);
10516      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10517
10518      /* We must handle the endpoints in the original mode.  */
10519      index_expr = build (MINUS_EXPR, index_type,
10520			  index_expr, minval);
10521      minval = integer_zero_node;
10522      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10523      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10524			       omode, 1, default_label);
10525      /* Now we can safely truncate.  */
10526      index = convert_to_mode (index_mode, index, 0);
10527    }
10528  else
10529    {
10530      if (TYPE_MODE (index_type) != index_mode)
10531	{
10532	  index_expr = convert (type_for_size (index_bits, 0),
10533				index_expr);
10534	  index_type = TREE_TYPE (index_expr);
10535	}
10536
10537      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10538    }
10539  emit_queue ();
10540  index = protect_from_queue (index, 0);
10541  do_pending_stack_adjust ();
10542
10543  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10544  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10545      (index, op_mode))
10546    index = copy_to_mode_reg (op_mode, index);
10547
10548  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10549
10550  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10551  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10552		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10553  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10554      (op1, op_mode))
10555    op1 = copy_to_mode_reg (op_mode, op1);
10556
10557  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10558
10559  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10560  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10561		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
10562  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10563      (op2, op_mode))
10564    op2 = copy_to_mode_reg (op_mode, op2);
10565
10566  emit_jump_insn (gen_casesi (index, op1, op2,
10567			      table_label, default_label));
10568  return 1;
10569}
10570
10571/* Attempt to generate a tablejump instruction; same concept.  */
10572#ifndef HAVE_tablejump
10573#define HAVE_tablejump 0
10574#define gen_tablejump(x, y) (0)
10575#endif
10576
10577/* Subroutine of the next function.
10578
10579   INDEX is the value being switched on, with the lowest value
10580   in the table already subtracted.
10581   MODE is its expected mode (needed if INDEX is constant).
10582   RANGE is the length of the jump table.
10583   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10584
10585   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10586   index value is out of range.  */
10587
10588static void
10589do_tablejump (index, mode, range, table_label, default_label)
10590     rtx index, range, table_label, default_label;
10591     enum machine_mode mode;
10592{
10593  rtx temp, vector;
10594
10595  /* Do an unsigned comparison (in the proper mode) between the index
10596     expression and the value which represents the length of the range.
10597     Since we just finished subtracting the lower bound of the range
10598     from the index expression, this comparison allows us to simultaneously
10599     check that the original index expression value is both greater than
10600     or equal to the minimum value of the range and less than or equal to
10601     the maximum value of the range.  */
10602
10603  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10604			   default_label);
10605
10606  /* If index is in range, it must fit in Pmode.
10607     Convert to Pmode so we can index with it.  */
10608  if (mode != Pmode)
10609    index = convert_to_mode (Pmode, index, 1);
10610
10611  /* Don't let a MEM slip thru, because then INDEX that comes
10612     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10613     and break_out_memory_refs will go to work on it and mess it up.  */
10614#ifdef PIC_CASE_VECTOR_ADDRESS
10615  if (flag_pic && GET_CODE (index) != REG)
10616    index = copy_to_mode_reg (Pmode, index);
10617#endif
10618
10619  /* If flag_force_addr were to affect this address
10620     it could interfere with the tricky assumptions made
10621     about addresses that contain label-refs,
10622     which may be valid only very near the tablejump itself.  */
10623  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10624     GET_MODE_SIZE, because this indicates how large insns are.  The other
10625     uses should all be Pmode, because they are addresses.  This code
10626     could fail if addresses and insns are not the same size.  */
10627  index = gen_rtx_PLUS (Pmode,
10628			gen_rtx_MULT (Pmode, index,
10629				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10630			gen_rtx_LABEL_REF (Pmode, table_label));
10631#ifdef PIC_CASE_VECTOR_ADDRESS
10632  if (flag_pic)
10633    index = PIC_CASE_VECTOR_ADDRESS (index);
10634  else
10635#endif
10636    index = memory_address_noforce (CASE_VECTOR_MODE, index);
10637  temp = gen_reg_rtx (CASE_VECTOR_MODE);
10638  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10639  RTX_UNCHANGING_P (vector) = 1;
10640  convert_move (temp, vector, 0);
10641
10642  emit_jump_insn (gen_tablejump (temp, table_label));
10643
10644  /* If we are generating PIC code or if the table is PC-relative, the
10645     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10646  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10647    emit_barrier ();
10648}
10649
10650int
10651try_tablejump (index_type, index_expr, minval, range,
10652	       table_label, default_label)
10653     tree index_type, index_expr, minval, range;
10654     rtx table_label, default_label;
10655{
10656  rtx index;
10657
10658  if (! HAVE_tablejump)
10659    return 0;
10660
10661  index_expr = fold (build (MINUS_EXPR, index_type,
10662			    convert (index_type, index_expr),
10663			    convert (index_type, minval)));
10664  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10665  emit_queue ();
10666  index = protect_from_queue (index, 0);
10667  do_pending_stack_adjust ();
10668
10669  do_tablejump (index, TYPE_MODE (index_type),
10670		convert_modes (TYPE_MODE (index_type),
10671			       TYPE_MODE (TREE_TYPE (range)),
10672			       expand_expr (range, NULL_RTX,
10673					    VOIDmode, 0),
10674			       TREE_UNSIGNED (TREE_TYPE (range))),
10675		table_label, default_label);
10676  return 1;
10677}
10678