expr.c revision 50397
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
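
/* For instance, CEIL (7, 4) evaluates to (7 + 3) / 4 = 2, i.e. integer
   division that rounds up instead of down.  It is used below to count
   the whole words needed to hold a value, as in
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) in convert_move.  */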

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Nonzero if the machine description has been fixed to accept
   CONSTANT_P_RTX patterns.  We will emit a warning and continue
   if we find we must actually use such a beast.  */
static int can_handle_constant_p;
/* Don't check memory usage, since the code being emitted is itself code
   that checks memory usage.  Used when flag_check_memory_usage is true,
   to avoid infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					   tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
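
/* As an illustration: after init_expr_once has run, a test such as

     if (direct_load[(int) SImode])
       ...

   asks whether some hard register can be loaded from an SImode MEM by
   a single recognized move insn.  convert_move and convert_modes
   consult these tables before referring to a MEM in a narrower mode.  */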

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
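
/* A worked illustration of the heuristic: on a target with no movstr
   patterns and not optimizing for size, MOVE_RATIO is 15, so
   emit_block_move below copies a constant-size block with
   move_by_pieces whenever move_by_pieces_ninsns (INTVAL (size), align)
   comes to fewer than 15 insns, and otherwise falls back to a movstrM
   pattern or a library call.  */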

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  /* Find out if CONSTANT_P_RTX is accepted.  */
  SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
			        FIRST_PSEUDO_REGISTER);
  SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
					  SET_DEST (pat));
  if (recog (pat, insn, &num_clobbers) >= 0)
    can_handle_constant_p = 1;

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
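
/* A minimal sketch of the intended calling pattern (illustrative only;
   OP, TARGET and INC_BODY are made-up names):

     enqueue_insn (reg, inc_body);         queue "reg = reg + 1" for later
     ...
     op = protect_from_queue (op, 0);      immediately before emitting
     emit_insn (gen_move_insn (target, op));
     emit_queue ();                        flush the queued increments

   Protecting OP immediately before use is what the warning above is
   about: if the queue were flushed in between, the protected value
   would go stale.  */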

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
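
/* A small usage sketch (illustrative only; REG and UNSIGNEDP stand for
   a caller's operand and signedness flag):

     rtx wide = convert_to_mode (SImode, reg, unsignedp);

   This returns REG itself if it is already SImode, a gen_lowpart
   reference when the conversion is a no-op truncation, and otherwise a
   fresh pseudo filled in by convert_move.  convert_modes is the same
   except that the caller can state OLDMODE explicitly, which matters
   when X is a CONST_INT and therefore carries VOIDmode.  */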

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
    through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
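
/* A worked example, assuming a hypothetical target with MOVE_MAX 4,
   the usual QI/HI/SI integer modes with move patterns, and no
   slow-unaligned-access penalty: move_by_pieces_ninsns (7, 4) counts
   7/4 = 1 SImode move (3 bytes left), then 3/2 = 1 HImode move (1 byte
   left), then 1/1 = 1 QImode move, i.e. 3 insns in all.  This mirrors
   exactly how move_by_pieces walks from the widest usable mode down.  */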

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			    make_tree (build_pointer_type (void_type_node),
				       XEXP (x, 0)));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
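
/* A minimal sketch of a call (illustrative only; DST and SRC stand for
   BLKmode MEMs built elsewhere):

     emit_block_move (dst, src, GEN_INT (32), 4);

   copies 32 bytes assuming 4-byte alignment: by pieces when that takes
   fewer than MOVE_RATIO insns, otherwise via a movstrM pattern or the
   memcpy/bcopy fallback above.  */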
1764
1765/* Copy all or part of a value X into registers starting at REGNO.
1766   The number of registers to be filled is NREGS.  */
1767
1768void
1769move_block_to_reg (regno, x, nregs, mode)
1770     int regno;
1771     rtx x;
1772     int nregs;
1773     enum machine_mode mode;
1774{
1775  int i;
1776#ifdef HAVE_load_multiple
1777  rtx pat;
1778  rtx last;
1779#endif
1780
1781  if (nregs == 0)
1782    return;
1783
1784  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1785    x = validize_mem (force_const_mem (mode, x));
1786
1787  /* See if the machine can do this with a load multiple insn.  */
1788#ifdef HAVE_load_multiple
1789  if (HAVE_load_multiple)
1790    {
1791      last = get_last_insn ();
1792      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1793			       GEN_INT (nregs));
1794      if (pat)
1795	{
1796	  emit_insn (pat);
1797	  return;
1798	}
1799      else
1800	delete_insns_since (last);
1801    }
1802#endif
1803
1804  for (i = 0; i < nregs; i++)
1805    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1806		    operand_subword_force (x, i, mode));
1807}
1808
1809/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1810   The number of registers to be filled is NREGS.  SIZE indicates the number
1811   of bytes in the object X.  */
1812
1813
1814void
1815move_block_from_reg (regno, x, nregs, size)
1816     int regno;
1817     rtx x;
1818     int nregs;
1819     int size;
1820{
1821  int i;
1822#ifdef HAVE_store_multiple
1823  rtx pat;
1824  rtx last;
1825#endif
1826  enum machine_mode mode;
1827
1828  /* If SIZE is that of a mode no bigger than a word, just use that
1829     mode's store operation.  */
1830  if (size <= UNITS_PER_WORD
1831      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1832    {
1833      emit_move_insn (change_address (x, mode, NULL),
1834		      gen_rtx_REG (mode, regno));
1835      return;
1836    }
1837
1838  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1839     to the left before storing to memory.  Note that the previous test
1840     doesn't handle all cases (e.g. SIZE == 3).  */
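  /* For illustration: with UNITS_PER_WORD == 4 and SIZE == 3, the value
     is shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits, so that its
     three significant bytes land in the low-order memory addresses of
     the word that is stored.  */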
1841  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1842    {
1843      rtx tem = operand_subword (x, 0, 1, BLKmode);
1844      rtx shift;
1845
1846      if (tem == 0)
1847	abort ();
1848
1849      shift = expand_shift (LSHIFT_EXPR, word_mode,
1850			    gen_rtx_REG (word_mode, regno),
1851			    build_int_2 ((UNITS_PER_WORD - size)
1852					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1853      emit_move_insn (tem, shift);
1854      return;
1855    }
1856
1857  /* See if the machine can do this with a store multiple insn.  */
1858#ifdef HAVE_store_multiple
1859  if (HAVE_store_multiple)
1860    {
1861      last = get_last_insn ();
1862      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1863				GEN_INT (nregs));
1864      if (pat)
1865	{
1866	  emit_insn (pat);
1867	  return;
1868	}
1869      else
1870	delete_insns_since (last);
1871    }
1872#endif
1873
1874  for (i = 0; i < nregs; i++)
1875    {
1876      rtx tem = operand_subword (x, i, 1, BLKmode);
1877
1878      if (tem == 0)
1879	abort ();
1880
1881      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1882    }
1883}
1884
1885/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1886   registers represented by a PARALLEL.  SSIZE represents the total size of
1887   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
1888   SRC in bits.  */
1889	/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1890   the balance will be in what would be the low-order memory addresses, i.e.
1891   left justified for big endian, right justified for little endian.  This
1892   happens to be true for the targets currently using this support.  If this
1893   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1894   would be needed.  */
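/* For illustration: a 16-byte block loaded into two 8-byte registers
   might be described by the PARALLEL

       (parallel [(expr_list (reg:DI 4) (const_int 0))
		  (expr_list (reg:DI 5) (const_int 8))])

   where each EXPR_LIST pairs a destination register with the byte
   offset of its piece within the source block.  */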
1895
1896void
1897emit_group_load (dst, orig_src, ssize, align)
1898     rtx dst, orig_src;
1899     int align, ssize;
1900{
1901  rtx *tmps, src;
1902  int start, i;
1903
1904  if (GET_CODE (dst) != PARALLEL)
1905    abort ();
1906
1907  /* Check for a NULL entry, used to indicate that the parameter goes
1908     both on the stack and in registers.  */
1909  if (XEXP (XVECEXP (dst, 0, 0), 0))
1910    start = 0;
1911  else
1912    start = 1;
1913
1914  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1915
1916  /* If we won't be loading directly from memory, protect the real source
1917     from strange tricks we might play.  */
1918  src = orig_src;
1919  if (GET_CODE (src) != MEM)
1920    {
1921      src = gen_reg_rtx (GET_MODE (orig_src));
1922      emit_move_insn (src, orig_src);
1923    }
1924
1925  /* Process the pieces.  */
1926  for (i = start; i < XVECLEN (dst, 0); i++)
1927    {
1928      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1929      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1930      int bytelen = GET_MODE_SIZE (mode);
1931      int shift = 0;
1932
1933      /* Handle trailing fragments that run over the size of the struct.  */
1934      if (ssize >= 0 && bytepos + bytelen > ssize)
1935	{
1936	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1937	  bytelen = ssize - bytepos;
1938	  if (bytelen <= 0)
1939	    abort();
1940	}
1941
1942      /* Optimize the access just a bit.  */
1943      if (GET_CODE (src) == MEM
1944	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1945	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1946	  && bytelen == GET_MODE_SIZE (mode))
1947	{
1948	  tmps[i] = gen_reg_rtx (mode);
1949	  emit_move_insn (tmps[i],
1950			  change_address (src, mode,
1951					  plus_constant (XEXP (src, 0),
1952							 bytepos)));
1953	}
1954      else
1955	{
1956	  tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1957				       bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1958				       mode, mode, align, ssize);
1959	}
1960
1961      if (BYTES_BIG_ENDIAN && shift)
1962	{
1963	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1964			tmps[i], 0, OPTAB_WIDEN);
1965	}
1966    }
1967  emit_queue();
1968
1969  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1970  for (i = start; i < XVECLEN (dst, 0); i++)
1971    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1972}
1973
1974/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1975   registers represented by a PARALLEL.  SSIZE represents the total size of
1976   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */
1977
1978void
1979emit_group_store (orig_dst, src, ssize, align)
1980     rtx orig_dst, src;
1981     int ssize, align;
1982{
1983  rtx *tmps, dst;
1984  int start, i;
1985
1986  if (GET_CODE (src) != PARALLEL)
1987    abort ();
1988
1989  /* Check for a NULL entry, used to indicate that the parameter goes
1990     both on the stack and in registers.  */
1991  if (XEXP (XVECEXP (src, 0, 0), 0))
1992    start = 0;
1993  else
1994    start = 1;
1995
1996  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1997
1998  /* Copy the (probable) hard regs into pseudos.  */
1999  for (i = start; i < XVECLEN (src, 0); i++)
2000    {
2001      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2002      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2003      emit_move_insn (tmps[i], reg);
2004    }
2005  emit_queue();
2006
2007  /* If we won't be storing directly into memory, protect the real destination
2008     from strange tricks we might play.  */
2009  dst = orig_dst;
2010  if (GET_CODE (dst) != MEM)
2011    {
2012      dst = gen_reg_rtx (GET_MODE (orig_dst));
2013      /* Make life a bit easier for combine.  */
2014      emit_move_insn (dst, const0_rtx);
2015    }
2016  else if (! MEM_IN_STRUCT_P (dst))
2017    {
2018      /* store_bit_field requires that memory operations have
2019	 mem_in_struct_p set; the destination might not.  */
2020
2021      dst = copy_rtx (orig_dst);
2022      MEM_IN_STRUCT_P (dst) = 1;
2023    }
2024
2025  /* Process the pieces.  */
2026  for (i = start; i < XVECLEN (src, 0); i++)
2027    {
2028      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2029      enum machine_mode mode = GET_MODE (tmps[i]);
2030      int bytelen = GET_MODE_SIZE (mode);
2031
2032      /* Handle trailing fragments that run over the size of the struct.  */
2033      if (ssize >= 0 && bytepos + bytelen > ssize)
2034	{
2035	  if (BYTES_BIG_ENDIAN)
2036	    {
2037	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2038	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2039			    tmps[i], 0, OPTAB_WIDEN);
2040	    }
2041	  bytelen = ssize - bytepos;
2042	}
2043
2044      /* Optimize the access just a bit.  */
2045      if (GET_CODE (dst) == MEM
2046	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2047	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2048	  && bytelen == GET_MODE_SIZE (mode))
2049	{
2050	  emit_move_insn (change_address (dst, mode,
2051					  plus_constant (XEXP (dst, 0),
2052							 bytepos)),
2053			  tmps[i]);
2054	}
2055      else
2056	{
2057	  store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2058			   mode, tmps[i], align, ssize);
2059	}
2060    }
2061  emit_queue();
2062
2063  /* Copy from the pseudo into the (probable) hard reg.  */
2064  if (GET_CODE (dst) == REG)
2065    emit_move_insn (orig_dst, dst);
2066}
2067
2068/* Add a USE expression for REG to the (possibly empty) list pointed
2069   to by CALL_FUSAGE.  REG must denote a hard register.  */
2070
2071void
2072use_reg (call_fusage, reg)
2073     rtx *call_fusage, reg;
2074{
2075  if (GET_CODE (reg) != REG
2076      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2077    abort();
2078
2079  *call_fusage
2080    = gen_rtx_EXPR_LIST (VOIDmode,
2081			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2082}
2083
2084/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2085   starting at REGNO.  All of these registers must be hard registers.  */
2086
2087void
2088use_regs (call_fusage, regno, nregs)
2089     rtx *call_fusage;
2090     int regno;
2091     int nregs;
2092{
2093  int i;
2094
2095  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2096    abort ();
2097
2098  for (i = 0; i < nregs; i++)
2099    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2100}
2101
2102/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2103   PARALLEL REGS.  This is for calls that pass values in multiple
2104   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2105
2106void
2107use_group_regs (call_fusage, regs)
2108     rtx *call_fusage;
2109     rtx regs;
2110{
2111  int i;
2112
2113  for (i = 0; i < XVECLEN (regs, 0); i++)
2114    {
2115      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2116
2117      /* A NULL entry means the parameter goes both on the stack and in
2118	 registers.  This can also be a MEM for targets that pass values
2119	 partially on the stack and partially in registers.  */
2120      if (reg != 0 && GET_CODE (reg) == REG)
2121	use_reg (call_fusage, reg);
2122    }
2123}
2124
2125/* Generate several move instructions to clear LEN bytes of block TO.
2126   (A MEM rtx with BLKmode).   The caller must pass TO through
2127	   protect_from_queue before calling.  ALIGN (in bytes) is the maximum
2128	   alignment we can assume.  */
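/* For illustration: with LEN == 7 and word alignment on a 32-bit target
   (assuming MOVE_MAX >= 4), the mode loop below emits one SImode store
   of zero, then one HImode store, then one QImode store -- the largest
   usable mode first, then successively smaller modes for the rest.  */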
2129
2130static void
2131clear_by_pieces (to, len, align)
2132     rtx to;
2133     int len, align;
2134{
2135  struct clear_by_pieces data;
2136  rtx to_addr = XEXP (to, 0);
2137  int max_size = MOVE_MAX + 1;
2138
2139  data.offset = 0;
2140  data.to_addr = to_addr;
2141  data.to = to;
2142  data.autinc_to
2143    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2144       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2145
2146  data.explicit_inc_to = 0;
2147  data.reverse
2148    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2149  if (data.reverse) data.offset = len;
2150  data.len = len;
2151
2152  data.to_struct = MEM_IN_STRUCT_P (to);
2153
2154  /* If copying requires more than two move insns,
2155     copy addresses to registers (to make displacements shorter)
2156     and use post-increment if available.  */
2157  if (!data.autinc_to
2158      && move_by_pieces_ninsns (len, align) > 2)
2159    {
2160#ifdef HAVE_PRE_DECREMENT
2161      if (data.reverse && ! data.autinc_to)
2162	{
2163	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2164	  data.autinc_to = 1;
2165	  data.explicit_inc_to = -1;
2166	}
2167#endif
2168#ifdef HAVE_POST_INCREMENT
2169      if (! data.reverse && ! data.autinc_to)
2170	{
2171	  data.to_addr = copy_addr_to_reg (to_addr);
2172	  data.autinc_to = 1;
2173	  data.explicit_inc_to = 1;
2174	}
2175#endif
2176      if (!data.autinc_to && CONSTANT_P (to_addr))
2177	data.to_addr = copy_addr_to_reg (to_addr);
2178    }
2179
2180  if (! SLOW_UNALIGNED_ACCESS
2181      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2182    align = MOVE_MAX;
2183
2184  /* First move what we can in the largest integer mode, then go to
2185     successively smaller modes.  */
2186
2187  while (max_size > 1)
2188    {
2189      enum machine_mode mode = VOIDmode, tmode;
2190      enum insn_code icode;
2191
2192      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2193	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2194	if (GET_MODE_SIZE (tmode) < max_size)
2195	  mode = tmode;
2196
2197      if (mode == VOIDmode)
2198	break;
2199
2200      icode = mov_optab->handlers[(int) mode].insn_code;
2201      if (icode != CODE_FOR_nothing
2202	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2203			   GET_MODE_SIZE (mode)))
2204	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2205
2206      max_size = GET_MODE_SIZE (mode);
2207    }
2208
2209  /* The code above should have handled everything.  */
2210  if (data.len != 0)
2211    abort ();
2212}
2213
2214/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
2215   with move instructions for mode MODE.  GENFUN is the gen_... function
2216   to make a move insn for that mode.  DATA has all the other info.  */
2217
2218static void
2219clear_by_pieces_1 (genfun, mode, data)
2220     rtx (*genfun) PROTO ((rtx, ...));
2221     enum machine_mode mode;
2222     struct clear_by_pieces *data;
2223{
2224  register int size = GET_MODE_SIZE (mode);
2225  register rtx to1;
2226
2227  while (data->len >= size)
2228    {
2229      if (data->reverse) data->offset -= size;
2230
2231      to1 = (data->autinc_to
2232	     ? gen_rtx_MEM (mode, data->to_addr)
2233	     : copy_rtx (change_address (data->to, mode,
2234					 plus_constant (data->to_addr,
2235							data->offset))));
2236      MEM_IN_STRUCT_P (to1) = data->to_struct;
2237
2238#ifdef HAVE_PRE_DECREMENT
2239      if (data->explicit_inc_to < 0)
2240	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2241#endif
2242
2243      emit_insn ((*genfun) (to1, const0_rtx));
2244#ifdef HAVE_POST_INCREMENT
2245      if (data->explicit_inc_to > 0)
2246	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2247#endif
2248
2249      if (! data->reverse) data->offset += size;
2250
2251      data->len -= size;
2252    }
2253}
2254
2255/* Write zeros through the storage of OBJECT.
2256   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2257	   the maximum alignment we can assume, measured in bytes.
2258
2259	   If we call a function (e.g. memset) to do the clearing, return its value.  */
2260
2261rtx
2262clear_storage (object, size, align)
2263     rtx object;
2264     rtx size;
2265     int align;
2266{
2267#ifdef TARGET_MEM_FUNCTIONS
2268  static tree fn;
2269  tree call_expr, arg_list;
2270#endif
2271  rtx retval = 0;
2272
2273  if (GET_MODE (object) == BLKmode)
2274    {
2275      object = protect_from_queue (object, 1);
2276      size = protect_from_queue (size, 0);
2277
2278      if (GET_CODE (size) == CONST_INT
2279	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2280	clear_by_pieces (object, INTVAL (size), align);
2281
2282      else
2283	{
2284	  /* Try the most limited insn first, because there's no point
2285	     including more than one in the machine description unless
2286	     the more limited one has some advantage.  */
2287
2288	  rtx opalign = GEN_INT (align);
2289	  enum machine_mode mode;
2290
2291	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2292	       mode = GET_MODE_WIDER_MODE (mode))
2293	    {
2294	      enum insn_code code = clrstr_optab[(int) mode];
2295
2296	      if (code != CODE_FOR_nothing
2297		  /* We don't need MODE to be narrower than
2298		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2299		     the mode mask, as it is returned by the macro, it will
2300		     definitely be less than the actual mode mask.  */
2301		  && ((GET_CODE (size) == CONST_INT
2302		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2303			   <= (GET_MODE_MASK (mode) >> 1)))
2304		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2305		  && (insn_operand_predicate[(int) code][0] == 0
2306		      || (*insn_operand_predicate[(int) code][0]) (object,
2307								   BLKmode))
2308		  && (insn_operand_predicate[(int) code][2] == 0
2309		      || (*insn_operand_predicate[(int) code][2]) (opalign,
2310								   VOIDmode)))
2311		{
2312		  rtx op1;
2313		  rtx last = get_last_insn ();
2314		  rtx pat;
2315
2316		  op1 = convert_to_mode (mode, size, 1);
2317		  if (insn_operand_predicate[(int) code][1] != 0
2318		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
2319								     mode))
2320		    op1 = copy_to_mode_reg (mode, op1);
2321
2322		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2323		  if (pat)
2324		    {
2325		      emit_insn (pat);
2326		      return 0;
2327		    }
2328		  else
2329		    delete_insns_since (last);
2330		}
2331	    }
2332
2333
2334#ifdef TARGET_MEM_FUNCTIONS
2335      /* It is incorrect to use the libcall calling conventions to call
2336	 memset in this context.
2337
2338	 This could be a user call to memset and the user may wish to
2339	 examine the return value from memset.
2340
2341	 For targets where libcalls and normal calls have different conventions
2342	 for returning pointers, we could end up generating incorrect code.
2343
2344	 So instead of using a libcall sequence we build up a suitable
2345	 CALL_EXPR and expand the call in the normal fashion.  */
2346      if (fn == NULL_TREE)
2347	{
2348	  tree fntype;
2349
2350	  /* This was copied from except.c; I don't know whether all of it
2351	     is necessary in this context.  */
2352	  fn = get_identifier ("memset");
2353	  push_obstacks_nochange ();
2354	  end_temporary_allocation ();
2355	  fntype = build_pointer_type (void_type_node);
2356	  fntype = build_function_type (fntype, NULL_TREE);
2357	  fn = build_decl (FUNCTION_DECL, fn, fntype);
2358	  DECL_EXTERNAL (fn) = 1;
2359	  TREE_PUBLIC (fn) = 1;
2360	  DECL_ARTIFICIAL (fn) = 1;
2361	  make_decl_rtl (fn, NULL_PTR, 1);
2362	  assemble_external (fn);
2363	  pop_obstacks ();
2364	}
2365
2366      /* We need to make an argument list for the function call.
2367
2368	 memset takes three arguments: the first is a void * address, the
2369	 second an integer with the initialization value, and the last is a
2370	 size_t byte count for the area to clear.  */
2371      arg_list
2372	= build_tree_list (NULL_TREE,
2373			    make_tree (build_pointer_type (void_type_node),
2374				       XEXP (object, 0)));
2375      TREE_CHAIN (arg_list)
2376	= build_tree_list (NULL_TREE,
2377			   make_tree (integer_type_node, const0_rtx));
2378      TREE_CHAIN (TREE_CHAIN (arg_list))
2379	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2380      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
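      /* For illustration: the list above is the tree equivalent of the
	 C call memset (XEXP (object, 0), 0, size).  Expanding it as a
	 normal call makes memset's return value available, which the
	 bzero libcall in the #else branch cannot provide.  */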
2381
2382      /* Now we have to build up the CALL_EXPR itself.  */
2383      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2384      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2385			 call_expr, arg_list, NULL_TREE);
2386      TREE_SIDE_EFFECTS (call_expr) = 1;
2387
2388      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2389#else
2390	  emit_library_call (bzero_libfunc, 0,
2391			     VOIDmode, 2,
2392			     XEXP (object, 0), Pmode,
2393			     convert_to_mode
2394			     (TYPE_MODE (integer_type_node), size,
2395			      TREE_UNSIGNED (integer_type_node)),
2396			     TYPE_MODE (integer_type_node));
2397#endif
2398	}
2399    }
2400  else
2401    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2402
2403  return retval;
2404}
2405
2406/* Generate code to copy Y into X.
2407   Both Y and X must have the same mode, except that
2408   Y can be a constant with VOIDmode.
2409   This mode cannot be BLKmode; use emit_block_move for that.
2410
2411   Return the last instruction emitted.  */
2412
2413rtx
2414emit_move_insn (x, y)
2415     rtx x, y;
2416{
2417  enum machine_mode mode = GET_MODE (x);
2418
2419  x = protect_from_queue (x, 1);
2420  y = protect_from_queue (y, 0);
2421
2422  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2423    abort ();
2424
2425  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2426    y = force_const_mem (mode, y);
2427
2428  /* If X or Y are memory references, verify that their addresses are valid
2429     for the machine.  */
2430  if (GET_CODE (x) == MEM
2431      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2432	   && ! push_operand (x, GET_MODE (x)))
2433	  || (flag_force_addr
2434	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2435    x = change_address (x, VOIDmode, XEXP (x, 0));
2436
2437  if (GET_CODE (y) == MEM
2438      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2439	  || (flag_force_addr
2440	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2441    y = change_address (y, VOIDmode, XEXP (y, 0));
2442
2443  if (mode == BLKmode)
2444    abort ();
2445
2446  return emit_move_insn_1 (x, y);
2447}
2448
2449/* Low level part of emit_move_insn.
2450   Called just like emit_move_insn, but assumes X and Y
2451   are basically valid.  */
2452
2453rtx
2454emit_move_insn_1 (x, y)
2455     rtx x, y;
2456{
2457  enum machine_mode mode = GET_MODE (x);
2458  enum machine_mode submode;
2459  enum mode_class class = GET_MODE_CLASS (mode);
2460  int i;
2461
2462  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2463    return
2464      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2465
2466  /* Expand complex moves by moving real part and imag part, if possible.  */
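  /* For illustration: if no direct DCmode move pattern exists, a DCmode
     move is emitted as two DFmode moves, one for the real half and one
     for the imaginary half.  */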
2467  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2468	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2469						    * BITS_PER_UNIT),
2470						   (class == MODE_COMPLEX_INT
2471						    ? MODE_INT : MODE_FLOAT),
2472						   0))
2473	   && (mov_optab->handlers[(int) submode].insn_code
2474	       != CODE_FOR_nothing))
2475    {
2476      /* Don't split destination if it is a stack push.  */
2477      int stack = push_operand (x, GET_MODE (x));
2478
2479	      /* If this is a stack push, push the highpart first, so it
2480	 will be in the argument order.
2481
2482	 In that case, change_address is used only to convert
2483	 the mode, not to change the address.  */
2484      if (stack)
2485	{
2486	  /* Note that the real part always precedes the imag part in memory
2487	     regardless of machine's endianness.  */
2488#ifdef STACK_GROWS_DOWNWARD
2489	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2490		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2491		      gen_imagpart (submode, y)));
2492	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2493		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2494		      gen_realpart (submode, y)));
2495#else
2496	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2497		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2498		      gen_realpart (submode, y)));
2499	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2500		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2501		      gen_imagpart (submode, y)));
2502#endif
2503	}
2504      else
2505	{
2506	  /* Show the output dies here.  */
2507	  if (x != y)
2508	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2509
2510	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2511		     (gen_realpart (submode, x), gen_realpart (submode, y)));
2512	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2513		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2514	}
2515
2516      return get_last_insn ();
2517    }
2518
2519  /* This will handle any multi-word mode that lacks a move_insn pattern.
2520     However, you will get better code if you define such patterns,
2521     even if they must turn into multiple assembler instructions.  */
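  /* For illustration: on a 32-bit target with no DImode move pattern, a
     DImode move reaches the loop below and becomes two word_mode
     (SImode) moves, one per subword.  */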
2522  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2523    {
2524      rtx last_insn = 0;
2525
2526#ifdef PUSH_ROUNDING
2527
2528      /* If X is a push on the stack, do the push now and replace
2529	 X with a reference to the stack pointer.  */
2530      if (push_operand (x, GET_MODE (x)))
2531	{
2532	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2533	  x = change_address (x, VOIDmode, stack_pointer_rtx);
2534	}
2535#endif
2536
2537      /* Show the output dies here.  */
2538      if (x != y)
2539        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2540
2541      for (i = 0;
2542	   i < (GET_MODE_SIZE (mode)  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2543	   i++)
2544	{
2545	  rtx xpart = operand_subword (x, i, 1, mode);
2546	  rtx ypart = operand_subword (y, i, 1, mode);
2547
2548	  /* If we can't get a part of Y, put Y into memory if it is a
2549	     constant.  Otherwise, force it into a register.  If we still
2550	     can't get a part of Y, abort.  */
2551	  if (ypart == 0 && CONSTANT_P (y))
2552	    {
2553	      y = force_const_mem (mode, y);
2554	      ypart = operand_subword (y, i, 1, mode);
2555	    }
2556	  else if (ypart == 0)
2557	    ypart = operand_subword_force (y, i, mode);
2558
2559	  if (xpart == 0 || ypart == 0)
2560	    abort ();
2561
2562	  last_insn = emit_move_insn (xpart, ypart);
2563	}
2564
2565      return last_insn;
2566    }
2567  else
2568    abort ();
2569}
2570
2571/* Pushing data onto the stack.  */
2572
2573/* Push a block of length SIZE (perhaps variable)
2574   and return an rtx to address the beginning of the block.
2575   Note that it is not possible for the value returned to be a QUEUED.
2576   The value may be virtual_outgoing_args_rtx.
2577
2578   EXTRA is the number of bytes of padding to push in addition to SIZE.
2579   BELOW nonzero means this padding comes at low addresses;
2580   otherwise, the padding comes at high addresses.  */
2581
2582rtx
2583push_block (size, extra, below)
2584     rtx size;
2585     int extra, below;
2586{
2587  register rtx temp;
2588
2589  size = convert_modes (Pmode, ptr_mode, size, 1);
2590  if (CONSTANT_P (size))
2591    anti_adjust_stack (plus_constant (size, extra));
2592  else if (GET_CODE (size) == REG && extra == 0)
2593    anti_adjust_stack (size);
2594  else
2595    {
2596      rtx temp = copy_to_mode_reg (Pmode, size);
2597      if (extra != 0)
2598	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2599			     temp, 0, OPTAB_LIB_WIDEN);
2600      anti_adjust_stack (temp);
2601    }
2602
2603#ifdef STACK_GROWS_DOWNWARD
2604  temp = virtual_outgoing_args_rtx;
2605  if (extra != 0 && below)
2606    temp = plus_constant (temp, extra);
2607#else
2608  if (GET_CODE (size) == CONST_INT)
2609    temp = plus_constant (virtual_outgoing_args_rtx,
2610			  - INTVAL (size) - (below ? 0 : extra));
2611  else if (extra != 0 && !below)
2612    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2613		    negate_rtx (Pmode, plus_constant (size, extra)));
2614  else
2615    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2616		    negate_rtx (Pmode, size));
2617#endif
2618
2619  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2620}
2621
2622rtx
2623gen_push_operand ()
2624{
2625  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2626}
2627
2628	/* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2629   block of SIZE bytes.  */
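/* For illustration: with STACK_PUSH_CODE == POST_DEC the stack pointer
   has already moved past the just-pushed block, so the block begins at
   sp + SIZE; with a pre-update code such as PRE_DEC, sp itself already
   addresses the block and is returned unchanged.  */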
2630
2631static rtx
2632get_push_address (size)
2633	int size;
2634{
2635  register rtx temp;
2636
2637  if (STACK_PUSH_CODE == POST_DEC)
2638    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2639  else if (STACK_PUSH_CODE == POST_INC)
2640    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2641  else
2642    temp = stack_pointer_rtx;
2643
2644  return copy_to_reg (temp);
2645}
2646
2647/* Generate code to push X onto the stack, assuming it has mode MODE and
2648   type TYPE.
2649   MODE is redundant except when X is a CONST_INT (since they don't
2650   carry mode info).
2651   SIZE is an rtx for the size of data to be copied (in bytes),
2652   needed only if X is BLKmode.
2653
2654	   ALIGN (in bytes) is the maximum alignment we can assume.
2655
2656   If PARTIAL and REG are both nonzero, then copy that many of the first
2657   words of X into registers starting with REG, and push the rest of X.
2658   The amount of space pushed is decreased by PARTIAL words,
2659   rounded *down* to a multiple of PARM_BOUNDARY.
2660   REG must be a hard register in this case.
2661	   If REG is zero but PARTIAL is not, take all other actions for an
2662   argument partially in registers, but do not actually load any
2663   registers.
2664
2665   EXTRA is the amount in bytes of extra space to leave next to this arg.
2666   This is ignored if an argument block has already been allocated.
2667
2668   On a machine that lacks real push insns, ARGS_ADDR is the address of
2669   the bottom of the argument block for this call.  We use indexing off there
2670	   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2671   argument block has not been preallocated.
2672
2673   ARGS_SO_FAR is the size of args previously pushed for this call.
2674
2675   REG_PARM_STACK_SPACE is nonzero if functions require stack space
2676   for arguments passed in registers.  If nonzero, it will be the number
2677   of bytes required.  */
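/* For illustration: on a 32-bit target, PARTIAL == 2 for a BLKmode X
   means the first two words of X travel in REG and REG+1 while the rest
   is pushed, and the stack space used shrinks by two words, rounded
   down to a multiple of PARM_BOUNDARY.  */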
2678
2679void
2680emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2681		args_addr, args_so_far, reg_parm_stack_space)
2682     register rtx x;
2683     enum machine_mode mode;
2684     tree type;
2685     rtx size;
2686     int align;
2687     int partial;
2688     rtx reg;
2689     int extra;
2690     rtx args_addr;
2691     rtx args_so_far;
2692     int reg_parm_stack_space;
2693{
2694  rtx xinner;
2695  enum direction stack_direction
2696#ifdef STACK_GROWS_DOWNWARD
2697    = downward;
2698#else
2699    = upward;
2700#endif
2701
2702  /* Decide where to pad the argument: `downward' for below,
2703     `upward' for above, or `none' for don't pad it.
2704     Default is below for small data on big-endian machines; else above.  */
2705  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2706
2707  /* Invert direction if stack is post-update.  */
2708  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2709    if (where_pad != none)
2710      where_pad = (where_pad == downward ? upward : downward);
2711
2712  xinner = x = protect_from_queue (x, 0);
2713
2714  if (mode == BLKmode)
2715    {
2716      /* Copy a block into the stack, entirely or partially.  */
2717
2718      register rtx temp;
2719      int used = partial * UNITS_PER_WORD;
2720      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2721      int skip;
2722
2723      if (size == 0)
2724	abort ();
2725
2726      used -= offset;
2727
2728      /* USED is now the # of bytes we need not copy to the stack
2729	 because registers will take care of them.  */
2730
2731      if (partial != 0)
2732	xinner = change_address (xinner, BLKmode,
2733				 plus_constant (XEXP (xinner, 0), used));
2734
2735      /* If the partial register-part of the arg counts in its stack size,
2736	 skip the part of stack space corresponding to the registers.
2737	 Otherwise, start copying to the beginning of the stack space,
2738	 by setting SKIP to 0.  */
2739      skip = (reg_parm_stack_space == 0) ? 0 : used;
2740
2741#ifdef PUSH_ROUNDING
2742      /* Do it with several push insns if that doesn't take lots of insns
2743	 and if there is no difficulty with push insns that skip bytes
2744	 on the stack for alignment purposes.  */
2745      if (args_addr == 0
2746	  && GET_CODE (size) == CONST_INT
2747	  && skip == 0
2748	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2749	      < MOVE_RATIO)
2750	  /* Here we avoid the case of a structure whose weak alignment
2751	     forces many pushes of a small amount of data,
2752	     and such small pushes do rounding that causes trouble.  */
2753	  && ((! SLOW_UNALIGNED_ACCESS)
2754	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2755	      || PUSH_ROUNDING (align) == align)
2756	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2757	{
2758	  /* Push padding now if padding above and stack grows down,
2759	     or if padding below and stack grows up.
2760	     But if space already allocated, this has already been done.  */
2761	  if (extra && args_addr == 0
2762	      && where_pad != none && where_pad != stack_direction)
2763	    anti_adjust_stack (GEN_INT (extra));
2764
2765	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2766			  INTVAL (size) - used, align);
2767
2768	  if (flag_check_memory_usage && ! in_check_memory_usage)
2769	    {
2770	      rtx temp;
2771
2772	      in_check_memory_usage = 1;
2773	      temp = get_push_address (INTVAL(size) - used);
2774	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2775		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2776				   temp, ptr_mode,
2777				   XEXP (xinner, 0), ptr_mode,
2778				   GEN_INT (INTVAL(size) - used),
2779				   TYPE_MODE (sizetype));
2780	      else
2781		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2782				   temp, ptr_mode,
2783			 	   GEN_INT (INTVAL(size) - used),
2784				   TYPE_MODE (sizetype),
2785				   GEN_INT (MEMORY_USE_RW),
2786				   TYPE_MODE (integer_type_node));
2787	      in_check_memory_usage = 0;
2788	    }
2789	}
2790      else
2791#endif /* PUSH_ROUNDING */
2792	{
2793	  /* Otherwise make space on the stack and copy the data
2794	     to the address of that space.  */
2795
2796	  /* Deduct words put into registers from the size we must copy.  */
2797	  if (partial != 0)
2798	    {
2799	      if (GET_CODE (size) == CONST_INT)
2800		size = GEN_INT (INTVAL (size) - used);
2801	      else
2802		size = expand_binop (GET_MODE (size), sub_optab, size,
2803				     GEN_INT (used), NULL_RTX, 0,
2804				     OPTAB_LIB_WIDEN);
2805	    }
2806
2807	  /* Get the address of the stack space.
2808	     In this case, we do not deal with EXTRA separately.
2809	     A single stack adjust will do.  */
2810	  if (! args_addr)
2811	    {
2812	      temp = push_block (size, extra, where_pad == downward);
2813	      extra = 0;
2814	    }
2815	  else if (GET_CODE (args_so_far) == CONST_INT)
2816	    temp = memory_address (BLKmode,
2817				   plus_constant (args_addr,
2818						  skip + INTVAL (args_so_far)));
2819	  else
2820	    temp = memory_address (BLKmode,
2821				   plus_constant (gen_rtx_PLUS (Pmode,
2822								args_addr,
2823								args_so_far),
2824						  skip));
2825	  if (flag_check_memory_usage && ! in_check_memory_usage)
2826	    {
2827	      rtx target;
2828
2829	      in_check_memory_usage = 1;
2830	      target = copy_to_reg (temp);
2831	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2832		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2833				   target, ptr_mode,
2834				   XEXP (xinner, 0), ptr_mode,
2835				   size, TYPE_MODE (sizetype));
2836	      else
2837	        emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2838				   target, ptr_mode,
2839			 	   size, TYPE_MODE (sizetype),
2840				   GEN_INT (MEMORY_USE_RW),
2841				   TYPE_MODE (integer_type_node));
2842	      in_check_memory_usage = 0;
2843	    }
2844
2845	  /* TEMP is the address of the block.  Copy the data there.  */
2846	  if (GET_CODE (size) == CONST_INT
2847	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2848		  < MOVE_RATIO))
2849	    {
2850	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2851			      INTVAL (size), align);
2852	      goto ret;
2853	    }
2854	  else
2855	    {
2856	      rtx opalign = GEN_INT (align);
2857	      enum machine_mode mode;
2858	      rtx target = gen_rtx_MEM (BLKmode, temp);
2859
2860	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2861		   mode != VOIDmode;
2862		   mode = GET_MODE_WIDER_MODE (mode))
2863		{
2864		  enum insn_code code = movstr_optab[(int) mode];
2865
2866		  if (code != CODE_FOR_nothing
2867		      && ((GET_CODE (size) == CONST_INT
2868			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
2869			       <= (GET_MODE_MASK (mode) >> 1)))
2870			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2871		      && (insn_operand_predicate[(int) code][0] == 0
2872			  || ((*insn_operand_predicate[(int) code][0])
2873			      (target, BLKmode)))
2874		      && (insn_operand_predicate[(int) code][1] == 0
2875			  || ((*insn_operand_predicate[(int) code][1])
2876			      (xinner, BLKmode)))
2877		      && (insn_operand_predicate[(int) code][3] == 0
2878			  || ((*insn_operand_predicate[(int) code][3])
2879			      (opalign, VOIDmode))))
2880		    {
2881		      rtx op2 = convert_to_mode (mode, size, 1);
2882		      rtx last = get_last_insn ();
2883		      rtx pat;
2884
2885		      if (insn_operand_predicate[(int) code][2] != 0
2886			  && ! ((*insn_operand_predicate[(int) code][2])
2887				(op2, mode)))
2888			op2 = copy_to_mode_reg (mode, op2);
2889
2890		      pat = GEN_FCN ((int) code) (target, xinner,
2891						  op2, opalign);
2892		      if (pat)
2893			{
2894			  emit_insn (pat);
2895			  goto ret;
2896			}
2897		      else
2898			delete_insns_since (last);
2899		    }
2900		}
2901	    }
2902
2903#ifndef ACCUMULATE_OUTGOING_ARGS
2904	  /* If the source is referenced relative to the stack pointer,
2905	     copy it to another register to stabilize it.  We do not need
2906	     to do this if we know that we won't be changing sp.  */
2907
2908	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2909	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2910	    temp = copy_to_reg (temp);
2911#endif
2912
2913	  /* Make inhibit_defer_pop nonzero around the library call
2914	     to force it to pop the bcopy-arguments right away.  */
2915	  NO_DEFER_POP;
2916#ifdef TARGET_MEM_FUNCTIONS
2917	  emit_library_call (memcpy_libfunc, 0,
2918			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2919			     convert_to_mode (TYPE_MODE (sizetype),
2920					      size, TREE_UNSIGNED (sizetype)),
2921			     TYPE_MODE (sizetype));
2922#else
2923	  emit_library_call (bcopy_libfunc, 0,
2924			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2925			     convert_to_mode (TYPE_MODE (integer_type_node),
2926					      size,
2927					      TREE_UNSIGNED (integer_type_node)),
2928			     TYPE_MODE (integer_type_node));
2929#endif
2930	  OK_DEFER_POP;
2931	}
2932    }
2933  else if (partial > 0)
2934    {
2935      /* Scalar partly in registers.  */
2936
2937      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2938      int i;
2939      int not_stack;
2940      /* # words of start of argument
2941	 that we must make space for but need not store.  */
2942      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2943      int args_offset = INTVAL (args_so_far);
2944      int skip;
2945
2946      /* Push padding now if padding above and stack grows down,
2947	 or if padding below and stack grows up.
2948	 But if space already allocated, this has already been done.  */
2949      if (extra && args_addr == 0
2950	  && where_pad != none && where_pad != stack_direction)
2951	anti_adjust_stack (GEN_INT (extra));
2952
2953      /* If we make space by pushing it, we might as well push
2954	 the real data.  Otherwise, we can leave OFFSET nonzero
2955	 and leave the space uninitialized.  */
2956      if (args_addr == 0)
2957	offset = 0;
2958
2959      /* Now NOT_STACK gets the number of words that we don't need to
2960	 allocate on the stack.  */
2961      not_stack = partial - offset;
2962
2963      /* If the partial register-part of the arg counts in its stack size,
2964	 skip the part of stack space corresponding to the registers.
2965	 Otherwise, start copying to the beginning of the stack space,
2966	 by setting SKIP to 0.  */
2967      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2968
2969      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2970	x = validize_mem (force_const_mem (mode, x));
2971
2972      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2973	 SUBREGs of such registers are not allowed.  */
2974      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2975	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2976	x = copy_to_reg (x);
2977
2978      /* Loop over all the words allocated on the stack for this arg.  */
2979      /* We can do it by words, because any scalar bigger than a word
2980	 has a size a multiple of a word.  */
2981#ifndef PUSH_ARGS_REVERSED
2982      for (i = not_stack; i < size; i++)
2983#else
2984      for (i = size - 1; i >= not_stack; i--)
2985#endif
2986	if (i >= not_stack + offset)
2987	  emit_push_insn (operand_subword_force (x, i, mode),
2988			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2989			  0, args_addr,
2990			  GEN_INT (args_offset + ((i - not_stack + skip)
2991						  * UNITS_PER_WORD)),
2992			  reg_parm_stack_space);
2993    }
2994  else
2995    {
2996      rtx addr;
2997      rtx target = NULL_RTX;
2998
2999      /* Push padding now if padding above and stack grows down,
3000	 or if padding below and stack grows up.
3001	 But if space already allocated, this has already been done.  */
3002      if (extra && args_addr == 0
3003	  && where_pad != none && where_pad != stack_direction)
3004	anti_adjust_stack (GEN_INT (extra));
3005
3006#ifdef PUSH_ROUNDING
3007      if (args_addr == 0)
3008	addr = gen_push_operand ();
3009      else
3010#endif
3011	{
3012	  if (GET_CODE (args_so_far) == CONST_INT)
3013	    addr
3014	      = memory_address (mode,
3015				plus_constant (args_addr,
3016					       INTVAL (args_so_far)));
3017          else
3018	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3019						       args_so_far));
3020	  target = addr;
3021	}
3022
3023      emit_move_insn (gen_rtx_MEM (mode, addr), x);
3024
3025      if (flag_check_memory_usage && ! in_check_memory_usage)
3026	{
3027	  in_check_memory_usage = 1;
3028	  if (target == 0)
3029	    target = get_push_address (GET_MODE_SIZE (mode));
3030
3031	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3032	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3033			       target, ptr_mode,
3034			       XEXP (x, 0), ptr_mode,
3035			       GEN_INT (GET_MODE_SIZE (mode)),
3036			       TYPE_MODE (sizetype));
3037	  else
3038	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3039			       target, ptr_mode,
3040			       GEN_INT (GET_MODE_SIZE (mode)),
3041			       TYPE_MODE (sizetype),
3042			       GEN_INT (MEMORY_USE_RW),
3043			       TYPE_MODE (integer_type_node));
3044	  in_check_memory_usage = 0;
3045	}
3046    }
3047
3048 ret:
3049  /* If part should go in registers, copy that part
3050     into the appropriate registers.  Do this now, at the end,
3051     since mem-to-mem copies above may do function calls.  */
3052  if (partial > 0 && reg != 0)
3053    {
3054      /* Handle calls that pass values in multiple non-contiguous locations.
3055	 The Irix 6 ABI has examples of this.  */
3056      if (GET_CODE (reg) == PARALLEL)
3057	emit_group_load (reg, x, -1, align);  /* ??? size? */
3058      else
3059	move_block_to_reg (REGNO (reg), x, partial, mode);
3060    }
3061
3062  if (extra && args_addr == 0 && where_pad == stack_direction)
3063    anti_adjust_stack (GEN_INT (extra));
3064}
3065
3066/* Expand an assignment that stores the value of FROM into TO.
3067   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3068   (This may contain a QUEUED rtx;
3069   if the value is constant, this rtx is a constant.)
3070   Otherwise, the returned value is NULL_RTX.
3071
3072   SUGGEST_REG is no longer actually used.
3073   It used to mean, copy the value through a register
3074   and return that register, if that is possible.
3075   We now use WANT_VALUE to decide whether to do this.  */
3076
3077rtx
3078expand_assignment (to, from, want_value, suggest_reg)
3079     tree to, from;
3080     int want_value;
3081     int suggest_reg;
3082{
3083  register rtx to_rtx = 0;
3084  rtx result;
3085
3086  /* Don't crash if the lhs of the assignment was erroneous.  */
3087
3088  if (TREE_CODE (to) == ERROR_MARK)
3089    {
3090      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3091      return want_value ? result : NULL_RTX;
3092    }
3093
3094  /* Assignment of a structure component needs special treatment
3095     if the structure component's rtx is not simply a MEM.
3096     Assignment of an array element at a constant index, and assignment of
3097     an array element in an unaligned packed structure field, has the same
3098     problem.  */
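  /* For illustration: an assignment such as s.f = v, where f is a
     bit-field, cannot be done with a plain move; get_inner_reference
     below locates the containing object, bit position and size, and
     store_field then emits the masked store.  */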
3099
3100  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3101      || TREE_CODE (to) == ARRAY_REF)
3102    {
3103      enum machine_mode mode1;
3104      int bitsize;
3105      int bitpos;
3106      tree offset;
3107      int unsignedp;
3108      int volatilep = 0;
3109      tree tem;
3110      int alignment;
3111
3112      push_temp_slots ();
3113      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3114				 &unsignedp, &volatilep, &alignment);
3115
3116      /* If we are going to use store_bit_field and extract_bit_field,
3117	 make sure to_rtx will be safe for multiple use.  */
3118
3119      if (mode1 == VOIDmode && want_value)
3120	tem = stabilize_reference (tem);
3121
3122      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3123      if (offset != 0)
3124	{
3125	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3126
3127	  if (GET_CODE (to_rtx) != MEM)
3128	    abort ();
3129
3130	  if (GET_MODE (offset_rtx) != ptr_mode)
3131	    {
3132#ifdef POINTERS_EXTEND_UNSIGNED
3133	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3134#else
3135	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3136#endif
3137	    }
3138
3139	  if (GET_CODE (to_rtx) == MEM
3140	      && GET_MODE (to_rtx) == BLKmode
3141	      && bitsize
3142	      && (bitpos % bitsize) == 0
3143	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3144	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3145	    {
3146	      rtx temp = change_address (to_rtx, mode1,
3147				         plus_constant (XEXP (to_rtx, 0),
3148						        (bitpos /
3149						         BITS_PER_UNIT)));
3150	      if (GET_CODE (XEXP (temp, 0)) == REG)
3151	        to_rtx = temp;
3152	      else
3153		to_rtx = change_address (to_rtx, mode1,
3154				         force_reg (GET_MODE (XEXP (temp, 0)),
3155						    XEXP (temp, 0)));
3156	      bitpos = 0;
3157	    }
3158
3159	  to_rtx = change_address (to_rtx, VOIDmode,
3160				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3161						 force_reg (ptr_mode, offset_rtx)));
3162	}
3163      if (volatilep)
3164	{
3165	  if (GET_CODE (to_rtx) == MEM)
3166	    {
3167	      /* When the offset is zero, to_rtx is the address of the
3168		 structure we are storing into, and hence may be shared.
3169		 We must make a new MEM before setting the volatile bit.  */
3170	      if (offset == 0)
3171		to_rtx = copy_rtx (to_rtx);
3172
3173	      MEM_VOLATILE_P (to_rtx) = 1;
3174	    }
3175#if 0  /* This was turned off because, when a field is volatile
3176	  in an object which is not volatile, the object may be in a register,
3177	  and then we would abort here.  */
3178	  else
3179	    abort ();
3180#endif
3181	}
3182
3183      if (TREE_CODE (to) == COMPONENT_REF
3184	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3185	{
3186	  if (offset == 0)
3187	    to_rtx = copy_rtx (to_rtx);
3188
3189	  RTX_UNCHANGING_P (to_rtx) = 1;
3190	}
3191
3192      /* Check the access.  */
3193      if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
3194	{
3195	  rtx to_addr;
3196	  int size;
3197	  int best_mode_size;
3198	  enum machine_mode best_mode;
3199
3200	  best_mode = get_best_mode (bitsize, bitpos,
3201	  			     TYPE_ALIGN (TREE_TYPE (tem)),
3202	  			     mode1, volatilep);
3203	  if (best_mode == VOIDmode)
3204	    best_mode = QImode;
3205
3206	  best_mode_size = GET_MODE_BITSIZE (best_mode);
3207	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3208	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3209	  size *= GET_MODE_SIZE (best_mode);
3210
3211	  /* Check the access right of the pointer.  */
3212	  if (size)
3213	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3214			       to_addr, ptr_mode,
3215			       GEN_INT (size), TYPE_MODE (sizetype),
3216			       GEN_INT (MEMORY_USE_WO),
3217			       TYPE_MODE (integer_type_node));
3218	}
3219
3220      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3221			    (want_value
3222			     /* Spurious cast makes HPUX compiler happy.  */
3223			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3224			     : VOIDmode),
3225			    unsignedp,
3226			    /* Required alignment of containing datum.  */
3227			    alignment,
3228			    int_size_in_bytes (TREE_TYPE (tem)));
3229      preserve_temp_slots (result);
3230      free_temp_slots ();
3231      pop_temp_slots ();
3232
3233      /* If the value is meaningful, convert RESULT to the proper mode.
3234	 Otherwise, return nothing.  */
3235      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3236					  TYPE_MODE (TREE_TYPE (from)),
3237					  result,
3238					  TREE_UNSIGNED (TREE_TYPE (to)))
3239	      : NULL_RTX);
3240    }
3241
3242  /* If the rhs is a function call and its value is not an aggregate,
3243     call the function before we start to compute the lhs.
3244     This is needed for correct code for cases such as
3245     val = setjmp (buf) on machines where reference to val
3246     requires loading up part of an address in a separate insn.
3247
3248     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3249     a promoted variable where the zero- or sign- extension needs to be done.
3250     Handling this in the normal way is safe because no computation is done
3251     before the call.  */
3252  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3253      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3254      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3255    {
3256      rtx value;
3257
3258      push_temp_slots ();
3259      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3260      if (to_rtx == 0)
3261	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3262
3263      /* Handle calls that return values in multiple non-contiguous locations.
3264	 The Irix 6 ABI has examples of this.  */
3265      if (GET_CODE (to_rtx) == PARALLEL)
3266	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3267			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3268      else if (GET_MODE (to_rtx) == BLKmode)
3269	emit_block_move (to_rtx, value, expr_size (from),
3270			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3271      else
3272	emit_move_insn (to_rtx, value);
3273      preserve_temp_slots (to_rtx);
3274      free_temp_slots ();
3275      pop_temp_slots ();
3276      return want_value ? to_rtx : NULL_RTX;
3277    }
3278
3279  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3280     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3281
3282  if (to_rtx == 0)
3283    {
3284      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3285      if (GET_CODE (to_rtx) == MEM)
3286	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3287    }
3288
3289  /* Don't move directly into a return register.  */
3290  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3291    {
3292      rtx temp;
3293
3294      push_temp_slots ();
3295      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3296      emit_move_insn (to_rtx, temp);
3297      preserve_temp_slots (to_rtx);
3298      free_temp_slots ();
3299      pop_temp_slots ();
3300      return want_value ? to_rtx : NULL_RTX;
3301    }
3302
3303  /* In case we are returning the contents of an object which overlaps
3304     the place the value is being stored, use a safe function when copying
3305     a value through a pointer into a structure value return block.  */
3306  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3307      && current_function_returns_struct
3308      && !current_function_returns_pcc_struct)
3309    {
3310      rtx from_rtx, size;
3311
3312      push_temp_slots ();
3313      size = expr_size (from);
3314      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3315			      EXPAND_MEMORY_USE_DONT);
3316
3317      /* Copy the rights of the bitmap.  */
3318      if (flag_check_memory_usage)
3319	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3320			   XEXP (to_rtx, 0), ptr_mode,
3321			   XEXP (from_rtx, 0), ptr_mode,
3322			   convert_to_mode (TYPE_MODE (sizetype),
3323					    size, TREE_UNSIGNED (sizetype)),
3324			   TYPE_MODE (sizetype));
3325
3326#ifdef TARGET_MEM_FUNCTIONS
3327      emit_library_call (memcpy_libfunc, 0,
3328			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3329			 XEXP (from_rtx, 0), Pmode,
3330			 convert_to_mode (TYPE_MODE (sizetype),
3331					  size, TREE_UNSIGNED (sizetype)),
3332			 TYPE_MODE (sizetype));
3333#else
3334      emit_library_call (bcopy_libfunc, 0,
3335			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3336			 XEXP (to_rtx, 0), Pmode,
3337			 convert_to_mode (TYPE_MODE (integer_type_node),
3338					  size, TREE_UNSIGNED (integer_type_node)),
3339			 TYPE_MODE (integer_type_node));
3340#endif
3341
3342      preserve_temp_slots (to_rtx);
3343      free_temp_slots ();
3344      pop_temp_slots ();
3345      return want_value ? to_rtx : NULL_RTX;
3346    }
3347
3348  /* Compute FROM and store the value in the rtx we got.  */
3349
3350  push_temp_slots ();
3351  result = store_expr (from, to_rtx, want_value);
3352  preserve_temp_slots (result);
3353  free_temp_slots ();
3354  pop_temp_slots ();
3355  return want_value ? result : NULL_RTX;
3356}
3357
3358/* Generate code for computing expression EXP,
3359   and storing the value into TARGET.
3360   TARGET may contain a QUEUED rtx.
3361
3362   If WANT_VALUE is nonzero, return a copy of the value
3363   not in TARGET, so that we can be sure to use the proper
3364   value in a containing expression even if TARGET has something
3365   else stored in it.  If possible, we copy the value through a pseudo
3366   and return that pseudo.  Or, if the value is constant, we try to
3367   return the constant.  In some cases, we return a pseudo
3368   copied *from* TARGET.
3369
3370   If the mode is BLKmode then we may return TARGET itself.
3371	   It turns out that in BLKmode it doesn't cause a problem,
3372   because C has no operators that could combine two different
3373   assignments into the same BLKmode object with different values
3374   with no sequence point.  Will other languages need this to
3375   be more thorough?
3376
3377   If WANT_VALUE is 0, we return NULL, to make sure
3378   to catch quickly any cases where the caller uses the value
3379   and fails to set WANT_VALUE.  */
3380
3381rtx
3382store_expr (exp, target, want_value)
3383     register tree exp;
3384     register rtx target;
3385     int want_value;
3386{
3387  register rtx temp;
3388  int dont_return_target = 0;
3389
3390  if (TREE_CODE (exp) == COMPOUND_EXPR)
3391    {
3392      /* Perform first part of compound expression, then assign from second
3393	 part.  */
3394      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3395      emit_queue ();
3396      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3397    }
3398  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3399    {
3400      /* For conditional expression, get safe form of the target.  Then
3401	 test the condition, doing the appropriate assignment on either
3402	 side.  This avoids the creation of unnecessary temporaries.
3403	 For non-BLKmode, it is more efficient not to do this.  */
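      /* For illustration: for a BLKmode target this branch expands
	 cond ? a : b roughly as

	     if (!cond) goto lab1;
	     target = a; goto lab2;
	   lab1:
	     target = b;
	   lab2:;

	 storing each arm directly into TARGET rather than into a
	 temporary that would then have to be copied.  */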
3404
3405      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3406
3407      emit_queue ();
3408      target = protect_from_queue (target, 1);
3409
3410      do_pending_stack_adjust ();
3411      NO_DEFER_POP;
3412      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3413      start_cleanup_deferral ();
3414      store_expr (TREE_OPERAND (exp, 1), target, 0);
3415      end_cleanup_deferral ();
3416      emit_queue ();
3417      emit_jump_insn (gen_jump (lab2));
3418      emit_barrier ();
3419      emit_label (lab1);
3420      start_cleanup_deferral ();
3421      store_expr (TREE_OPERAND (exp, 2), target, 0);
3422      end_cleanup_deferral ();
3423      emit_queue ();
3424      emit_label (lab2);
3425      OK_DEFER_POP;
3426
3427      return want_value ? target : NULL_RTX;
3428    }
3429  else if (queued_subexp_p (target))
3430    /* If target contains a postincrement, let's not risk
3431       using it as the place to generate the rhs.  */
3432    {
3433      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3434	{
3435	  /* Expand EXP into a new pseudo.  */
3436	  temp = gen_reg_rtx (GET_MODE (target));
3437	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3438	}
3439      else
3440	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3441
3442      /* If target is volatile, ANSI requires accessing the value
3443	 *from* the target, if it is accessed.  So make that happen.
3444	 In no case return the target itself.  */
3445      if (! MEM_VOLATILE_P (target) && want_value)
3446	dont_return_target = 1;
3447    }
3448  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3449	   && GET_MODE (target) != BLKmode)
3450    /* If target is in memory and caller wants value in a register instead,
3451       arrange that.  Pass TARGET as target for expand_expr so that,
3452       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3453       We know expand_expr will not use the target in that case.
3454       Don't do this if TARGET is volatile because we are supposed
3455       to write it and then read it.  */
3456    {
3457      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3458			  GET_MODE (target), 0);
3459      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3460	temp = copy_to_reg (temp);
3461      dont_return_target = 1;
3462    }
3463  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3464    /* If this is a scalar in a register that is stored in a wider mode
3465       than the declared mode, compute the result into its declared mode
3466       and then convert to the wider mode.  Our value is the computed
3467       expression.  */
3468    {
3469      /* If we don't want a value, we can do the conversion inside EXP,
3470	 which will often result in some optimizations.  Do the conversion
3471	 in two steps: first change the signedness, if needed, then
3472	 the extend.  But don't do this if the type of EXP is a subtype
3473	 of something else since then the conversion might involve
3474	 more than just converting modes.  */
3475      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3476	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
3477	{
3478	  if (TREE_UNSIGNED (TREE_TYPE (exp))
3479	      != SUBREG_PROMOTED_UNSIGNED_P (target))
3480	    exp
3481	      = convert
3482		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3483					  TREE_TYPE (exp)),
3484		 exp);
3485
3486	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3487					SUBREG_PROMOTED_UNSIGNED_P (target)),
3488			 exp);
3489	}
3490
3491      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3492
3493      /* If TEMP is a volatile MEM and we want a result value, make
3494	 the access now so it gets done only once.  Likewise if
3495	 it contains TARGET.  */
3496      if (GET_CODE (temp) == MEM && want_value
3497	  && (MEM_VOLATILE_P (temp)
3498	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3499	temp = copy_to_reg (temp);
3500
3501      /* If TEMP is a VOIDmode constant, use convert_modes to make
3502	 sure that we properly convert it.  */
3503      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3504	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3505			      TYPE_MODE (TREE_TYPE (exp)), temp,
3506			      SUBREG_PROMOTED_UNSIGNED_P (target));
3507
3508      convert_move (SUBREG_REG (target), temp,
3509		    SUBREG_PROMOTED_UNSIGNED_P (target));
3510      return want_value ? temp : NULL_RTX;
3511    }
3512  else
3513    {
3514      temp = expand_expr (exp, target, GET_MODE (target), 0);
3515      /* Return TARGET if it's a specified hardware register.
3516	 If TARGET is a volatile mem ref, either return TARGET
3517	 or return a reg copied *from* TARGET; ANSI requires this.
3518
3519	 Otherwise, if TEMP is not TARGET, return TEMP
3520	 if it is constant (for efficiency),
3521	 or if we really want the correct value.  */
3522      if (!(target && GET_CODE (target) == REG
3523	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
3524	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3525	  && ! rtx_equal_p (temp, target)
3526	  && (CONSTANT_P (temp) || want_value))
3527	dont_return_target = 1;
3528    }
3529
3530  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3531     the same as that of TARGET, adjust the constant.  This is needed, for
3532     example, in case it is a CONST_DOUBLE and we want only a word-sized
3533     value.  */
3534  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3535      && TREE_CODE (exp) != ERROR_MARK
3536      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3537    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3538			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3539
3540  if (flag_check_memory_usage
3541      && GET_CODE (target) == MEM
3542      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3543    {
3544      if (GET_CODE (temp) == MEM)
3545        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3546			   XEXP (target, 0), ptr_mode,
3547			   XEXP (temp, 0), ptr_mode,
3548			   expr_size (exp), TYPE_MODE (sizetype));
3549      else
3550        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3551			   XEXP (target, 0), ptr_mode,
3552			   expr_size (exp), TYPE_MODE (sizetype),
3553			   GEN_INT (MEMORY_USE_WO),
3554			   TYPE_MODE (integer_type_node));
3555    }
3556
3557  /* If value was not generated in the target, store it there.
3558     Convert the value to TARGET's type first if necessary.  */
3559  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3560     one or both of them are volatile memory refs, we have to distinguish
3561     two cases:
3562     - expand_expr has used TARGET.  In this case, we must not generate
3563       another copy.  This can be detected because TEMP and TARGET are
3564       then the same rtx, i.e. equal according to == .
3565     - expand_expr has not used TARGET; the source merely happens to
3566       have the same RTX form.  Since TEMP will have been created fresh
3567       by expand_expr, it will compare unequal according to == .
3568       We must generate a copy in this case, to reach the correct number
3569       of volatile memory references.  */
3570
3571  if ((! rtx_equal_p (temp, target)
3572       || (temp != target && (side_effects_p (temp)
3573			      || side_effects_p (target))))
3574      && TREE_CODE (exp) != ERROR_MARK)
3575    {
3576      target = protect_from_queue (target, 1);
3577      if (GET_MODE (temp) != GET_MODE (target)
3578	  && GET_MODE (temp) != VOIDmode)
3579	{
3580	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3581	  if (dont_return_target)
3582	    {
3583	      /* In this case, we will return TEMP,
3584		 so make sure it has the proper mode.
3585		 But don't forget to store the value into TARGET.  */
3586	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3587	      emit_move_insn (target, temp);
3588	    }
3589	  else
3590	    convert_move (target, temp, unsignedp);
3591	}
3592
3593      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3594	{
3595	  /* Handle copying a string constant into an array.
3596	     The string constant may be shorter than the array.
3597	     So copy just the string's actual length, and clear the rest.  */
3598	  rtx size;
3599	  rtx addr;
3600
3601	  /* Get the size of the data type of the string,
3602	     which is actually the size of the target.  */
3603	  size = expr_size (exp);
3604	  if (GET_CODE (size) == CONST_INT
3605	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
3606	    emit_block_move (target, temp, size,
3607			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3608	  else
3609	    {
3610	      /* Compute the size of the data to copy from the string.  */
3611	      tree copy_size
3612		= size_binop (MIN_EXPR,
3613			      make_tree (sizetype, size),
3614			      convert (sizetype,
3615				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3616	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3617					       VOIDmode, 0);
3618	      rtx label = 0;
3619
3620	      /* Copy that much.  */
3621	      emit_block_move (target, temp, copy_size_rtx,
3622			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3623
3624	      /* Figure out how much is left in TARGET that we have to clear.
3625		 Do all calculations in ptr_mode.  */
3626
3627	      addr = XEXP (target, 0);
3628	      addr = convert_modes (ptr_mode, Pmode, addr, 1);
3629
3630	      if (GET_CODE (copy_size_rtx) == CONST_INT)
3631		{
3632		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3633		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3634		}
3635	      else
3636		{
3637		  addr = force_reg (ptr_mode, addr);
3638		  addr = expand_binop (ptr_mode, add_optab, addr,
3639				       copy_size_rtx, NULL_RTX, 0,
3640				       OPTAB_LIB_WIDEN);
3641
3642		  size = expand_binop (ptr_mode, sub_optab, size,
3643				       copy_size_rtx, NULL_RTX, 0,
3644				       OPTAB_LIB_WIDEN);
3645
3646		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3647				 GET_MODE (size), 0, 0);
3648		  label = gen_label_rtx ();
3649		  emit_jump_insn (gen_blt (label));
3650		}
3651
3652	      if (size != const0_rtx)
3653		{
3654		  /* Be sure we can write on ADDR.  */
3655		  if (flag_check_memory_usage)
3656		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3657				       addr, ptr_mode,
3658				       size, TYPE_MODE (sizetype),
3659 				       GEN_INT (MEMORY_USE_WO),
3660				       TYPE_MODE (integer_type_node));
3661#ifdef TARGET_MEM_FUNCTIONS
3662		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3663				     addr, ptr_mode,
3664				     const0_rtx, TYPE_MODE (integer_type_node),
3665				     convert_to_mode (TYPE_MODE (sizetype),
3666						      size,
3667						      TREE_UNSIGNED (sizetype)),
3668				     TYPE_MODE (sizetype));
3669#else
3670		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3671				     addr, ptr_mode,
3672				     convert_to_mode (TYPE_MODE (integer_type_node),
3673						      size,
3674						      TREE_UNSIGNED (integer_type_node)),
3675				     TYPE_MODE (integer_type_node));
3676#endif
3677		}
3678
3679	      if (label)
3680		emit_label (label);
3681	    }
3682	}
3683      /* Handle calls that return values in multiple non-contiguous locations.
3684	 The Irix 6 ABI has examples of this.  */
3685      else if (GET_CODE (target) == PARALLEL)
3686	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3687			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3688      else if (GET_MODE (temp) == BLKmode)
3689	emit_block_move (target, temp, expr_size (exp),
3690			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3691      else
3692	emit_move_insn (target, temp);
3693    }
3694
3695  /* If we don't want a value, return NULL_RTX.  */
3696  if (! want_value)
3697    return NULL_RTX;
3698
3699  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3700     ??? The latter test doesn't seem to make sense.  */
3701  else if (dont_return_target && GET_CODE (temp) != MEM)
3702    return temp;
3703
3704  /* Return TARGET itself if it is a hard register.  */
3705  else if (want_value && GET_MODE (target) != BLKmode
3706	   && ! (GET_CODE (target) == REG
3707		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3708    return copy_to_reg (target);
3709
3710  else
3711    return target;
3712}
3713
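/* An illustrative, standalone sketch (hypothetical; nothing in this file
   calls it) of the copy-then-clear strategy used above when a string
   constant initializes a possibly larger array: copy only the string's
   actual bytes, then zero whatever remains of the target.  */
#if 0
#include <string.h>

static void
init_array_from_string (target, target_size, str, str_len)
     char *target;
     unsigned long target_size;
     const char *str;
     unsigned long str_len;
{
  /* Copy no more than the target can hold.  */
  unsigned long copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);

  /* Zero the tail, mirroring the memset/bzero library call emitted
     above when the constant is shorter than the array.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}
#endif
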
3714/* Return 1 if EXP just contains zeros.  */
3715
3716static int
3717is_zeros_p (exp)
3718     tree exp;
3719{
3720  tree elt;
3721
3722  switch (TREE_CODE (exp))
3723    {
3724    case CONVERT_EXPR:
3725    case NOP_EXPR:
3726    case NON_LVALUE_EXPR:
3727      return is_zeros_p (TREE_OPERAND (exp, 0));
3728
3729    case INTEGER_CST:
3730      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3731
3732    case COMPLEX_CST:
3733      return
3734	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3735
3736    case REAL_CST:
3737      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3738
3739    case CONSTRUCTOR:
3740      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3741	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3742      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3743	if (! is_zeros_p (TREE_VALUE (elt)))
3744	  return 0;
3745
3746      return 1;
3747
3748    default:
3749      return 0;
3750    }
3751}
3752
3753/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
3754
3755static int
3756mostly_zeros_p (exp)
3757     tree exp;
3758{
3759  if (TREE_CODE (exp) == CONSTRUCTOR)
3760    {
3761      int elts = 0, zeros = 0;
3762      tree elt = CONSTRUCTOR_ELTS (exp);
3763      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3764	{
3765	  /* If there are no ranges of true bits, it is all zero.  */
3766	  return elt == NULL_TREE;
3767	}
3768      for (; elt; elt = TREE_CHAIN (elt))
3769	{
3770	  /* We do not handle the case where the index is a RANGE_EXPR,
3771	     so the statistic will be somewhat inaccurate.
3772	     We do make a more accurate count in store_constructor itself,
3773	     and since this function is only used for nested array elements,
3774	     this should be close enough.  */
3775	  if (mostly_zeros_p (TREE_VALUE (elt)))
3776	    zeros++;
3777	  elts++;
3778	}
3779
3780      return 4 * zeros >= 3 * elts;
3781    }
3782
3783  return is_zeros_p (exp);
3784}
3785
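/* A minimal standalone restatement (hypothetical name; not used here) of
   the threshold test above: 4 * zeros >= 3 * elts is just
   zeros / elts >= 3/4 with both sides multiplied by 4 * elts, so no
   division or rounding is involved.  */
#if 0
static int
at_least_three_quarters_zero (zeros, elts)
     int zeros, elts;
{
  /* E.g. zeros == 3, elts == 4 gives 12 >= 12, so exactly 75% zeros
     qualifies; zeros == 2, elts == 3 gives 8 >= 9 and fails.  */
  return 4 * zeros >= 3 * elts;
}
#endif
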
3786/* Helper function for store_constructor.
3787   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3788   TYPE is the type of the CONSTRUCTOR, not the element type.
3789   CLEARED is as for store_constructor.
3790
3791   This provides a recursive shortcut back to store_constructor when it isn't
3792   necessary to go through store_field.  This is so that we can pass through
3793   the cleared field to let store_constructor know that we may not have to
3794   clear a substructure if the outer structure has already been cleared.  */
3795
3796static void
3797store_constructor_field (target, bitsize, bitpos,
3798			 mode, exp, type, cleared)
3799     rtx target;
3800     int bitsize, bitpos;
3801     enum machine_mode mode;
3802     tree exp, type;
3803     int cleared;
3804{
3805  if (TREE_CODE (exp) == CONSTRUCTOR
3806      && bitpos % BITS_PER_UNIT == 0
3807      /* If we have a non-zero bitpos for a register target, then we just
3808	 let store_field do the bitfield handling.  This is unlikely to
3809	 generate unnecessary clear instructions anyway.  */
3810      && (bitpos == 0 || GET_CODE (target) == MEM))
3811    {
3812      if (bitpos != 0)
3813	target = change_address (target, VOIDmode,
3814				 plus_constant (XEXP (target, 0),
3815						bitpos / BITS_PER_UNIT));
3816      store_constructor (exp, target, cleared);
3817    }
3818  else
3819    store_field (target, bitsize, bitpos, mode, exp,
3820		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3821		 int_size_in_bytes (type));
3822}
3823
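/* In pointer terms, the address adjustment above amounts to the
   following sketch (hypothetical; assumes BITS_PER_UNIT == 8): a
   byte-aligned BITPOS just advances the target address by whole bytes
   before store_constructor recurses.  */
#if 0
static char *
advance_by_bitpos (base, bitpos)
     char *base;
     int bitpos;
{
  /* Only meaningful when bitpos % 8 == 0, as checked above.  */
  return base + bitpos / 8;
}
#endif
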
3824/* Store the value of constructor EXP into the rtx TARGET.
3825   TARGET is either a REG or a MEM.
3826   CLEARED is true if TARGET is known to have been zeroed.  */
3827
3828static void
3829store_constructor (exp, target, cleared)
3830     tree exp;
3831     rtx target;
3832     int cleared;
3833{
3834  tree type = TREE_TYPE (exp);
3835
3836  /* We know our target cannot conflict, since safe_from_p has been called.  */
3837#if 0
3838  /* Don't try copying piece by piece into a hard register
3839     since that is vulnerable to being clobbered by EXP.
3840     Instead, construct in a pseudo register and then copy it all.  */
3841  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3842    {
3843      rtx temp = gen_reg_rtx (GET_MODE (target));
3844      store_constructor (exp, temp, 0);
3845      emit_move_insn (target, temp);
3846      return;
3847    }
3848#endif
3849
3850  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3851      || TREE_CODE (type) == QUAL_UNION_TYPE)
3852    {
3853      register tree elt;
3854
3855      /* Inform later passes that the whole union value is dead.  */
3856      if (TREE_CODE (type) == UNION_TYPE
3857	  || TREE_CODE (type) == QUAL_UNION_TYPE)
3858	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3859
3860      /* If we are building a static constructor into a register,
3861	 set the initial value as zero so we can fold the value into
3862	 a constant.  But if more than one register is involved,
3863	 this probably loses.  */
3864      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3865	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3866	{
3867	  if (! cleared)
3868	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3869
3870	  cleared = 1;
3871	}
3872
3873      /* If the constructor has fewer fields than the structure
3874	 or if we are initializing the structure to mostly zeros,
3875	 clear the whole structure first.  */
3876      else if ((list_length (CONSTRUCTOR_ELTS (exp))
3877		!= list_length (TYPE_FIELDS (type)))
3878	       || mostly_zeros_p (exp))
3879	{
3880	  if (! cleared)
3881	    clear_storage (target, expr_size (exp),
3882			   TYPE_ALIGN (type) / BITS_PER_UNIT);
3883
3884	  cleared = 1;
3885	}
3886      else
3887	/* Inform later passes that the old value is dead.  */
3888	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3889
3890      /* Store each element of the constructor into
3891	 the corresponding field of TARGET.  */
3892
3893      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3894	{
3895	  register tree field = TREE_PURPOSE (elt);
3896	  register enum machine_mode mode;
3897	  int bitsize;
3898	  int bitpos = 0;
3899	  int unsignedp;
3900	  tree pos, constant = 0, offset = 0;
3901	  rtx to_rtx = target;
3902
3903	  /* Just ignore missing fields.
3904	     We cleared the whole structure, above,
3905	     if any fields are missing.  */
3906	  if (field == 0)
3907	    continue;
3908
3909	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
3910	    continue;
3911
3912	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3913	  unsignedp = TREE_UNSIGNED (field);
3914	  mode = DECL_MODE (field);
3915	  if (DECL_BIT_FIELD (field))
3916	    mode = VOIDmode;
3917
3918	  pos = DECL_FIELD_BITPOS (field);
3919	  if (TREE_CODE (pos) == INTEGER_CST)
3920	    constant = pos;
3921	  else if (TREE_CODE (pos) == PLUS_EXPR
3922		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3923	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3924	  else
3925	    offset = pos;
3926
3927	  if (constant)
3928	    bitpos = TREE_INT_CST_LOW (constant);
3929
3930	  if (offset)
3931	    {
3932	      rtx offset_rtx;
3933
3934	      if (contains_placeholder_p (offset))
3935		offset = build (WITH_RECORD_EXPR, sizetype,
3936				offset, make_tree (TREE_TYPE (exp), target));
3937
3938	      offset = size_binop (FLOOR_DIV_EXPR, offset,
3939				   size_int (BITS_PER_UNIT));
3940
3941	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3942	      if (GET_CODE (to_rtx) != MEM)
3943		abort ();
3944
3945              if (GET_MODE (offset_rtx) != ptr_mode)
3946                {
3947#ifdef POINTERS_EXTEND_UNSIGNED
3948                  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3949#else
3950                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3951#endif
3952                }
3953
3954	      to_rtx
3955		= change_address (to_rtx, VOIDmode,
3956				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3957					   force_reg (ptr_mode, offset_rtx)));
3958	    }
3959	  if (TREE_READONLY (field))
3960	    {
3961	      if (GET_CODE (to_rtx) == MEM)
3962		to_rtx = copy_rtx (to_rtx);
3963
3964	      RTX_UNCHANGING_P (to_rtx) = 1;
3965	    }
3966
3967	  store_constructor_field (to_rtx, bitsize, bitpos,
3968				   mode, TREE_VALUE (elt), type, cleared);
3969	}
3970    }
3971  else if (TREE_CODE (type) == ARRAY_TYPE)
3972    {
3973      register tree elt;
3974      register int i;
3975      int need_to_clear;
3976      tree domain = TYPE_DOMAIN (type);
3977      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3978      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3979      tree elttype = TREE_TYPE (type);
3980
3981      /* If the constructor has fewer elements than the array,
3982         clear the whole array first.  Similarly if this is
3983         a static constructor of a non-BLKmode object.  */
3984      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3985	need_to_clear = 1;
3986      else
3987	{
3988	  HOST_WIDE_INT count = 0, zero_count = 0;
3989	  need_to_clear = 0;
3990	  /* This loop is a more accurate version of the loop in
3991	     mostly_zeros_p (it handles RANGE_EXPR in an index).
3992	     It is also needed to check for missing elements.  */
3993	  for (elt = CONSTRUCTOR_ELTS (exp);
3994	       elt != NULL_TREE;
3995	       elt = TREE_CHAIN (elt))
3996	    {
3997	      tree index = TREE_PURPOSE (elt);
3998	      HOST_WIDE_INT this_node_count;
3999	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4000		{
4001		  tree lo_index = TREE_OPERAND (index, 0);
4002		  tree hi_index = TREE_OPERAND (index, 1);
4003		  if (TREE_CODE (lo_index) != INTEGER_CST
4004		      || TREE_CODE (hi_index) != INTEGER_CST)
4005		    {
4006		      need_to_clear = 1;
4007		      break;
4008		    }
4009		  this_node_count = TREE_INT_CST_LOW (hi_index)
4010		    - TREE_INT_CST_LOW (lo_index) + 1;
4011		}
4012	      else
4013		this_node_count = 1;
4014	      count += this_node_count;
4015	      if (mostly_zeros_p (TREE_VALUE (elt)))
4016		zero_count += this_node_count;
4017	    }
4018	  /* Clear the entire array first if there are any missing elements,
4019	     or if the incidence of zero elements is >= 75%.  */
4020	  if (count < maxelt - minelt + 1
4021	      || 4 * zero_count >= 3 * count)
4022	    need_to_clear = 1;
4023	}
4024      if (need_to_clear)
4025	{
4026	  if (! cleared)
4027	    clear_storage (target, expr_size (exp),
4028			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4029	  cleared = 1;
4030	}
4031      else
4032	/* Inform later passes that the old value is dead.  */
4033	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4034
4035      /* Store each element of the constructor into
4036	 the corresponding element of TARGET, determined
4037	 by counting the elements.  */
4038      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4039	   elt;
4040	   elt = TREE_CHAIN (elt), i++)
4041	{
4042	  register enum machine_mode mode;
4043	  int bitsize;
4044	  int bitpos;
4045	  int unsignedp;
4046	  tree value = TREE_VALUE (elt);
4047	  tree index = TREE_PURPOSE (elt);
4048	  rtx xtarget = target;
4049
4050	  if (cleared && is_zeros_p (value))
4051	    continue;
4052
4053	  mode = TYPE_MODE (elttype);
4054	  bitsize = GET_MODE_BITSIZE (mode);
4055	  unsignedp = TREE_UNSIGNED (elttype);
4056
4057	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4058	    {
4059	      tree lo_index = TREE_OPERAND (index, 0);
4060	      tree hi_index = TREE_OPERAND (index, 1);
4061	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4062	      struct nesting *loop;
4063	      HOST_WIDE_INT lo, hi, count;
4064	      tree position;
4065
4066	      /* If the range is constant and "small", unroll the loop.  */
4067	      if (TREE_CODE (lo_index) == INTEGER_CST
4068		  && TREE_CODE (hi_index) == INTEGER_CST
4069		  && (lo = TREE_INT_CST_LOW (lo_index),
4070		      hi = TREE_INT_CST_LOW (hi_index),
4071		      count = hi - lo + 1,
4072		      (GET_CODE (target) != MEM
4073		       || count <= 2
4074		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4075			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4076			   <= 40 * 8))))
4077		{
4078		  lo -= minelt;  hi -= minelt;
4079		  for (; lo <= hi; lo++)
4080		    {
4081		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4082		      store_constructor_field (target, bitsize, bitpos,
4083					       mode, value, type, cleared);
4084		    }
4085		}
4086	      else
4087		{
4088		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4089		  loop_top = gen_label_rtx ();
4090		  loop_end = gen_label_rtx ();
4091
4092		  unsignedp = TREE_UNSIGNED (domain);
4093
4094		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4095
4096		  DECL_RTL (index) = index_r
4097		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4098						 &unsignedp, 0));
4099
4100		  if (TREE_CODE (value) == SAVE_EXPR
4101		      && SAVE_EXPR_RTL (value) == 0)
4102		    {
4103		      /* Make sure value gets expanded once before the
4104                         loop.  */
4105		      expand_expr (value, const0_rtx, VOIDmode, 0);
4106		      emit_queue ();
4107		    }
4108		  store_expr (lo_index, index_r, 0);
4109		  loop = expand_start_loop (0);
4110
4111		  /* Assign value to element index.  */
4112		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4113					 size_int (BITS_PER_UNIT));
4114		  position = size_binop (MULT_EXPR,
4115					 size_binop (MINUS_EXPR, index,
4116						     TYPE_MIN_VALUE (domain)),
4117					 position);
4118		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4119		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4120		  xtarget = change_address (target, mode, addr);
4121		  if (TREE_CODE (value) == CONSTRUCTOR)
4122		    store_constructor (value, xtarget, cleared);
4123		  else
4124		    store_expr (value, xtarget, 0);
4125
4126		  expand_exit_loop_if_false (loop,
4127					     build (LT_EXPR, integer_type_node,
4128						    index, hi_index));
4129
4130		  expand_increment (build (PREINCREMENT_EXPR,
4131					   TREE_TYPE (index),
4132					   index, integer_one_node), 0, 0);
4133		  expand_end_loop ();
4134		  emit_label (loop_end);
4135
4136		  /* Needed by stupid register allocation, to extend the
4137		     lifetime of pseudo-regs used by target past the end
4138		     of the loop.  */
4139		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
4140		}
4141	    }
4142	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4143	      || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4144	    {
4145	      rtx pos_rtx, addr;
4146	      tree position;
4147
4148	      if (index == 0)
4149		index = size_int (i);
4150
4151	      if (minelt)
4152		index = size_binop (MINUS_EXPR, index,
4153				    TYPE_MIN_VALUE (domain));
4154	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4155				     size_int (BITS_PER_UNIT));
4156	      position = size_binop (MULT_EXPR, index, position);
4157	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4158	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4159	      xtarget = change_address (target, mode, addr);
4160	      store_expr (value, xtarget, 0);
4161	    }
4162	  else
4163	    {
4164	      if (index != 0)
4165		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4166			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4167	      else
4168		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4169	      store_constructor_field (target, bitsize, bitpos,
4170				       mode, value, type, cleared);
4171	    }
4172	}
4173    }
4174  /* Set constructor assignments.  */
4175  else if (TREE_CODE (type) == SET_TYPE)
4176    {
4177      tree elt = CONSTRUCTOR_ELTS (exp);
4178      int nbytes = int_size_in_bytes (type), nbits;
4179      tree domain = TYPE_DOMAIN (type);
4180      tree domain_min, domain_max, bitlength;
4181
4182      /* The default implementation strategy is to extract the constant
4183	 parts of the constructor, use that to initialize the target,
4184	 and then "or" in whatever non-constant ranges we need in addition.
4185
4186	 If a large set is all zero or all ones, it is
4187	 probably better to set it using memset (if available) or bzero.
4188	 Also, if a large set has just a single range, it may also be
4189	 Also, if a large set has just a single range, it may also be
4190	 better to first clear the whole set (using bzero/memset)
4191	 and then set the bits we want.  */
4192      /* Check for all zeros.  */
4193      if (elt == NULL_TREE)
4194	{
4195	  if (!cleared)
4196	    clear_storage (target, expr_size (exp),
4197			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4198	  return;
4199	}
4200
4201      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4202      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4203      bitlength = size_binop (PLUS_EXPR,
4204			      size_binop (MINUS_EXPR, domain_max, domain_min),
4205			      size_one_node);
4206
4207      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4208	abort ();
4209      nbits = TREE_INT_CST_LOW (bitlength);
4210
4211      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4212	 are "complicated" (more than one range), initialize (the
4213	 constant parts) by copying from a constant.  */
4214      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4215	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4216	{
4217	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4218	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4219	  char *bit_buffer = (char *) alloca (nbits);
4220	  HOST_WIDE_INT word = 0;
4221	  int bit_pos = 0;
4222	  int ibit = 0;
4223	  int offset = 0;  /* In bytes from beginning of set.  */
4224	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4225	  for (;;)
4226	    {
4227	      if (bit_buffer[ibit])
4228		{
4229		  if (BYTES_BIG_ENDIAN)
4230		    word |= (1 << (set_word_size - 1 - bit_pos));
4231		  else
4232		    word |= 1 << bit_pos;
4233		}
4234	      bit_pos++;  ibit++;
4235	      if (bit_pos >= set_word_size || ibit == nbits)
4236		{
4237		  if (word != 0 || ! cleared)
4238		    {
4239		      rtx datum = GEN_INT (word);
4240		      rtx to_rtx;
4241		      /* The assumption here is that it is safe to use
4242			 XEXP if the set is multi-word, but not if
4243			 it's single-word.  */
4244		      if (GET_CODE (target) == MEM)
4245			{
4246			  to_rtx = plus_constant (XEXP (target, 0), offset);
4247			  to_rtx = change_address (target, mode, to_rtx);
4248			}
4249		      else if (offset == 0)
4250			to_rtx = target;
4251		      else
4252			abort ();
4253		      emit_move_insn (to_rtx, datum);
4254		    }
4255		  if (ibit == nbits)
4256		    break;
4257		  word = 0;
4258		  bit_pos = 0;
4259		  offset += set_word_size / BITS_PER_UNIT;
4260		}
4261	    }
4262	}
4263      else if (!cleared)
4264	{
4265	  /* Don't bother clearing storage if the set is all ones.  */
4266	  if (TREE_CHAIN (elt) != NULL_TREE
4267	      || (TREE_PURPOSE (elt) == NULL_TREE
4268		  ? nbits != 1
4269		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4270		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4271		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4272			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4273			 != nbits))))
4274	    clear_storage (target, expr_size (exp),
4275			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4276	}
4277
4278      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4279	{
4280	  /* Start of range of element, or NULL.  */
4281	  tree startbit = TREE_PURPOSE (elt);
4282	  /* End of range of element, or element value.  */
4283	  tree endbit   = TREE_VALUE (elt);
4284#ifdef TARGET_MEM_FUNCTIONS
4285	  HOST_WIDE_INT startb, endb;
4286#endif
4287	  rtx  bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4288
4289	  bitlength_rtx = expand_expr (bitlength,
4290			    NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4291
4292	  /* Handle a non-range tuple element like [ expr ].  */
4293	  if (startbit == NULL_TREE)
4294	    {
4295	      startbit = save_expr (endbit);
4296	      endbit = startbit;
4297	    }
4298	  startbit = convert (sizetype, startbit);
4299	  endbit = convert (sizetype, endbit);
4300	  if (! integer_zerop (domain_min))
4301	    {
4302	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4303	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4304	    }
4305	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4306				      EXPAND_CONST_ADDRESS);
4307	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4308				    EXPAND_CONST_ADDRESS);
4309
4310	  if (REG_P (target))
4311	    {
4312	      targetx = assign_stack_temp (GET_MODE (target),
4313					   GET_MODE_SIZE (GET_MODE (target)),
4314					   0);
4315	      emit_move_insn (targetx, target);
4316	    }
4317	  else if (GET_CODE (target) == MEM)
4318	    targetx = target;
4319	  else
4320	    abort ();
4321
4322#ifdef TARGET_MEM_FUNCTIONS
4323	  /* Optimization:  If startbit and endbit are
4324	     constants divisible by BITS_PER_UNIT,
4325	     call memset instead.  */
4326	  if (TREE_CODE (startbit) == INTEGER_CST
4327	      && TREE_CODE (endbit) == INTEGER_CST
4328	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4329	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4330	    {
4331	      emit_library_call (memset_libfunc, 0,
4332				 VOIDmode, 3,
4333				 plus_constant (XEXP (targetx, 0),
4334						startb / BITS_PER_UNIT),
4335				 Pmode,
4336				 constm1_rtx, TYPE_MODE (integer_type_node),
4337				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4338				 TYPE_MODE (sizetype));
4339	    }
4340	  else
4341#endif
4342	    {
4343	      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4344				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4345				 bitlength_rtx, TYPE_MODE (sizetype),
4346				 startbit_rtx, TYPE_MODE (sizetype),
4347				 endbit_rtx, TYPE_MODE (sizetype));
4348	    }
4349	  if (REG_P (target))
4350	    emit_move_insn (target, targetx);
4351	}
4352    }
4353
4354  else
4355    abort ();
4356}
4357
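/* A self-contained model (hypothetical; not used by the compiler) of the
   bit-packing loop in the SET_TYPE case above: accumulate one-byte-per-bit
   flags into words, honoring bit order within the word, and flush each
   word when it fills or the bits run out.  BYTES_BIG_ENDIAN is modeled
   by a plain parameter.  */
#if 0
static void
pack_set_bits (bits, nbits, word_size, big_endian, out)
     const char *bits;		/* One byte per bit, each 0 or 1.  */
     int nbits, word_size, big_endian;
     unsigned long *out;	/* Receives the packed words.  */
{
  unsigned long word = 0;
  int bit_pos = 0, ibit = 0, nwords = 0;

  for (;;)
    {
      if (bits[ibit])
	word |= 1UL << (big_endian ? word_size - 1 - bit_pos : bit_pos);
      bit_pos++, ibit++;
      if (bit_pos >= word_size || ibit == nbits)
	{
	  out[nwords++] = word;	/* Flush the finished word.  */
	  if (ibit == nbits)
	    break;
	  word = 0, bit_pos = 0;
	}
    }
}
#endif
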
4358/* Store the value of EXP (an expression tree)
4359   into a subfield of TARGET which has mode MODE and occupies
4360   BITSIZE bits, starting BITPOS bits from the start of TARGET.
4361   If MODE is VOIDmode, it means that we are storing into a bit-field.
4362
4363   If VALUE_MODE is VOIDmode, return nothing in particular.
4364   UNSIGNEDP is not used in this case.
4365
4366   Otherwise, return an rtx for the value stored.  This rtx
4367   has mode VALUE_MODE if that is convenient to do.
4368   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4369
4370   ALIGN is the alignment that TARGET is known to have, measured in bytes.
4371   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */
4372
4373static rtx
4374store_field (target, bitsize, bitpos, mode, exp, value_mode,
4375	     unsignedp, align, total_size)
4376     rtx target;
4377     int bitsize, bitpos;
4378     enum machine_mode mode;
4379     tree exp;
4380     enum machine_mode value_mode;
4381     int unsignedp;
4382     int align;
4383     int total_size;
4384{
4385  HOST_WIDE_INT width_mask = 0;
4386
4387  if (TREE_CODE (exp) == ERROR_MARK)
4388    return const0_rtx;
4389
4390  if (bitsize < HOST_BITS_PER_WIDE_INT)
4391    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4392
4393  /* If we are storing into an unaligned field of an aligned union that is
4394     in a register, we may have the mode of TARGET being an integer mode but
4395     MODE == BLKmode.  In that case, get an aligned object whose size and
4396     alignment are the same as TARGET and store TARGET into it (we can avoid
4397     the store if the field being stored is the entire width of TARGET).  Then
4398     call ourselves recursively to store the field into a BLKmode version of
4399     that object.  Finally, load from the object into TARGET.  This is not
4400     very efficient in general, but should only be slightly more expensive
4401     than the otherwise-required unaligned accesses.  Perhaps this can be
4402     cleaned up later.  */
4403
4404  if (mode == BLKmode
4405      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4406    {
4407      rtx object = assign_stack_temp (GET_MODE (target),
4408				      GET_MODE_SIZE (GET_MODE (target)), 0);
4409      rtx blk_object = copy_rtx (object);
4410
4411      MEM_IN_STRUCT_P (object) = 1;
4412      MEM_IN_STRUCT_P (blk_object) = 1;
4413      PUT_MODE (blk_object, BLKmode);
4414
4415      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4416	emit_move_insn (object, target);
4417
4418      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4419		   align, total_size);
4420
4421      /* Even though we aren't returning target, we need to
4422	 give it the updated value.  */
4423      emit_move_insn (target, object);
4424
4425      return blk_object;
4426    }
4427
4428  /* If the structure is in a register or if the component
4429     is a bit field, we cannot use addressing to access it.
4430     Use bit-field techniques or SUBREG to store in it.  */
4431
4432  if (mode == VOIDmode
4433      || (mode != BLKmode && ! direct_store[(int) mode])
4434      || GET_CODE (target) == REG
4435      || GET_CODE (target) == SUBREG
4436      /* If the field isn't aligned enough to store as an ordinary memref,
4437	 store it as a bit field.  */
4438      || (SLOW_UNALIGNED_ACCESS
4439	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4440      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4441    {
4442      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4443
4444      /* If BITSIZE is narrower than the size of the type of EXP
4445	 we will be narrowing TEMP.  Normally, what's wanted are the
4446	 low-order bits.  However, if EXP's type is a record and this is
4447	 a big-endian machine, we want the upper BITSIZE bits.  */
4448      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4449	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4450	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4451	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4452			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4453				       - bitsize),
4454			     temp, 1);
4455
4456      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4457	 MODE.  */
4458      if (mode != VOIDmode && mode != BLKmode
4459	  && mode != TYPE_MODE (TREE_TYPE (exp)))
4460	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4461
4462      /* If the modes of TARGET and TEMP are both BLKmode, both
4463	 must be in memory and BITPOS must be aligned on a byte
4464	 boundary.  If so, we simply do a block copy.  */
4465      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4466	{
4467	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4468	      || bitpos % BITS_PER_UNIT != 0)
4469	    abort ();
4470
4471	  target = change_address (target, VOIDmode,
4472				   plus_constant (XEXP (target, 0),
4473						bitpos / BITS_PER_UNIT));
4474
4475	  emit_block_move (target, temp,
4476			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4477				    / BITS_PER_UNIT),
4478			   1);
4479
4480	  return value_mode == VOIDmode ? const0_rtx : target;
4481	}
4482
4483      /* Store the value in the bitfield.  */
4484      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4485      if (value_mode != VOIDmode)
4486	{
4487	  /* The caller wants an rtx for the value.  */
4488	  /* If possible, avoid refetching from the bitfield itself.  */
4489	  if (width_mask != 0
4490	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4491	    {
4492	      tree count;
4493	      enum machine_mode tmode;
4494
4495	      if (unsignedp)
4496		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4497	      tmode = GET_MODE (temp);
4498	      if (tmode == VOIDmode)
4499		tmode = value_mode;
4500	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4501	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4502	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4503	    }
4504	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
4505				    NULL_RTX, value_mode, 0, align,
4506				    total_size);
4507	}
4508      return const0_rtx;
4509    }
4510  else
4511    {
4512      rtx addr = XEXP (target, 0);
4513      rtx to_rtx;
4514
4515      /* If a value is wanted, it must be the lhs,
4516	 so make the address stable for multiple use.  */
4517
4518      if (value_mode != VOIDmode && GET_CODE (addr) != REG
4519	  && ! CONSTANT_ADDRESS_P (addr)
4520	  /* A frame-pointer reference is already stable.  */
4521	  && ! (GET_CODE (addr) == PLUS
4522		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
4523		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
4524		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4525	addr = copy_to_reg (addr);
4526
4527      /* Now build a reference to just the desired component.  */
4528
4529      to_rtx = copy_rtx (change_address (target, mode,
4530					 plus_constant (addr,
4531							(bitpos
4532							 / BITS_PER_UNIT))));
4533      MEM_IN_STRUCT_P (to_rtx) = 1;
4534
4535      return store_expr (exp, to_rtx, value_mode != VOIDmode);
4536    }
4537}
4538
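/* An illustrative standalone version (hypothetical; assumes an
   arithmetic right shift on signed longs, which the expand_shift calls
   above guarantee at the rtl level) of the shift pair used to produce a
   signed value from a BITSIZE-bit field: shift left until the field's
   sign bit becomes the word's sign bit, then shift back arithmetically.  */
#if 0
static long
sign_extend_field (value, bitsize)
     long value;
     int bitsize;
{
  int shift = sizeof (long) * 8 - bitsize;

  /* E.g. bitsize == 4, value == 0xE yields -2.  */
  return (long) ((unsigned long) value << shift) >> shift;
}
#endif
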
4539/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4540   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4541   ARRAY_REFs and find the ultimate containing object, which we return.
4542
4543   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4544   bit position, and *PUNSIGNEDP to the signedness of the field.
4545   If the position of the field is variable, we store a tree
4546   giving the variable offset (in units) in *POFFSET.
4547   This offset is in addition to the bit position.
4548   If the position is not variable, we store 0 in *POFFSET.
4549   We set *PALIGNMENT to the alignment in bytes of the address that will be
4550   computed.  This is the alignment of the thing we return if *POFFSET
4551   is zero, but can be less strictly aligned if *POFFSET is nonzero.
4552
4553   If any of the extraction expressions is volatile,
4554   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
4555
4556   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
4557   is a mode that can be used to access the field.  In that case, *PBITSIZE
4558   is redundant.
4559
4560   If the field describes a variable-sized object, *PMODE is set to
4561   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
4562   this case, but the address of the object can be found.   */
4563
4564tree
4565get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4566		     punsignedp, pvolatilep, palignment)
4567     tree exp;
4568     int *pbitsize;
4569     int *pbitpos;
4570     tree *poffset;
4571     enum machine_mode *pmode;
4572     int *punsignedp;
4573     int *pvolatilep;
4574     int *palignment;
4575{
4576  tree orig_exp = exp;
4577  tree size_tree = 0;
4578  enum machine_mode mode = VOIDmode;
4579  tree offset = integer_zero_node;
4580  int alignment = BIGGEST_ALIGNMENT;
4581
4582  if (TREE_CODE (exp) == COMPONENT_REF)
4583    {
4584      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4585      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4586	mode = DECL_MODE (TREE_OPERAND (exp, 1));
4587      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4588    }
4589  else if (TREE_CODE (exp) == BIT_FIELD_REF)
4590    {
4591      size_tree = TREE_OPERAND (exp, 1);
4592      *punsignedp = TREE_UNSIGNED (exp);
4593    }
4594  else
4595    {
4596      mode = TYPE_MODE (TREE_TYPE (exp));
4597      *pbitsize = GET_MODE_BITSIZE (mode);
4598      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4599    }
4600
4601  if (size_tree)
4602    {
4603      if (TREE_CODE (size_tree) != INTEGER_CST)
4604	mode = BLKmode, *pbitsize = -1;
4605      else
4606	*pbitsize = TREE_INT_CST_LOW (size_tree);
4607    }
4608
4609  /* Compute cumulative bit-offset for nested component-refs and array-refs,
4610     and find the ultimate containing object.  */
4611
4612  *pbitpos = 0;
4613
4614  while (1)
4615    {
4616      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4617	{
4618	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
4619		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4620		      : TREE_OPERAND (exp, 2));
4621	  tree constant = integer_zero_node, var = pos;
4622
4623	  /* If this field hasn't been filled in yet, don't go
4624	     past it.  This should only happen when folding expressions
4625	     made during type construction.  */
4626	  if (pos == 0)
4627	    break;
4628
4629	  /* Assume here that the offset is a multiple of a unit.
4630	     If not, there should be an explicitly added constant.  */
4631	  if (TREE_CODE (pos) == PLUS_EXPR
4632	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4633	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4634	  else if (TREE_CODE (pos) == INTEGER_CST)
4635	    constant = pos, var = integer_zero_node;
4636
4637	  *pbitpos += TREE_INT_CST_LOW (constant);
4638	  offset = size_binop (PLUS_EXPR, offset,
4639			       size_binop (EXACT_DIV_EXPR, var,
4640					   size_int (BITS_PER_UNIT)));
4641	}
4642
4643      else if (TREE_CODE (exp) == ARRAY_REF)
4644	{
4645	  /* This code is based on the code in case ARRAY_REF in expand_expr
4646	     below.  We assume here that the size of an array element is
4647	     always an integral multiple of BITS_PER_UNIT.  */
4648
4649	  tree index = TREE_OPERAND (exp, 1);
4650	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4651	  tree low_bound
4652	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4653	  tree index_type = TREE_TYPE (index);
4654	  tree xindex;
4655
4656	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4657	    {
4658	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4659			       index);
4660	      index_type = TREE_TYPE (index);
4661	    }
4662
4663	  if (! integer_zerop (low_bound))
4664	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4665
4666	  if (TREE_CODE (index) == INTEGER_CST)
4667	    {
4668	      index = convert (sbitsizetype, index);
4669	      index_type = TREE_TYPE (index);
4670	    }
4671
4672	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4673			        convert (sbitsizetype,
4674					 TYPE_SIZE (TREE_TYPE (exp)))));
4675
4676	  if (TREE_CODE (xindex) == INTEGER_CST
4677	      && TREE_INT_CST_HIGH (xindex) == 0)
4678	    *pbitpos += TREE_INT_CST_LOW (xindex);
4679	  else
4680	    {
4681	      /* Either the bit offset calculated above is not constant, or
4682		 it overflowed.  In either case, redo the multiplication
4683		 against the size in units.  This is especially important
4684		 in the non-constant case to avoid a division at runtime.  */
4685	      xindex = fold (build (MULT_EXPR, ssizetype, index,
4686                                    convert (ssizetype,
4687                                         TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4688
4689	      if (contains_placeholder_p (xindex))
4690		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4691
4692	      offset = size_binop (PLUS_EXPR, offset, xindex);
4693	    }
4694	}
4695      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4696	       && ! ((TREE_CODE (exp) == NOP_EXPR
4697		      || TREE_CODE (exp) == CONVERT_EXPR)
4698		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4699			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4700			       != UNION_TYPE))
4701		     && (TYPE_MODE (TREE_TYPE (exp))
4702			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4703	break;
4704
4705      /* If any reference in the chain is volatile, the effect is volatile.  */
4706      if (TREE_THIS_VOLATILE (exp))
4707	*pvolatilep = 1;
4708
4709      /* If the offset is non-constant already, then we can't assume any
4710	 alignment more than the alignment here.  */
4711      if (! integer_zerop (offset))
4712	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4713
4714      exp = TREE_OPERAND (exp, 0);
4715    }
4716
4717  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4718    alignment = MIN (alignment, DECL_ALIGN (exp));
4719  else if (TREE_TYPE (exp) != 0)
4720    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4721
4722  if (integer_zerop (offset))
4723    offset = 0;
4724
4725  if (offset != 0 && contains_placeholder_p (offset))
4726    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4727
4728  *pmode = mode;
4729  *poffset = offset;
4730  *palignment = alignment / BITS_PER_UNIT;
4731  return exp;
4732}
4733
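/* A toy model (hypothetical types and names; assumes 8-bit units, i.e.
   BITS_PER_UNIT == 8) of the split computed above: constant parts of a
   reference accumulate in bits into *PBITPOS, while any variable part
   accumulates separately, in units, into *POFFSET.  For an array
   reference:  */
#if 0
struct ref_pos
{
  long bitpos;			/* Constant part, in bits.  */
  long var_offset_units;	/* Variable part, in bytes.  */
};

static void
add_array_index (pos, index, index_is_constant, elt_bits)
     struct ref_pos *pos;
     long index, elt_bits;
     int index_is_constant;
{
  if (index_is_constant)
    /* Folds into the constant bit position, like the xindex case.  */
    pos->bitpos += index * elt_bits;
  else
    /* Otherwise redo the multiply against the size in units, avoiding
       a runtime division, and keep it in the variable offset.  */
    pos->var_offset_units += index * (elt_bits / 8);
}
#endif
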
4734/* Subroutine of expand_expr: compute memory_usage from modifier.  */
4735static enum memory_use_mode
4736get_memory_usage_from_modifier (modifier)
4737     enum expand_modifier modifier;
4738{
4739  switch (modifier)
4740    {
4741    case EXPAND_NORMAL:
4742    case EXPAND_SUM:
4743      return MEMORY_USE_RO;
4744      break;
4745    case EXPAND_MEMORY_USE_WO:
4746      return MEMORY_USE_WO;
4747      break;
4748    case EXPAND_MEMORY_USE_RW:
4749      return MEMORY_USE_RW;
4750      break;
4751    case EXPAND_MEMORY_USE_DONT:
4752      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4753	 MEMORY_USE_DONT, because they are modifiers to a call of
4754	 expand_expr in the ADDR_EXPR case of expand_expr.  */
4755    case EXPAND_CONST_ADDRESS:
4756    case EXPAND_INITIALIZER:
4757      return MEMORY_USE_DONT;
4758    case EXPAND_MEMORY_USE_BAD:
4759    default:
4760      abort ();
4761    }
4762}
4763
4764/* Given an rtx VALUE that may contain additions and multiplications,
4765   return an equivalent value that just refers to a register or memory.
4766   This is done by generating instructions to perform the arithmetic
4767   and returning a pseudo-register containing the value.
4768
4769   The returned value may be a REG, SUBREG, MEM or constant.  */
4770
4771rtx
4772force_operand (value, target)
4773     rtx value, target;
4774{
4775  register optab binoptab = 0;
4776  /* Use a temporary to force order of execution of calls to
4777     `force_operand'.  */
4778  rtx tmp;
4779  register rtx op2;
4780  /* Use subtarget as the target for operand 0 of a binary operation.  */
4781  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4782
4783  /* Check for a PIC address load.  */
4784  if (flag_pic
4785      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4786      && XEXP (value, 0) == pic_offset_table_rtx
4787      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4788	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
4789	  || GET_CODE (XEXP (value, 1)) == CONST))
4790    {
4791      if (!subtarget)
4792	subtarget = gen_reg_rtx (GET_MODE (value));
4793      emit_move_insn (subtarget, value);
4794      return subtarget;
4795    }
4796
4797  if (GET_CODE (value) == PLUS)
4798    binoptab = add_optab;
4799  else if (GET_CODE (value) == MINUS)
4800    binoptab = sub_optab;
4801  else if (GET_CODE (value) == MULT)
4802    {
4803      op2 = XEXP (value, 1);
4804      if (!CONSTANT_P (op2)
4805	  && !(GET_CODE (op2) == REG && op2 != subtarget))
4806	subtarget = 0;
4807      tmp = force_operand (XEXP (value, 0), subtarget);
4808      return expand_mult (GET_MODE (value), tmp,
4809			  force_operand (op2, NULL_RTX),
4810			  target, 0);
4811    }
4812
4813  if (binoptab)
4814    {
4815      op2 = XEXP (value, 1);
4816      if (!CONSTANT_P (op2)
4817	  && !(GET_CODE (op2) == REG && op2 != subtarget))
4818	subtarget = 0;
4819      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4820	{
4821	  binoptab = add_optab;
4822	  op2 = negate_rtx (GET_MODE (value), op2);
4823	}
4824
4825      /* Check for an addition with OP2 a constant integer and our first
4826	 operand a PLUS of a virtual register and something else.  In that
4827	 case, we want to emit the sum of the virtual register and the
4828	 constant first and then add the other value.  This allows virtual
4829	 register instantiation to simply modify the constant rather than
4830	 creating another one around this addition.  */
4831      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4832	  && GET_CODE (XEXP (value, 0)) == PLUS
4833	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4834	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4835	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4836	{
4837	  rtx temp = expand_binop (GET_MODE (value), binoptab,
4838				   XEXP (XEXP (value, 0), 0), op2,
4839				   subtarget, 0, OPTAB_LIB_WIDEN);
4840	  return expand_binop (GET_MODE (value), binoptab, temp,
4841			       force_operand (XEXP (XEXP (value, 0), 1), 0),
4842			       target, 0, OPTAB_LIB_WIDEN);
4843	}
4844
4845      tmp = force_operand (XEXP (value, 0), subtarget);
4846      return expand_binop (GET_MODE (value), binoptab, tmp,
4847			   force_operand (op2, NULL_RTX),
4848			   target, 0, OPTAB_LIB_WIDEN);
4849      /* We give UNSIGNEDP = 0 to expand_binop
4850	 because the only operations we are expanding here are signed ones.  */
4851    }
4852  return value;
4853}
4854
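/* A scalar analogy (hypothetical; not used here) for the reassociation
   above: given (V + X) + C with C a constant integer and V a virtual
   register, computing (V + C) first lets virtual register instantiation
   fold C into the constant it already adds for V, instead of leaving a
   second addition behind.  */
#if 0
static long
reassociate_virtual_sum (v, x, c)
     long v, x, c;
{
  long t = v + c;	/* Virtual register plus constant; folded later.  */
  return t + x;		/* Then add the remaining, possibly complex, part.  */
}
#endif
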
4855/* Subroutine of expand_expr:
4856   save the non-copied parts (LIST) of an expr (LHS), and return a list
4857   which can restore these values to their previous values,
4858   should something modify their storage.  */
4859
4860static tree
4861save_noncopied_parts (lhs, list)
4862     tree lhs;
4863     tree list;
4864{
4865  tree tail;
4866  tree parts = 0;
4867
4868  for (tail = list; tail; tail = TREE_CHAIN (tail))
4869    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4870      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4871    else
4872      {
4873	tree part = TREE_VALUE (tail);
4874	tree part_type = TREE_TYPE (part);
4875	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4876	rtx target = assign_temp (part_type, 0, 1, 1);
4877	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4878	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4879	parts = tree_cons (to_be_saved,
4880			   build (RTL_EXPR, part_type, NULL_TREE,
4881				  (tree) target),
4882			   parts);
4883	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4884      }
4885  return parts;
4886}
4887
4888/* Subroutine of expand_expr:
4889   record the non-copied parts (LIST) of an expr (LHS), and return a list
4890   which specifies the initial values of these parts.  */
4891
4892static tree
4893init_noncopied_parts (lhs, list)
4894     tree lhs;
4895     tree list;
4896{
4897  tree tail;
4898  tree parts = 0;
4899
4900  for (tail = list; tail; tail = TREE_CHAIN (tail))
4901    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4902      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4903    else
4904      {
4905	tree part = TREE_VALUE (tail);
4906	tree part_type = TREE_TYPE (part);
4907	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4908	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4909      }
4910  return parts;
4911}
4912
4913/* Subroutine of expand_expr: return nonzero iff there is no way that
4914   EXP can reference X, which is being modified.  TOP_P is nonzero if this
4915   call is going to be used to determine whether we need a temporary
4916   for EXP, as opposed to a recursive call to this function.
4917
4918   It is always safe for this routine to return zero since it merely
4919   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we're done.  */

	  /* For now, don't bother re-sizing the array. */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;
	  TREE_SET_CODE (exp, ERROR_MARK);

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    if (TREE_OPERAND (exp, i) != 0
		&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	      return 0;
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
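
/* For example (added for illustration), a VAR_DECL or a COMPONENT_REF
   denotes storage whose type cannot change at run time, whereas an
   INDIRECT_REF such as *p might refer to an object of some derived
   type, so it is deliberately absent from the list above.  */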

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
    tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  enum machine_mode mode;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
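
/* Illustration under an assumed configuration (not from this file): a
   target that defines

       #define MAX_INTEGER_COMPUTATION_MODE SImode

   would have, say, a DImode multiply rejected by the checks above with
   "unsupported wide integer operation", since DImode compares greater
   than SImode in the machine mode enumeration.  */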


/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
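
/* For instance (illustrative only; the names are hypothetical), with
   MODIFIER == EXPAND_SUM an address computation such as &a[i] on a
   32-bit target may come back as

       (plus:SI (mult:SI (reg:SI i) (const_int 4))
		(symbol_ref:SI "a"))

   rather than being forced into a single pseudo register.  */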

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  if (target)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If we will do cse, generate all results into pseudo registers,
     since (1) that allows cse to find more things
     and (2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						  label_rtx (exp),
						  p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
					     label_rtx (exp), forced_labels);
	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Only check automatic variables.  Currently, function arguments are
         not checked (this can be done at compile-time with prototypes).
         Aggregates are not checked.  */
      if (flag_check_memory_usage && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && DECL_CONTEXT (exp) != NULL_TREE
	  && ! TREE_STATIC (exp)
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), ptr_mode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
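
      /* Illustration (added): for a tree such as
	 (MULT_EXPR (SAVE_EXPR E) (SAVE_EXPR E)) referring to one shared
	 SAVE_EXPR node, the first expansion above stores E into the
	 temporary recorded in SAVE_EXPR_RTL, and every later expansion
	 of the same node returns that rtx directly, so E is evaluated
	 exactly once.  */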

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if any object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object, skipping the list entries before
		   it was found, in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
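
      /* Sketch (illustrative; the scenario is typical of Ada): for a
	 record type whose array field has bounds stored in the record
	 itself, the size tree contains PLACEHOLDER_EXPRs standing for
	 "the record at hand".  Wrapping that size as

	     WITH_RECORD_EXPR <size-expr, the-record>

	 lets the search in the PLACEHOLDER_EXPR case above substitute
	 the-record (or a pointer to it) for each placeholder.  */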

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (move_by_pieces_ninsns
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT)
			    > MOVE_RATIO)
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);
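
	/* E.g. (illustration only) a read of "abc"[1] passes the tests
	   above and folds directly to (const_int 98), the code for 'b',
	   so no memory reference is emitted.  */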

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, ptr_mode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_IN_STRUCT_P (temp) = 1;

	/* If the pointer is actually a REFERENCE_TYPE, this could be pointing
	   into some aggregate too.  In theory we could fold this into the
	   previous check and use rtx_addr_varies_p there too.

	   However, this seems safer.  */
	if (!MEM_IN_STRUCT_P (temp)
	    && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
	        /* This may have been an array reference to the first element
		   that was optimized away from being an addition.  */
	        || (TREE_CODE (exp1) == NOP_EXPR
		    && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
			 == REFERENCE_TYPE)
		        || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
			     == POINTER_TYPE)
			    && (AGGREGATE_TYPE_P
			        (TREE_TYPE (TREE_TYPE
					    (TREE_OPERAND (exp1, 0))))))))))
	  MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }
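
      /* For instance (illustrative; names hypothetical): when optimizing,
	 given
	     static const int tab[3] = {10, 20, 30};
	 a read of tab[1] is folded here to (const_int 20) straight from
	 DECL_INITIAL, with no load emitted.  */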

      /* ... fall through ... */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
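
      /* Worked illustration (added): for a signed 3-bit field whose
	 CONSTRUCTOR value has low bits 101 binary in SImode, the unsigned
	 path would mask with ((1 << 3) - 1) = 7, while the signed path
	 shifts left and then arithmetically right by 32 - 3 = 29 bits,
	 yielding -3, the correctly sign-extended field value.  */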

      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   modifier == EXPAND_INITIALIZER
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& bitsize
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos /
							   BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode, offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (flag_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access rights of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, ptr_mode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_IN_STRUCT_P (op0) = 1;
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
						   (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	if (GET_CODE (op0) == MEM)
	  MEM_ALIAS_SET (op0) = get_alias_set (exp);

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
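
	/* Worked instance of the algorithm above (added for illustration,
	   taking bits_per_word == 8): with set_low == 8 and index == 21,
	   rlo = 8, the_word = set[(21 - 8) / 8] = set[1], bit_index =
	   21 % 8 = 5, and bitmask = 1 << 5, so the result tests bit 5 of
	   the second byte of the set.  */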
6390
6391	tree set = TREE_OPERAND (exp, 0);
6392	tree index = TREE_OPERAND (exp, 1);
6393	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6394	tree set_type = TREE_TYPE (set);
6395	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6396	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6397	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6398	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6399	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6400	rtx setval = expand_expr (set, 0, VOIDmode, 0);
6401	rtx setaddr = XEXP (setval, 0);
6402	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6403	rtx rlow;
6404	rtx diff, quo, rem, addr, bit, result;
6405
6406	preexpand_calls (exp);
6407
6408	/* If domain is empty, answer is no.  Likewise if index is constant
6409	   and out of bounds.  */
6410	if (((TREE_CODE (set_high_bound) == INTEGER_CST
6411	     && TREE_CODE (set_low_bound) == INTEGER_CST
6412	     && tree_int_cst_lt (set_high_bound, set_low_bound))
6413	     || (TREE_CODE (index) == INTEGER_CST
6414		 && TREE_CODE (set_low_bound) == INTEGER_CST
6415		 && tree_int_cst_lt (index, set_low_bound))
6416	     || (TREE_CODE (set_high_bound) == INTEGER_CST
6417		 && TREE_CODE (index) == INTEGER_CST
6418		 && tree_int_cst_lt (set_high_bound, index))))
6419	  return const0_rtx;
6420
6421	if (target == 0)
6422	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6423
6424	/* If we get here, we have to generate the code for both cases
6425	   (in range and out of range).  */
6426
6427	op0 = gen_label_rtx ();
6428	op1 = gen_label_rtx ();
6429
6430	if (! (GET_CODE (index_val) == CONST_INT
6431	       && GET_CODE (lo_r) == CONST_INT))
6432	  {
6433	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6434			   GET_MODE (index_val), iunsignedp, 0);
6435	    emit_jump_insn (gen_blt (op1));
6436	  }
6437
6438	if (! (GET_CODE (index_val) == CONST_INT
6439	       && GET_CODE (hi_r) == CONST_INT))
6440	  {
6441	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6442			   GET_MODE (index_val), iunsignedp, 0);
6443	    emit_jump_insn (gen_bgt (op1));
6444	  }
6445
6446	/* Calculate the element number of bit zero in the first word
6447	   of the set.  */
6448	if (GET_CODE (lo_r) == CONST_INT)
6449	  rlow = GEN_INT (INTVAL (lo_r)
6450			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6451	else
6452	  rlow = expand_binop (index_mode, and_optab, lo_r,
6453			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6454			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6455
6456	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6457			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6458
6459	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6460			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6461	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6462			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6463
6464	addr = memory_address (byte_mode,
6465			       expand_binop (index_mode, add_optab, diff,
6466					     setaddr, NULL_RTX, iunsignedp,
6467					     OPTAB_LIB_WIDEN));
6468
6469	/* Extract the bit we want to examine */
6470	bit = expand_shift (RSHIFT_EXPR, byte_mode,
6471			    gen_rtx_MEM (byte_mode, addr),
6472			    make_tree (TREE_TYPE (index), rem),
6473			    NULL_RTX, 1);
6474	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6475			       GET_MODE (target) == byte_mode ? target : 0,
6476			       1, OPTAB_LIB_WIDEN);
6477
6478	if (result != target)
6479	  convert_move (target, result, 1);
6480
6481	/* Output the code to handle the out-of-range case.  */
6482	emit_jump (op0);
6483	emit_label (op1);
6484	emit_move_insn (target, const0_rtx);
6485	emit_label (op0);
6486	return target;
6487      }
6488
6489    case WITH_CLEANUP_EXPR:
6490      if (RTL_EXPR_RTL (exp) == 0)
6491	{
6492	  RTL_EXPR_RTL (exp)
6493	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6494	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6495
6496	  /* That's it for this cleanup.  */
6497	  TREE_OPERAND (exp, 2) = 0;
6498	}
6499      return RTL_EXPR_RTL (exp);
6500
6501    case CLEANUP_POINT_EXPR:
6502      {
6503	extern int temp_slot_level;
6504	/* Start a new binding layer that will keep track of all cleanup
6505	   actions to be performed.  */
6506	expand_start_bindings (0);
6507
6508	target_temp_slot_level = temp_slot_level;
6509
6510	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6511	/* If we're going to use this value, load it up now.  */
6512	if (! ignore)
6513	  op0 = force_not_mem (op0);
6514	preserve_temp_slots (op0);
6515	expand_end_bindings (NULL_TREE, 0, 0);
6516      }
6517      return op0;
6518
6519    case CALL_EXPR:
6520      /* Check for a built-in function.  */
6521      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6522	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6523	      == FUNCTION_DECL)
6524	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6525	return expand_builtin (exp, target, subtarget, tmode, ignore);
6526
6527      /* If this call was expanded already by preexpand_calls,
6528	 just return the result we got.  */
6529      if (CALL_EXPR_RTL (exp) != 0)
6530	return CALL_EXPR_RTL (exp);
6531
6532      return expand_call (exp, target, ignore);
6533
6534    case NON_LVALUE_EXPR:
6535    case NOP_EXPR:
6536    case CONVERT_EXPR:
6537    case REFERENCE_EXPR:
6538      if (TREE_CODE (type) == UNION_TYPE)
6539	{
6540	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6541	  if (target == 0)
6542	    {
6543	      if (mode != BLKmode)
6544		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6545	      else
6546		target = assign_temp (type, 0, 1, 1);
6547	    }
6548
6549	  if (GET_CODE (target) == MEM)
6550	    /* Store data into beginning of memory target.  */
6551	    store_expr (TREE_OPERAND (exp, 0),
6552			change_address (target, TYPE_MODE (valtype), 0), 0);
6553
6554	  else if (GET_CODE (target) == REG)
6555	    /* Store this field into a union of the proper type.  */
6556	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6557			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6558			 VOIDmode, 0, 1,
6559			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6560	  else
6561	    abort ();
6562
6563	  /* Return the entire union.  */
6564	  return target;
6565	}
6566
6567      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6568	{
6569	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6570			     ro_modifier);
6571
6572	  /* If the signedness of the conversion differs and OP0 is
6573	     a promoted SUBREG, clear that indication since we now
6574	     have to do the proper extension.  */
6575	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6576	      && GET_CODE (op0) == SUBREG)
6577	    SUBREG_PROMOTED_VAR_P (op0) = 0;
6578
6579	  return op0;
6580	}
6581
6582      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6583      if (GET_MODE (op0) == mode)
6584	return op0;
6585
6586      /* If OP0 is a constant, just convert it into the proper mode.  */
6587      if (CONSTANT_P (op0))
6588	return
6589	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6590			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6591
6592      if (modifier == EXPAND_INITIALIZER)
6593	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6594
6595      if (target == 0)
6596	return
6597	  convert_to_mode (mode, op0,
6598			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6599      else
6600	convert_move (target, op0,
6601		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6602      return target;
6603
6604    case PLUS_EXPR:
6605      /* We come here from MINUS_EXPR when the second operand is a
6606         constant.  */
6607    plus_expr:
6608      this_optab = add_optab;
6609
6610      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6611	 something else, make sure we add the register to the constant and
6612	 then to the other thing.  This case can occur during strength
6613	 reduction and doing it this way will produce better code if the
6614	 frame pointer or argument pointer is eliminated.
6615
6616	 fold-const.c will ensure that the constant is always in the inner
6617	 PLUS_EXPR, so the only case we need to do anything about is if
6618	 sp, ap, or fp is our second argument, in which case we must swap
6619	 the innermost first argument and our second argument.  */
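      /* For instance, (X + C) + FP is rearranged here into (FP + C) + X,
	 so that FP + C can later collapse to a single constant when the
	 frame pointer is eliminated.  */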
6620
6621      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6622	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6623	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6624	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6625	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6626	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6627	{
6628	  tree t = TREE_OPERAND (exp, 1);
6629
6630	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6631	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6632	}
6633
6634      /* If the result is to be ptr_mode and we are adding an integer to
6635	 something, we might be forming a constant.  So try to use
6636	 plus_constant.  If it produces a sum and we can't accept it,
6637	 use force_operand.  This allows P = &ARR[const] to generate
6638	 efficient code on machines where a SYMBOL_REF is not a valid
6639	 address.
6640
6641	 If this is an EXPAND_SUM call, always return the sum.  */
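      /* For example, given "int arr[20];", &arr[10] can come out of this
	 path roughly as (plus (symbol_ref "arr") (const_int 40));
	 force_operand is used only when such a sum is not acceptable.  */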
6642      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6643	  || mode == ptr_mode)
6644	{
6645	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6646	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6647	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6648	    {
6649	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6650				 EXPAND_SUM);
6651	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6652	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6653		op1 = force_operand (op1, target);
6654	      return op1;
6655	    }
6656
6657	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6658		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6659		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6660	    {
6661	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6662				 EXPAND_SUM);
6663	      if (! CONSTANT_P (op0))
6664		{
6665		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6666				     VOIDmode, modifier);
6667		  /* Don't go to both_summands if modifier
6668		     says it's not right to return a PLUS.  */
6669		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6670		    goto binop2;
6671		  goto both_summands;
6672		}
6673	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6674	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6675		op0 = force_operand (op0, target);
6676	      return op0;
6677	    }
6678	}
6679
6680      /* No sense saving up arithmetic to be done
6681	 if it's all in the wrong mode to form part of an address.
6682	 And force_operand won't know whether to sign-extend or
6683	 zero-extend.  */
6684      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6685	  || mode != ptr_mode)
6686	goto binop;
6687
6688      preexpand_calls (exp);
6689      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6690	subtarget = 0;
6691
6692      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6693      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6694
6695    both_summands:
6696      /* Make sure any term that's a sum with a constant comes last.  */
6697      if (GET_CODE (op0) == PLUS
6698	  && CONSTANT_P (XEXP (op0, 1)))
6699	{
6700	  temp = op0;
6701	  op0 = op1;
6702	  op1 = temp;
6703	}
6704      /* If adding to a sum including a constant,
6705	 associate it to put the constant outside.  */
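      /* Roughly: with OP0 = (reg R1) and OP1 = (plus (reg R2) (const_int 4)),
	 the code below forms (plus (plus R2 R1) (const_int 4)).  */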
6706      if (GET_CODE (op1) == PLUS
6707	  && CONSTANT_P (XEXP (op1, 1)))
6708	{
6709	  rtx constant_term = const0_rtx;
6710
6711	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6712	  if (temp != 0)
6713	    op0 = temp;
6714	  /* Ensure that MULT comes first if there is one.  */
6715	  else if (GET_CODE (op0) == MULT)
6716	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6717	  else
6718	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6719
6720	  /* Let's also eliminate constants from op0 if possible.  */
6721	  op0 = eliminate_constant_term (op0, &constant_term);
6722
6723	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6724	     their sum should be a constant.  Form it into OP1, since the
6725	     result we want will then be OP0 + OP1.  */
6726
6727	  temp = simplify_binary_operation (PLUS, mode, constant_term,
6728					    XEXP (op1, 1));
6729	  if (temp != 0)
6730	    op1 = temp;
6731	  else
6732	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6733	}
6734
6735      /* Put a constant term last and put a multiplication first.  */
6736      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6737	temp = op1, op1 = op0, op0 = temp;
6738
6739      temp = simplify_binary_operation (PLUS, mode, op0, op1);
6740      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6741
6742    case MINUS_EXPR:
6743      /* For initializers, we are allowed to return a MINUS of two
6744	 symbolic constants.  Here we handle all cases when both operands
6745	 are constant.  */
6746      /* Handle difference of two symbolic constants,
6747	 for the sake of an initializer.  */
6748      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6749	  && really_constant_p (TREE_OPERAND (exp, 0))
6750	  && really_constant_p (TREE_OPERAND (exp, 1)))
6751	{
6752	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6753				 VOIDmode, ro_modifier);
6754	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6755				 VOIDmode, ro_modifier);
6756
6757	  /* If the last operand is a CONST_INT, use plus_constant of
6758	     the negated constant.  Else make the MINUS.  */
6759	  if (GET_CODE (op1) == CONST_INT)
6760	    return plus_constant (op0, - INTVAL (op1));
6761	  else
6762	    return gen_rtx_MINUS (mode, op0, op1);
6763	}
6764      /* Convert A - const to A + (-const).  */
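      /* e.g., "x - 7" is rewritten as "x + (-7)" and handled by the
	 PLUS_EXPR code above; for unsigned types the negation is done
	 in the corresponding signed type, where -7 is representable.  */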
6765      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6766	{
6767	  tree negated = fold (build1 (NEGATE_EXPR, type,
6768				       TREE_OPERAND (exp, 1)));
6769
6770	  /* Deal with the case where we can't negate the constant
6771	     in TYPE.  */
6772	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6773	    {
6774	      tree newtype = signed_type (type);
6775	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6776	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6777	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6778
6779	      if (! TREE_OVERFLOW (newneg))
6780		return expand_expr (convert (type,
6781					     build (PLUS_EXPR, newtype,
6782						    newop0, newneg)),
6783				    target, tmode, ro_modifier);
6784	    }
6785	  else
6786	    {
6787	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6788	      goto plus_expr;
6789	    }
6790	}
6791      this_optab = sub_optab;
6792      goto binop;
6793
6794    case MULT_EXPR:
6795      preexpand_calls (exp);
6796      /* If first operand is constant, swap them.
6797	 Thus the following special case checks need only
6798	 check the second operand.  */
6799      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6800	{
6801	  register tree t1 = TREE_OPERAND (exp, 0);
6802	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6803	  TREE_OPERAND (exp, 1) = t1;
6804	}
6805
6806      /* Attempt to return something suitable for generating an
6807	 indexed address, for machines that support that.  */
6808
6809      if (modifier == EXPAND_SUM && mode == ptr_mode
6810	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6811	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6812	{
6813	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6814			     EXPAND_SUM);
6815
6816	  /* Apply distributive law if OP0 is x+c.  */
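	  /* Roughly: (x + 4) * 3 becomes (plus (mult x 3) (const_int 12)),
	     which is a better shape for forming an indexed address.  */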
6817	  if (GET_CODE (op0) == PLUS
6818	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6819	    return gen_rtx_PLUS (mode,
6820				 gen_rtx_MULT (mode, XEXP (op0, 0),
6821					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6822			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6823				     * INTVAL (XEXP (op0, 1))));
6824
6825	  if (GET_CODE (op0) != REG)
6826	    op0 = force_operand (op0, NULL_RTX);
6827	  if (GET_CODE (op0) != REG)
6828	    op0 = copy_to_mode_reg (mode, op0);
6829
6830	  return gen_rtx_MULT (mode, op0,
6831			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6832	}
6833
6834      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6835	subtarget = 0;
6836
6837      /* Check for multiplying things that have been extended
6838	 from a narrower type.  If this machine supports multiplying
6839	 in that narrower type with a result in the desired type,
6840	 do it that way, and avoid the explicit type-conversion.  */
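      /* e.g., "(int) h1 * (int) h2" with HImode operands can use a
	 HImode x HImode -> SImode widening multiply, when the target
	 provides one, instead of extending both operands first.  */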
6841      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6842	  && TREE_CODE (type) == INTEGER_TYPE
6843	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6844	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6845	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6846	       && int_fits_type_p (TREE_OPERAND (exp, 1),
6847				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6848	       /* Don't use a widening multiply if a shift will do.  */
6849	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6850		    > HOST_BITS_PER_WIDE_INT)
6851		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6852	      ||
6853	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6854	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6855		   ==
6856		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6857	       /* If both operands are extended, they must either both
6858		  be zero-extended or both be sign-extended.  */
6859	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6860		   ==
6861		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6862	{
6863	  enum machine_mode innermode
6864	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6865	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6866			? smul_widen_optab : umul_widen_optab);
6867	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6868			? umul_widen_optab : smul_widen_optab);
6869	  if (mode == GET_MODE_WIDER_MODE (innermode))
6870	    {
6871	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6872		{
6873		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6874				     NULL_RTX, VOIDmode, 0);
6875		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6876		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6877				       VOIDmode, 0);
6878		  else
6879		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6880				       NULL_RTX, VOIDmode, 0);
6881		  goto binop2;
6882		}
6883	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6884		       && innermode == word_mode)
6885		{
6886		  rtx htem;
6887		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6888				     NULL_RTX, VOIDmode, 0);
6889		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6890		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6891				       VOIDmode, 0);
6892		  else
6893		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6894				       NULL_RTX, VOIDmode, 0);
6895		  temp = expand_binop (mode, other_optab, op0, op1, target,
6896				       unsignedp, OPTAB_LIB_WIDEN);
6897		  htem = expand_mult_highpart_adjust (innermode,
6898						      gen_highpart (innermode, temp),
6899						      op0, op1,
6900						      gen_highpart (innermode, temp),
6901						      unsignedp);
6902		  emit_move_insn (gen_highpart (innermode, temp), htem);
6903		  return temp;
6904		}
6905	    }
6906	}
6907      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6908      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6909      return expand_mult (mode, op0, op1, target, unsignedp);
6910
6911    case TRUNC_DIV_EXPR:
6912    case FLOOR_DIV_EXPR:
6913    case CEIL_DIV_EXPR:
6914    case ROUND_DIV_EXPR:
6915    case EXACT_DIV_EXPR:
6916      preexpand_calls (exp);
6917      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6918	subtarget = 0;
6919	      /* Possible optimization: compute the dividend with EXPAND_SUM;
6920		 then, if the divisor is constant, we can optimize the case where
6921		 some terms of the dividend have coefficients divisible by it.  */
6922      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6923      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6924      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6925
6926    case RDIV_EXPR:
6927      this_optab = flodiv_optab;
6928      goto binop;
6929
6930    case TRUNC_MOD_EXPR:
6931    case FLOOR_MOD_EXPR:
6932    case CEIL_MOD_EXPR:
6933    case ROUND_MOD_EXPR:
6934      preexpand_calls (exp);
6935      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6936	subtarget = 0;
6937      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6938      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6939      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6940
6941    case FIX_ROUND_EXPR:
6942    case FIX_FLOOR_EXPR:
6943    case FIX_CEIL_EXPR:
6944      abort ();			/* Not used for C.  */
6945
6946    case FIX_TRUNC_EXPR:
6947      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6948      if (target == 0)
6949	target = gen_reg_rtx (mode);
6950      expand_fix (target, op0, unsignedp);
6951      return target;
6952
6953    case FLOAT_EXPR:
6954      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6955      if (target == 0)
6956	target = gen_reg_rtx (mode);
6957      /* expand_float can't figure out what to do if FROM has VOIDmode.
6958	 So give it the correct mode.  With -O, cse will optimize this.  */
6959      if (GET_MODE (op0) == VOIDmode)
6960	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6961				op0);
6962      expand_float (target, op0,
6963		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6964      return target;
6965
6966    case NEGATE_EXPR:
6967      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6968      temp = expand_unop (mode, neg_optab, op0, target, 0);
6969      if (temp == 0)
6970	abort ();
6971      return temp;
6972
6973    case ABS_EXPR:
6974      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6975
6976      /* Handle complex values specially.  */
6977      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6978	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6979	return expand_complex_abs (mode, op0, target, unsignedp);
6980
6981      /* Unsigned abs is simply the operand.  Testing here means we don't
6982	 risk generating incorrect code below.  */
6983      if (TREE_UNSIGNED (type))
6984	return op0;
6985
6986      return expand_abs (mode, op0, target, unsignedp,
6987			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6988
6989    case MAX_EXPR:
6990    case MIN_EXPR:
6991      target = original_target;
6992      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6993	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6994	  || GET_MODE (target) != mode
6995	  || (GET_CODE (target) == REG
6996	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
6997	target = gen_reg_rtx (mode);
6998      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6999      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7000
7001      /* First try to do it with a special MIN or MAX instruction.
7002	 If that does not win, use a conditional jump to select the proper
7003	 value.  */
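      /* If no such instruction exists, the fallback below is roughly:
	     target = a;  if (target >= b) goto done;  target = b;  done:
	 for MAX_EXPR, with the unsigned comparison used for unsigned types.  */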
7004      this_optab = (TREE_UNSIGNED (type)
7005		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7006		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7007
7008      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7009			   OPTAB_WIDEN);
7010      if (temp != 0)
7011	return temp;
7012
7013      /* At this point, a MEM target is no longer useful; we will get better
7014	 code without it.  */
7015
7016      if (GET_CODE (target) == MEM)
7017	target = gen_reg_rtx (mode);
7018
7019      if (target != op0)
7020	emit_move_insn (target, op0);
7021
7022      op0 = gen_label_rtx ();
7023
7024      /* If this mode is an integer too wide to compare properly,
7025	 compare word by word.  Rely on cse to optimize constant cases.  */
7026      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7027	{
7028	  if (code == MAX_EXPR)
7029	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7030					  target, op1, NULL_RTX, op0);
7031	  else
7032	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7033					  op1, target, NULL_RTX, op0);
7034	  emit_move_insn (target, op1);
7035	}
7036      else
7037	{
7038	  if (code == MAX_EXPR)
7039	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7040		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7041		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7042	  else
7043	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7044		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7045		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7046	  if (temp == const0_rtx)
7047	    emit_move_insn (target, op1);
7048	  else if (temp != const_true_rtx)
7049	    {
7050	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7051		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7052	      else
7053		abort ();
7054	      emit_move_insn (target, op1);
7055	    }
7056	}
7057      emit_label (op0);
7058      return target;
7059
7060    case BIT_NOT_EXPR:
7061      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7062      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7063      if (temp == 0)
7064	abort ();
7065      return temp;
7066
7067    case FFS_EXPR:
7068      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7069      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7070      if (temp == 0)
7071	abort ();
7072      return temp;
7073
7074      /* ??? Can optimize bitwise operations with one arg constant.
7075	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7076	 and (a bitwise1 b) bitwise2 b (etc)
7077	 but that is probably not worthwhile.  */
7078
7079      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7080	 boolean values when we want in all cases to compute both of them.  In
7081	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7082	 as actual zero-or-1 values and then bitwise anding.  In cases where
7083	 there cannot be any side effects, better code would be made by
7084	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7085	 how to recognize those cases.  */
7086
7087    case TRUTH_AND_EXPR:
7088    case BIT_AND_EXPR:
7089      this_optab = and_optab;
7090      goto binop;
7091
7092    case TRUTH_OR_EXPR:
7093    case BIT_IOR_EXPR:
7094      this_optab = ior_optab;
7095      goto binop;
7096
7097    case TRUTH_XOR_EXPR:
7098    case BIT_XOR_EXPR:
7099      this_optab = xor_optab;
7100      goto binop;
7101
7102    case LSHIFT_EXPR:
7103    case RSHIFT_EXPR:
7104    case LROTATE_EXPR:
7105    case RROTATE_EXPR:
7106      preexpand_calls (exp);
7107      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7108	subtarget = 0;
7109      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7110      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7111			   unsignedp);
7112
7113      /* Could determine the answer when only additive constants differ.  Also,
7114	 the addition of one can be handled by changing the condition.  */
7115    case LT_EXPR:
7116    case LE_EXPR:
7117    case GT_EXPR:
7118    case GE_EXPR:
7119    case EQ_EXPR:
7120    case NE_EXPR:
7121      preexpand_calls (exp);
7122      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7123      if (temp != 0)
7124	return temp;
7125
7126      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
7127      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7128	  && original_target
7129	  && GET_CODE (original_target) == REG
7130	  && (GET_MODE (original_target)
7131	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7132	{
7133	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7134			      VOIDmode, 0);
7135
7136	  if (temp != original_target)
7137	    temp = copy_to_reg (temp);
7138
7139	  op1 = gen_label_rtx ();
7140	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7141			 GET_MODE (temp), unsignedp, 0);
7142	  emit_jump_insn (gen_beq (op1));
7143	  emit_move_insn (temp, const1_rtx);
7144	  emit_label (op1);
7145	  return temp;
7146	}
7147
7148      /* If no set-flag instruction, must generate a conditional
7149	 store into a temporary variable.  Drop through
7150	 and handle this like && and ||.  */
7151
7152    case TRUTH_ANDIF_EXPR:
7153    case TRUTH_ORIF_EXPR:
7154      if (! ignore
7155	  && (target == 0 || ! safe_from_p (target, exp, 1)
7156	      /* Make sure we don't have a hard reg (such as function's return
7157		 value) live across basic blocks, if not optimizing.  */
7158	      || (!optimize && GET_CODE (target) == REG
7159		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7160	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7161
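      /* The expansion below is roughly: target = 0; if (!exp) goto op1;
	 target = 1; op1:  -- so the whole conditional yields a plain
	 0/1 value in TARGET.  */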
7162      if (target)
7163	emit_clr_insn (target);
7164
7165      op1 = gen_label_rtx ();
7166      jumpifnot (exp, op1);
7167
7168      if (target)
7169	emit_0_to_1_insn (target);
7170
7171      emit_label (op1);
7172      return ignore ? const0_rtx : target;
7173
7174    case TRUTH_NOT_EXPR:
7175      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7176      /* The parser is careful to generate TRUTH_NOT_EXPR
7177	 only with operands that are always zero or one.  */
7178      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7179			   target, 1, OPTAB_LIB_WIDEN);
7180      if (temp == 0)
7181	abort ();
7182      return temp;
7183
7184    case COMPOUND_EXPR:
7185      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7186      emit_queue ();
7187      return expand_expr (TREE_OPERAND (exp, 1),
7188			  (ignore ? const0_rtx : target),
7189			  VOIDmode, 0);
7190
7191    case COND_EXPR:
7192      /* If we would have a "singleton" (see below) were it not for a
7193	 conversion in each arm, bring that conversion back out.  */
7194      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7195	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7196	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7197	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7198	{
7199	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7200	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7201
7202	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7203	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7204	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7205		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7206	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7207		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7208	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7209		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7210	    return expand_expr (build1 (NOP_EXPR, type,
7211					build (COND_EXPR, TREE_TYPE (true),
7212					       TREE_OPERAND (exp, 0),
7213					       true, false)),
7214				target, tmode, modifier);
7215	}
7216
7217      {
7218	/* Note that COND_EXPRs whose type is a structure or union
7219	   are required to be constructed to contain assignments of
7220	   a temporary variable, so that we can evaluate them here
7221	   for side effect only.  If type is void, we must do likewise.  */
7222
7223	/* If an arm of the branch requires a cleanup,
7224	   only that cleanup is performed.  */
7225
7226	tree singleton = 0;
7227	tree binary_op = 0, unary_op = 0;
7228
7229	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7230	   convert it to our mode, if necessary.  */
7231	if (integer_onep (TREE_OPERAND (exp, 1))
7232	    && integer_zerop (TREE_OPERAND (exp, 2))
7233	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7234	  {
7235	    if (ignore)
7236	      {
7237		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7238			     ro_modifier);
7239		return const0_rtx;
7240	      }
7241
7242	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7243	    if (GET_MODE (op0) == mode)
7244	      return op0;
7245
7246	    if (target == 0)
7247	      target = gen_reg_rtx (mode);
7248	    convert_move (target, op0, unsignedp);
7249	    return target;
7250	  }
7251
7252	/* Check for X ? A + B : A.  If we have this, we can copy A to the
7253	   output and conditionally add B.  Similarly for unary operations.
7254	   Don't do this if X has side-effects because those side effects
7255	   might affect A or B and the "?" operation is a sequence point in
7256	   ANSI.  (operand_equal_p tests for side effects.)  */
7257
7258	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7259	    && operand_equal_p (TREE_OPERAND (exp, 2),
7260				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7261	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7262	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7263		 && operand_equal_p (TREE_OPERAND (exp, 1),
7264				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7265	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7266	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7267		 && operand_equal_p (TREE_OPERAND (exp, 2),
7268				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7269	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7270	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7271		 && operand_equal_p (TREE_OPERAND (exp, 1),
7272				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7273	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7274
7275	/* If we are not to produce a result, we have no target.  Otherwise,
7276	   if a target was specified use it; it will not be used as an
7277	   intermediate target unless it is safe.  If no target, use a
7278	   temporary.  */
7279
7280	if (ignore)
7281	  temp = 0;
7282	else if (original_target
7283		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7284		     || (singleton && GET_CODE (original_target) == REG
7285			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7286			 && original_target == var_rtx (singleton)))
7287		 && GET_MODE (original_target) == mode
7288#ifdef HAVE_conditional_move
7289		 && (! can_conditionally_move_p (mode)
7290		     || GET_CODE (original_target) == REG
7291		     || TREE_ADDRESSABLE (type))
7292#endif
7293		 && ! (GET_CODE (original_target) == MEM
7294		       && MEM_VOLATILE_P (original_target)))
7295	  temp = original_target;
7296	else if (TREE_ADDRESSABLE (type))
7297	  abort ();
7298	else
7299	  temp = assign_temp (type, 0, 0, 1);
7300
7301	/* If we had X ? A + C : A, with C a constant power of 2, and we can
7302	   do the test of X as a store-flag operation, do this as
7303	   A + ((X != 0) << log C).  Similarly for other simple binary
7304	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
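	/* e.g., "x ? a + 4 : a" can then become "a + ((x != 0) << 2)",
	   with no branch at all.  */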
7305	if (temp && singleton && binary_op
7306	    && (TREE_CODE (binary_op) == PLUS_EXPR
7307		|| TREE_CODE (binary_op) == MINUS_EXPR
7308		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
7309		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
7310	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7311		: integer_onep (TREE_OPERAND (binary_op, 1)))
7312	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7313	  {
7314	    rtx result;
7315	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7316			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7317			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7318			    : xor_optab);
7319
7320	    /* If we had X ? A : A + 1, do this as A + (X == 0).
7321
7322	       We have to invert the truth value here and then put it
7323	       back later if do_store_flag fails.  We cannot simply copy
7324	       TREE_OPERAND (exp, 0) to another variable and modify that
7325	       because invert_truthvalue can modify the tree pointed to
7326	       by its argument.  */
7327	    if (singleton == TREE_OPERAND (exp, 1))
7328	      TREE_OPERAND (exp, 0)
7329		= invert_truthvalue (TREE_OPERAND (exp, 0));
7330
7331	    result = do_store_flag (TREE_OPERAND (exp, 0),
7332				    (safe_from_p (temp, singleton, 1)
7333				     ? temp : NULL_RTX),
7334				    mode, BRANCH_COST <= 1);
7335
7336	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7337	      result = expand_shift (LSHIFT_EXPR, mode, result,
7338				     build_int_2 (tree_log2
7339						  (TREE_OPERAND
7340						   (binary_op, 1)),
7341						  0),
7342				     (safe_from_p (temp, singleton, 1)
7343				      ? temp : NULL_RTX), 0);
7344
7345	    if (result)
7346	      {
7347		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7348		return expand_binop (mode, boptab, op1, result, temp,
7349				     unsignedp, OPTAB_LIB_WIDEN);
7350	      }
7351	    else if (singleton == TREE_OPERAND (exp, 1))
7352	      TREE_OPERAND (exp, 0)
7353		= invert_truthvalue (TREE_OPERAND (exp, 0));
7354	  }
7355
7356	do_pending_stack_adjust ();
7357	NO_DEFER_POP;
7358	op0 = gen_label_rtx ();
7359
7360	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7361	  {
7362	    if (temp != 0)
7363	      {
7364		/* If the target conflicts with the other operand of the
7365		   binary op, we can't use it.  Also, we can't use the target
7366		   if it is a hard register, because evaluating the condition
7367		   might clobber it.  */
7368		if ((binary_op
7369		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7370		    || (GET_CODE (temp) == REG
7371			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
7372		  temp = gen_reg_rtx (mode);
7373		store_expr (singleton, temp, 0);
7374	      }
7375	    else
7376	      expand_expr (singleton,
7377			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7378	    if (singleton == TREE_OPERAND (exp, 1))
7379	      jumpif (TREE_OPERAND (exp, 0), op0);
7380	    else
7381	      jumpifnot (TREE_OPERAND (exp, 0), op0);
7382
7383	    start_cleanup_deferral ();
7384	    if (binary_op && temp == 0)
7385	      /* Just touch the other operand.  */
7386	      expand_expr (TREE_OPERAND (binary_op, 1),
7387			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7388	    else if (binary_op)
7389	      store_expr (build (TREE_CODE (binary_op), type,
7390				 make_tree (type, temp),
7391				 TREE_OPERAND (binary_op, 1)),
7392			  temp, 0);
7393	    else
7394	      store_expr (build1 (TREE_CODE (unary_op), type,
7395				  make_tree (type, temp)),
7396			  temp, 0);
7397	    op1 = op0;
7398	  }
7399	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7400	   comparison operator.  If we have one of these cases, set the
7401	   output to A, branch on A (cse will merge these two references),
7402	   then set the output to FOO.  */
7403	else if (temp
7404		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7405		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7406		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7407				     TREE_OPERAND (exp, 1), 0)
7408		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7409		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7410		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7411	  {
7412	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7413	      temp = gen_reg_rtx (mode);
7414	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7415	    jumpif (TREE_OPERAND (exp, 0), op0);
7416
7417	    start_cleanup_deferral ();
7418	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7419	    op1 = op0;
7420	  }
7421	else if (temp
7422		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7423		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7424		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7425				     TREE_OPERAND (exp, 2), 0)
7426		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7427		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7428		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7429	  {
7430	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7431	      temp = gen_reg_rtx (mode);
7432	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7433	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7434
7435	    start_cleanup_deferral ();
7436	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7437	    op1 = op0;
7438	  }
7439	else
7440	  {
7441	    op1 = gen_label_rtx ();
7442	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7443
7444	    start_cleanup_deferral ();
7445	    if (temp != 0)
7446	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
7447	    else
7448	      expand_expr (TREE_OPERAND (exp, 1),
7449			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7450	    end_cleanup_deferral ();
7451	    emit_queue ();
7452	    emit_jump_insn (gen_jump (op1));
7453	    emit_barrier ();
7454	    emit_label (op0);
7455	    start_cleanup_deferral ();
7456	    if (temp != 0)
7457	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
7458	    else
7459	      expand_expr (TREE_OPERAND (exp, 2),
7460			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7461	  }
7462
7463	end_cleanup_deferral ();
7464
7465	emit_queue ();
7466	emit_label (op1);
7467	OK_DEFER_POP;
7468
7469	return temp;
7470      }
7471
7472    case TARGET_EXPR:
7473      {
7474	/* Something needs to be initialized, but we didn't know
7475	   where that thing was when building the tree.  For example,
7476	   it could be the return value of a function, or a parameter
7477	   to a function which is laid down on the stack, or a temporary
7478	   variable which must be passed by reference.
7479
7480	   We guarantee that the expression will either be constructed
7481	   or copied into our original target.  */
7482
7483	tree slot = TREE_OPERAND (exp, 0);
7484	tree cleanups = NULL_TREE;
7485	tree exp1;
7486
7487	if (TREE_CODE (slot) != VAR_DECL)
7488	  abort ();
7489
7490	if (! ignore)
7491	  target = original_target;
7492
7493	if (target == 0)
7494	  {
7495	    if (DECL_RTL (slot) != 0)
7496	      {
7497		target = DECL_RTL (slot);
7498		/* If we have already expanded the slot, don't do
7499		   it again.  (mrs)  */
7500		if (TREE_OPERAND (exp, 1) == NULL_TREE)
7501		  return target;
7502	      }
7503	    else
7504	      {
7505		target = assign_temp (type, 2, 0, 1);
7506		/* All temp slots at this level must not conflict.  */
7507		preserve_temp_slots (target);
7508		DECL_RTL (slot) = target;
7509		if (TREE_ADDRESSABLE (slot))
7510		  {
7511		    TREE_ADDRESSABLE (slot) = 0;
7512		    mark_addressable (slot);
7513		  }
7514
7515		/* Since SLOT is not known to the called function
7516		   to belong to its stack frame, we must build an explicit
7517		   cleanup.  This case occurs when we must build up a reference
7518		   to pass the reference as an argument.  In this case,
7519		   it is very likely that such a reference need not be
7520		   built here.  */
7521
7522		if (TREE_OPERAND (exp, 2) == 0)
7523		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7524		cleanups = TREE_OPERAND (exp, 2);
7525	      }
7526	  }
7527	else
7528	  {
7529	    /* This case does occur when expanding a parameter which
7530	       needs to be constructed on the stack.  The target
7531	       is the actual stack address that we want to initialize.
7532	       The function we call will perform the cleanup in this case.  */
7533
7534	    /* If we have already assigned it space, use that space,
7535	       not the target that we were passed, as our target
7536	       parameter is only a hint.  */
7537	    if (DECL_RTL (slot) != 0)
7538              {
7539	      {
7540		target = DECL_RTL (slot);
7541		/* If we have already expanded the slot, don't do
7542		   it again.  (mrs)  */
7543		if (TREE_OPERAND (exp, 1) == NULL_TREE)
7544		  return target;
7545	    else
7546	      {
7547		DECL_RTL (slot) = target;
7548		/* If we must have an addressable slot, then make sure that
7549		   the RTL that we just stored in slot is OK.  */
7550		if (TREE_ADDRESSABLE (slot))
7551		  {
7552		    TREE_ADDRESSABLE (slot) = 0;
7553		    mark_addressable (slot);
7554		  }
7555	      }
7556	  }
7557
7558	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7559	/* Mark it as expanded.  */
7560	TREE_OPERAND (exp, 1) = NULL_TREE;
7561
7562	TREE_USED (slot) = 1;
7563	store_expr (exp1, target, 0);
7564
7565	expand_decl_cleanup (NULL_TREE, cleanups);
7566
7567	return target;
7568      }
7569
7570    case INIT_EXPR:
7571      {
7572	tree lhs = TREE_OPERAND (exp, 0);
7573	tree rhs = TREE_OPERAND (exp, 1);
7574	tree noncopied_parts = 0;
7575	tree lhs_type = TREE_TYPE (lhs);
7576
7577	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7578	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7579	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7580						  TYPE_NONCOPIED_PARTS (lhs_type));
7581	while (noncopied_parts != 0)
7582	  {
7583	    expand_assignment (TREE_VALUE (noncopied_parts),
7584			       TREE_PURPOSE (noncopied_parts), 0, 0);
7585	    noncopied_parts = TREE_CHAIN (noncopied_parts);
7586	  }
7587	return temp;
7588      }
7589
7590    case MODIFY_EXPR:
7591      {
7592	/* If lhs is complex, expand calls in rhs before computing it.
7593	   That's so we don't compute a pointer and save it over a call.
7594	   If lhs is simple, compute it first so we can give it as a
7595	   target if the rhs is just a call.  This avoids an extra temp and copy
7596	   and that prevents a partial-subsumption which makes bad code.
7597	   Actually we could treat component_ref's of vars like vars.  */
7598
7599	tree lhs = TREE_OPERAND (exp, 0);
7600	tree rhs = TREE_OPERAND (exp, 1);
7601	tree noncopied_parts = 0;
7602	tree lhs_type = TREE_TYPE (lhs);
7603
7604	temp = 0;
7605
7606	if (TREE_CODE (lhs) != VAR_DECL
7607	    && TREE_CODE (lhs) != RESULT_DECL
7608	    && TREE_CODE (lhs) != PARM_DECL
7609	    && ! (TREE_CODE (lhs) == INDIRECT_REF
7610		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7611	  preexpand_calls (exp);
7612
7613	/* Check for |= or &= of a bitfield of size one into another bitfield
7614	   of size 1.  In this case, (unless we need the result of the
7615	   assignment) we can do this more efficiently with a
7616	   test followed by an assignment, if necessary.
7617
7618	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
7619	   things change so we do, this code should be enhanced to
7620	   support it.  */
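	/* e.g., for one-bit fields, "a.b |= c.d" is emitted roughly as
	   "if (c.d) a.b = 1;" and "a.b &= c.d" as "if (!c.d) a.b = 0;".  */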
7621	if (ignore
7622	    && TREE_CODE (lhs) == COMPONENT_REF
7623	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
7624		|| TREE_CODE (rhs) == BIT_AND_EXPR)
7625	    && TREE_OPERAND (rhs, 0) == lhs
7626	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7627	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7628	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7629	  {
7630	    rtx label = gen_label_rtx ();
7631
7632	    do_jump (TREE_OPERAND (rhs, 1),
7633		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7634		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7635	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
7636					     (TREE_CODE (rhs) == BIT_IOR_EXPR
7637					      ? integer_one_node
7638					      : integer_zero_node)),
7639			       0, 0);
7640	    do_pending_stack_adjust ();
7641	    emit_label (label);
7642	    return const0_rtx;
7643	  }
7644
7645	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7646	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7647	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7648						  TYPE_NONCOPIED_PARTS (lhs_type));
7649
7650	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7651	while (noncopied_parts != 0)
7652	  {
7653	    expand_assignment (TREE_PURPOSE (noncopied_parts),
7654			       TREE_VALUE (noncopied_parts), 0, 0);
7655	    noncopied_parts = TREE_CHAIN (noncopied_parts);
7656	  }
7657	return temp;
7658      }
7659
7660    case PREINCREMENT_EXPR:
7661    case PREDECREMENT_EXPR:
7662      return expand_increment (exp, 0, ignore);
7663
7664    case POSTINCREMENT_EXPR:
7665    case POSTDECREMENT_EXPR:
7666      /* Faster to treat as pre-increment if result is not used.  */
7667      return expand_increment (exp, ! ignore, ignore);
7668
7669    case ADDR_EXPR:
7670      /* If nonzero, TEMP will be set to the address of something that might
7671	 be a MEM corresponding to a stack slot.  */
7672      temp = 0;
7673
7674      /* Are we taking the address of a nested function?  */
7675      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7676	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7677	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7678	  && ! TREE_STATIC (exp))
7679	{
7680	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
7681	  op0 = force_operand (op0, target);
7682	}
7683      /* If we are taking the address of something erroneous, just
7684	 return a zero.  */
7685      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7686	return const0_rtx;
7687      else
7688	{
7689	  /* We make sure to pass const0_rtx down if we came in with
7690	     ignore set, to avoid running the cleanups twice.  */
7691	  op0 = expand_expr (TREE_OPERAND (exp, 0),
7692			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
7693			     (modifier == EXPAND_INITIALIZER
7694			      ? modifier : EXPAND_CONST_ADDRESS));
7695
7696	  /* If we are going to ignore the result, OP0 will have been set
7697	     to const0_rtx, so just return it.  Don't get confused and
7698	     think we are taking the address of the constant.  */
7699	  if (ignore)
7700	    return op0;
7701
7702	  op0 = protect_from_queue (op0, 0);
7703
7704	  /* We would like the object in memory.  If it is a constant,
7705	     we can have it be statically allocated into memory.  For
7706	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7707	     memory and store the value into it.  */
7708
7709	  if (CONSTANT_P (op0))
7710	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7711				   op0);
7712	  else if (GET_CODE (op0) == MEM)
7713	    {
7714	      mark_temp_addr_taken (op0);
7715	      temp = XEXP (op0, 0);
7716	    }
7717
7718	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7719		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7720	    {
7721	      /* If this object is in a register, it must not
7722		 be BLKmode.  */
7723	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7724	      rtx memloc = assign_temp (inner_type, 1, 1, 1);
7725
7726	      mark_temp_addr_taken (memloc);
7727	      emit_move_insn (memloc, op0);
7728	      op0 = memloc;
7729	    }
7730
7731	  if (GET_CODE (op0) != MEM)
7732	    abort ();
7733
7734	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7735	    {
7736	      temp = XEXP (op0, 0);
7737#ifdef POINTERS_EXTEND_UNSIGNED
7738	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7739		  && mode == ptr_mode)
7740		temp = convert_memory_address (ptr_mode, temp);
7741#endif
7742	      return temp;
7743	    }
7744
7745	  op0 = force_operand (XEXP (op0, 0), target);
7746	}
7747
7748      if (flag_force_addr && GET_CODE (op0) != REG)
7749	op0 = force_reg (Pmode, op0);
7750
7751      if (GET_CODE (op0) == REG
7752	  && ! REG_USERVAR_P (op0))
7753	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7754
7755      /* If we might have had a temp slot, add an equivalent address
7756	 for it.  */
7757      if (temp != 0)
7758	update_temp_slot_address (temp, op0);
7759
7760#ifdef POINTERS_EXTEND_UNSIGNED
7761      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7762	  && mode == ptr_mode)
7763	op0 = convert_memory_address (ptr_mode, op0);
7764#endif
7765
7766      return op0;
7767
7768    case ENTRY_VALUE_EXPR:
7769      abort ();
7770
7771    /* COMPLEX type for Extended Pascal & Fortran.  */
7772    case COMPLEX_EXPR:
7773      {
7774	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7775	rtx insns;
7776
7777	/* Get the rtx for each of the operands.  */
7778	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7779	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7780
7781	if (! target)
7782	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7783
7784	start_sequence ();
7785
7786	/* Move the real (op0) and imaginary (op1) parts to their location.  */
7787	emit_move_insn (gen_realpart (mode, target), op0);
7788	emit_move_insn (gen_imagpart (mode, target), op1);
7789
7790	insns = get_insns ();
7791	end_sequence ();
7792
7793	/* Complex construction should appear as a single unit.  */
7794	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7795	   each with a separate pseudo as destination.
7796	   It's not correct for flow to treat them as a unit.  */
7797	if (GET_CODE (target) != CONCAT)
7798	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7799	else
7800	  emit_insns (insns);
7801
7802	return target;
7803      }
7804
7805    case REALPART_EXPR:
7806      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7807      return gen_realpart (mode, op0);
7808
7809    case IMAGPART_EXPR:
7810      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7811      return gen_imagpart (mode, op0);
7812
7813    case CONJ_EXPR:
7814      {
7815	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7816	rtx imag_t;
7817	rtx insns;
7818
7819	op0  = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7820
7821	if (! target)
7822	  target = gen_reg_rtx (mode);
7823
7824	start_sequence ();
7825
7826	/* Store the realpart and the negated imagpart to target.  */
7827	emit_move_insn (gen_realpart (partmode, target),
7828			gen_realpart (partmode, op0));
7829
7830	imag_t = gen_imagpart (partmode, target);
7831	temp = expand_unop (partmode, neg_optab,
7832			       gen_imagpart (partmode, op0), imag_t, 0);
7833	if (temp != imag_t)
7834	  emit_move_insn (imag_t, temp);
7835
7836	insns = get_insns ();
7837	end_sequence ();
7838
7839	/* Conjugate should appear as a single unit.
7840	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7841	   each with a separate pseudo as destination.
7842	   It's not correct for flow to treat them as a unit.  */
7843	if (GET_CODE (target) != CONCAT)
7844	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7845	else
7846	  emit_insns (insns);
7847
7848	return target;
7849      }
7850
7851    case TRY_CATCH_EXPR:
7852      {
7853	tree handler = TREE_OPERAND (exp, 1);
7854
7855	expand_eh_region_start ();
7856
7857	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7858
7859	expand_eh_region_end (handler);
7860
7861	return op0;
7862      }
7863
7864    case POPDCC_EXPR:
7865      {
7866	rtx dcc = get_dynamic_cleanup_chain ();
7867	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7868	return const0_rtx;
7869      }
7870
7871    case POPDHC_EXPR:
7872      {
7873	rtx dhc = get_dynamic_handler_chain ();
7874	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7875	return const0_rtx;
7876      }
7877
7878    case ERROR_MARK:
7879      op0 = CONST0_RTX (tmode);
7880      if (op0 != 0)
7881	return op0;
7882      return const0_rtx;
7883
7884    default:
7885      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7886    }
7887
7888  /* Here to do an ordinary binary operator, generating an instruction
7889     from the optab already placed in `this_optab'.  */
7890 binop:
7891  preexpand_calls (exp);
7892  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7893    subtarget = 0;
7894  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7895  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7896 binop2:
7897  temp = expand_binop (mode, this_optab, op0, op1, target,
7898		       unsignedp, OPTAB_LIB_WIDEN);
7899  if (temp == 0)
7900    abort ();
7901  return temp;
7902}
7903
7904
7905
7906/* Return the alignment in bits of EXP, a pointer valued expression.
7907   But don't return more than MAX_ALIGN no matter what.
7908   The alignment returned is, by default, the alignment of the thing that
7909   EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7910
7911   Otherwise, look at the expression to see if we can do better, i.e., if the
7912   expression is actually pointing at an object whose alignment is tighter.  */
7913
7914static int
7915get_pointer_alignment (exp, max_align)
7916     tree exp;
7917     unsigned max_align;
7918{
7919  unsigned align, inner;
7920
7921  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7922    return 0;
7923
7924  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7925  align = MIN (align, max_align);
7926
7927  while (1)
7928    {
7929      switch (TREE_CODE (exp))
7930	{
7931	case NOP_EXPR:
7932	case CONVERT_EXPR:
7933	case NON_LVALUE_EXPR:
7934	  exp = TREE_OPERAND (exp, 0);
7935	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7936	    return align;
7937	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7938	  align = MIN (inner, max_align);
7939	  break;
7940
7941	case PLUS_EXPR:
7942	  /* If sum of pointer + int, restrict our maximum alignment to that
7943	     imposed by the integer.  If not, we can't do any better than
7944	     ALIGN.  */
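	  /* e.g., with a constant byte offset of 2 and MAX_ALIGN of 32,
	     the loop below reduces the guarantee to 16-bit alignment.  */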
7945	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7946	    return align;
7947
7948	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7949		  & (max_align - 1))
7950		 != 0)
7951	    max_align >>= 1;
7952
7953	  exp = TREE_OPERAND (exp, 0);
7954	  break;
7955
7956	case ADDR_EXPR:
7957	  /* See what we are pointing at and look at its alignment.  */
7958	  exp = TREE_OPERAND (exp, 0);
7959	  if (TREE_CODE (exp) == FUNCTION_DECL)
7960	    align = FUNCTION_BOUNDARY;
7961	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7962	    align = DECL_ALIGN (exp);
7963#ifdef CONSTANT_ALIGNMENT
7964	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7965	    align = CONSTANT_ALIGNMENT (exp, align);
7966#endif
7967	  return MIN (align, max_align);
7968
7969	default:
7970	  return align;
7971	}
7972    }
7973}
7974
7975/* Return the tree node and offset if a given argument corresponds to
7976   a string constant.  */
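/* e.g., for an ADDR_EXPR of "abc" this returns the STRING_CST with
   *PTR_OFFSET set to integer_zero_node; for "abc" + i (a PLUS_EXPR)
   it returns the STRING_CST with *PTR_OFFSET set to i.  */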
7977
7978static tree
7979string_constant (arg, ptr_offset)
7980     tree arg;
7981     tree *ptr_offset;
7982{
7983  STRIP_NOPS (arg);
7984
7985  if (TREE_CODE (arg) == ADDR_EXPR
7986      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7987    {
7988      *ptr_offset = integer_zero_node;
7989      return TREE_OPERAND (arg, 0);
7990    }
7991  else if (TREE_CODE (arg) == PLUS_EXPR)
7992    {
7993      tree arg0 = TREE_OPERAND (arg, 0);
7994      tree arg1 = TREE_OPERAND (arg, 1);
7995
7996      STRIP_NOPS (arg0);
7997      STRIP_NOPS (arg1);
7998
7999      if (TREE_CODE (arg0) == ADDR_EXPR
8000	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8001	{
8002	  *ptr_offset = arg1;
8003	  return TREE_OPERAND (arg0, 0);
8004	}
8005      else if (TREE_CODE (arg1) == ADDR_EXPR
8006	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8007	{
8008	  *ptr_offset = arg0;
8009	  return TREE_OPERAND (arg1, 0);
8010	}
8011    }
8012
8013  return 0;
8014}
8015
8016/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
8017   answer, because the string could contain a zero byte in the middle.
8018   TREE_STRING_LENGTH is the size of the character array, not the string.
8019
8020   Unfortunately, string_constant can't access the values of const char
8021   arrays with initializers, so neither can we do so here.  */
8022
8023static tree
8024c_strlen (src)
8025     tree src;
8026{
8027  tree offset_node;
8028  int offset, max;
8029  char *ptr;
8030
8031  src = string_constant (src, &offset_node);
8032  if (src == 0)
8033    return 0;
8034  max = TREE_STRING_LENGTH (src);
8035  ptr = TREE_STRING_POINTER (src);
8036  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8037    {
8038      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8039	 compute the offset to the following null if we don't know where to
8040	 start searching for it.  */
8041      int i;
8042      for (i = 0; i < max; i++)
8043	if (ptr[i] == 0)
8044	  return 0;
8045      /* We don't know the starting offset, but we do know that the string
8046	 has no internal zero bytes.  We can assume that the offset falls
8047	 within the bounds of the string; otherwise, the programmer deserves
8048	 what he gets.  Subtract the offset from the length of the string,
8049	 and return that.  */
8050      /* This would perhaps not be valid if we were dealing with named
8051         arrays in addition to literal string constants.  */
8052      return size_binop (MINUS_EXPR, size_int (max), offset_node);
8053    }
8054
8055  /* We have a known offset into the string.  Start searching there for
8056     a null character.  */
8057  if (offset_node == 0)
8058    offset = 0;
8059  else
8060    {
8061      /* Did we get a long long offset?  If so, punt.  */
8062      if (TREE_INT_CST_HIGH (offset_node) != 0)
8063	return 0;
8064      offset = TREE_INT_CST_LOW (offset_node);
8065    }
8066  /* If the offset is known to be out of bounds, warn, and call strlen at
8067     runtime.  */
8068  if (offset < 0 || offset > max)
8069    {
8070      warning ("offset outside bounds of constant string");
8071      return 0;
8072    }
8073  /* Use strlen to search for the first zero byte.  Since any strings
8074     constructed with build_string will have nulls appended, we win even
8075     if we get handed something like (char[4])"abcd".
8076
8077     Since OFFSET is our starting index into the string, no further
8078     calculation is needed.  */
8079  return size_int (strlen (ptr + offset));
8080}
8081
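/* At the source level, the effect of c_strlen above is that, when
   optimizing, strlen of a literal folds to a constant.  A user-level
   sketch (illustrative only; kept under #if 0 since it is not part of
   the compiler):  */
#if 0
#include <string.h>

static int
c_strlen_example (void)
{
  /* string_constant recognizes the literal-plus-offset form, and
     c_strlen then returns 4, so no library call is emitted.  */
  return (int) strlen ("foobar" + 2);
}
#endif
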
8082rtx
8083expand_builtin_return_addr (fndecl_code, count, tem)
8084     enum built_in_function fndecl_code;
8085     int count;
8086     rtx tem;
8087{
8088  int i;
8089
8090  /* Some machines need special handling before we can access
8091     arbitrary frames.  For example, on the sparc, we must first flush
8092     all register windows to the stack.  */
8093#ifdef SETUP_FRAME_ADDRESSES
8094  if (count > 0)
8095    SETUP_FRAME_ADDRESSES ();
8096#endif
8097
8098  /* On the sparc, the return address is not in the frame, it is in a
8099     register.  There is no way to access it off of the current frame
8100     pointer, but it can be accessed off the previous frame pointer by
8101     reading the value from the register window save area.  */
8102#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8103  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8104    count--;
8105#endif
8106
8107  /* Scan back COUNT frames to the specified frame.  */
8108  for (i = 0; i < count; i++)
8109    {
8110      /* Assume the dynamic chain pointer is in the word that the
8111	 frame address points to, unless otherwise specified.  */
8112#ifdef DYNAMIC_CHAIN_ADDRESS
8113      tem = DYNAMIC_CHAIN_ADDRESS (tem);
8114#endif
8115      tem = memory_address (Pmode, tem);
8116      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8117    }
8118
8119  /* For __builtin_frame_address, return what we've got.  */
8120  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8121    return tem;
8122
8123  /* For __builtin_return_address, get the return address from that
8124     frame.  */
8125#ifdef RETURN_ADDR_RTX
8126  tem = RETURN_ADDR_RTX (count, tem);
8127#else
8128  tem = memory_address (Pmode,
8129			plus_constant (tem, GET_MODE_SIZE (Pmode)));
8130  tem = gen_rtx_MEM (Pmode, tem);
8131#endif
8132  return tem;
8133}
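
/* A user-level sketch of the builtins handled above (illustrative
   only; kept under #if 0): a count of 0 names the current frame, so
   these return the caller's resume address and our own frame address
   respectively.  */
#if 0
static void *
my_return_address (void)
{
  return __builtin_return_address (0);
}

static void *
my_frame_address (void)
{
  return __builtin_frame_address (0);
}
#endif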
8134
8135/* __builtin_setjmp is passed a pointer to an array of five words (not
8136   all will be used on all machines).  It operates similarly to the C
8137   library function of the same name, but is more efficient.  Much of
8138   the code below (and for longjmp) is copied from the handling of
8139   non-local gotos.
8140
8141   NOTE: This is intended for use by GNAT and the exception handling
8142   scheme in the compiler and will only work in the manner in which
8143   they use it.  */
8144
8145rtx
8146expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8147     rtx buf_addr;
8148     rtx target;
8149     rtx first_label, next_label;
8150{
8151  rtx lab1 = gen_label_rtx ();
8152  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8153  enum machine_mode value_mode;
8154  rtx stack_save;
8155
8156  value_mode = TYPE_MODE (integer_type_node);
8157
8158#ifdef POINTERS_EXTEND_UNSIGNED
8159  buf_addr = convert_memory_address (Pmode, buf_addr);
8160#endif
8161
8162  buf_addr = force_reg (Pmode, buf_addr);
8163
8164  if (target == 0 || GET_CODE (target) != REG
8165      || REGNO (target) < FIRST_PSEUDO_REGISTER)
8166    target = gen_reg_rtx (value_mode);
8167
8168  emit_queue ();
8169
8170  /* We store the frame pointer and the address of lab1 in the buffer
8171     and use the rest of it for the stack save area, which is
8172     machine-dependent.  */
8173
8174#ifndef BUILTIN_SETJMP_FRAME_VALUE
8175#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8176#endif
8177
8178  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8179		  BUILTIN_SETJMP_FRAME_VALUE);
8180  emit_move_insn (validize_mem
8181		  (gen_rtx_MEM (Pmode,
8182				plus_constant (buf_addr,
8183					       GET_MODE_SIZE (Pmode)))),
8184		  gen_rtx_LABEL_REF (Pmode, lab1));
8185
8186  stack_save = gen_rtx_MEM (sa_mode,
8187			    plus_constant (buf_addr,
8188					   2 * GET_MODE_SIZE (Pmode)));
8189  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8190
8191  /* If there is further processing to do, do it.  */
8192#ifdef HAVE_builtin_setjmp_setup
8193  if (HAVE_builtin_setjmp_setup)
8194    emit_insn (gen_builtin_setjmp_setup (buf_addr));
8195#endif
8196
8197  /* Set TARGET to zero and branch to the first-time-through label.  */
8198  emit_move_insn (target, const0_rtx);
8199  emit_jump_insn (gen_jump (first_label));
8200  emit_barrier ();
8201  emit_label (lab1);
8202
8203  /* Tell flow about the strange goings on.  */
8204  current_function_has_nonlocal_label = 1;
8205
8206  /* The FP is clobbered when we get here, so we have to make sure it's
8207     marked as used by this function.  */
8208  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8209
8210  /* Mark the static chain as clobbered here so life information
8211     doesn't get messed up for it.  */
8212  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8213
8214  /* Now put in the code to restore the frame pointer, and argument
8215     pointer, if needed.  The code below is from expand_end_bindings
8216     in stmt.c; see detailed documentation there.  */
8217#ifdef HAVE_nonlocal_goto
8218  if (! HAVE_nonlocal_goto)
8219#endif
8220    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8221
8222#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8223  if (fixed_regs[ARG_POINTER_REGNUM])
8224    {
8225#ifdef ELIMINABLE_REGS
8226      int i;
8227      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8228
8229      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8230	if (elim_regs[i].from == ARG_POINTER_REGNUM
8231	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8232	  break;
8233
8234      if (i == sizeof elim_regs / sizeof elim_regs [0])
8235#endif
8236	{
8237	  /* Now restore our arg pointer from the address at which it
8238	     was saved in our stack frame.
8239	     If space hasn't been allocated for it yet, allocate
8240	     some now.  */
8241	  if (arg_pointer_save_area == 0)
8242	    arg_pointer_save_area
8243	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8244	  emit_move_insn (virtual_incoming_args_rtx,
8245			  copy_to_reg (arg_pointer_save_area));
8246	}
8247    }
8248#endif
8249
8250#ifdef HAVE_builtin_setjmp_receiver
8251  if (HAVE_builtin_setjmp_receiver)
8252    emit_insn (gen_builtin_setjmp_receiver (lab1));
8253  else
8254#endif
8255#ifdef HAVE_nonlocal_goto_receiver
8256    if (HAVE_nonlocal_goto_receiver)
8257      emit_insn (gen_nonlocal_goto_receiver ());
8258    else
8259#endif
8260      {
8261	; /* Nothing */
8262      }
8263
8264  /* Set TARGET, and branch to the next-time-through label.  */
8265  emit_move_insn (target, const1_rtx);
8266  emit_jump_insn (gen_jump (next_label));
8267  emit_barrier ();
8268
8269  return target;
8270}
8271
8272void
8273expand_builtin_longjmp (buf_addr, value)
8274     rtx buf_addr, value;
8275{
8276  rtx fp, lab, stack;
8277  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8278
8279#ifdef POINTERS_EXTEND_UNSIGNED
8280  buf_addr = convert_memory_address (Pmode, buf_addr);
8281#endif
8282  buf_addr = force_reg (Pmode, buf_addr);
8283
8284  /* We used to store value in static_chain_rtx, but that fails if pointers
8285     are smaller than integers.  We instead require that the user must pass
8286     a second argument of 1, because that is what builtin_setjmp will
8287     return.  This also makes EH slightly more efficient, since we are no
8288     longer copying around a value that we don't care about.  */
8289  if (value != const1_rtx)
8290    abort ();
8291
8292#ifdef HAVE_builtin_longjmp
8293  if (HAVE_builtin_longjmp)
8294    emit_insn (gen_builtin_longjmp (buf_addr));
8295  else
8296#endif
8297    {
8298      fp = gen_rtx_MEM (Pmode, buf_addr);
8299      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8300					       GET_MODE_SIZE (Pmode)));
8301
8302      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8303						   2 * GET_MODE_SIZE (Pmode)));
8304
8305      /* Pick up FP, label, and SP from the block and jump.  This code is
8306	 from expand_goto in stmt.c; see there for detailed comments.  */
8307#if HAVE_nonlocal_goto
8308      if (HAVE_nonlocal_goto)
8309	/* We have to pass a value to the nonlocal_goto pattern that will
8310	   get copied into the static_chain pointer, but it does not matter
8311	   what that value is, because builtin_setjmp does not use it.  */
8312	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8313      else
8314#endif
8315	{
8316	  lab = copy_to_reg (lab);
8317
8318	  emit_move_insn (hard_frame_pointer_rtx, fp);
8319	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8320
8321	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8322	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8323	  emit_indirect_jump (lab);
8324	}
8325    }
8326}
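
/* A user-level sketch of the pair above (illustrative only; kept
   under #if 0).  The NOTE before expand_builtin_setjmp applies: these
   are for internal use, and this only shows the calling convention.
   The buffer is the five-word array both builtins expect, and the
   second argument of __builtin_longjmp must be the constant 1, as
   enforced above.  */
#if 0
static void *setjmp_example_buf[5];

static int
setjmp_example (void)
{
  if (__builtin_setjmp (setjmp_example_buf) == 0)
    __builtin_longjmp (setjmp_example_buf, 1);	/* Does not return.  */
  return 1;	/* Reached by way of the longjmp.  */
}
#endif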
8327
8328static rtx
8329get_memory_rtx (exp)
8330     tree exp;
8331{
8332  rtx mem;
8333  int is_aggregate;
8334
8335  mem = gen_rtx_MEM (BLKmode,
8336		     memory_address (BLKmode,
8337				     expand_expr (exp, NULL_RTX,
8338						  ptr_mode, EXPAND_SUM)));
8339
8340  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8341
8342  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
8343     if the value is the address of a structure or if the expression is
8344     cast to a pointer to structure type.  */
8345  is_aggregate = 0;
8346
8347  while (TREE_CODE (exp) == NOP_EXPR)
8348    {
8349      tree cast_type = TREE_TYPE (exp);
8350      if (TREE_CODE (cast_type) == POINTER_TYPE
8351	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8352	{
8353	  is_aggregate = 1;
8354	  break;
8355	}
8356      exp = TREE_OPERAND (exp, 0);
8357    }
8358
8359  if (is_aggregate == 0)
8360    {
8361      tree type;
8362
8363      if (TREE_CODE (exp) == ADDR_EXPR)
8364	/* If this is the address of an object, check whether the
8365	   object is an array.  */
8366	type = TREE_TYPE (TREE_OPERAND (exp, 0));
8367      else
8368	type = TREE_TYPE (TREE_TYPE (exp));
8369      is_aggregate = AGGREGATE_TYPE_P (type);
8370    }
8371
8372  MEM_IN_STRUCT_P (mem) = is_aggregate;
8373  return mem;
8374}
8375
8376
8377/* Expand an expression EXP that calls a built-in function,
8378   with result going to TARGET if that's convenient
8379   (and in mode MODE if that's convenient).
8380   SUBTARGET may be used as the target for computing one of EXP's operands.
8381   IGNORE is nonzero if the value is to be ignored.  */
8382
8383#define CALLED_AS_BUILT_IN(NODE) \
8384   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
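
/* So, for example, a call written __builtin_memcpy (d, s, n) may be
   expanded in-line even when not optimizing, while one written
   memcpy (d, s, n) is only considered for in-line expansion when
   optimizing (see the !optimize checks below).  */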
8385
8386static rtx
8387expand_builtin (exp, target, subtarget, mode, ignore)
8388     tree exp;
8389     rtx target;
8390     rtx subtarget;
8391     enum machine_mode mode;
8392     int ignore;
8393{
8394  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8395  tree arglist = TREE_OPERAND (exp, 1);
8396  rtx op0;
8397  rtx lab1, insns;
8398  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8399  optab builtin_optab;
8400
8401  switch (DECL_FUNCTION_CODE (fndecl))
8402    {
8403    case BUILT_IN_ABS:
8404    case BUILT_IN_LABS:
8405    case BUILT_IN_FABS:
8406      /* build_function_call changes these into ABS_EXPR.  */
8407      abort ();
8408
8409    case BUILT_IN_SIN:
8410    case BUILT_IN_COS:
8411      /* Treat these like sqrt, but only if the user asks for them.  */
8412      if (! flag_fast_math)
8413	break;
8414    case BUILT_IN_FSQRT:
8415      /* If not optimizing, call the library function.  */
8416      if (! optimize)
8417	break;
8418
8419      if (arglist == 0
8420	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
8421	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8422	break;
8423
8424      /* Stabilize and compute the argument.  */
8425      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8426	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8427	{
8428	  exp = copy_node (exp);
8429	  arglist = copy_node (arglist);
8430	  TREE_OPERAND (exp, 1) = arglist;
8431	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8432	}
8433      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8434
8435      /* Make a suitable register to place result in.  */
8436      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8437
8438      emit_queue ();
8439      start_sequence ();
8440
8441      switch (DECL_FUNCTION_CODE (fndecl))
8442	{
8443	case BUILT_IN_SIN:
8444	  builtin_optab = sin_optab; break;
8445	case BUILT_IN_COS:
8446	  builtin_optab = cos_optab; break;
8447	case BUILT_IN_FSQRT:
8448	  builtin_optab = sqrt_optab; break;
8449	default:
8450	  abort ();
8451	}
8452
8453      /* Compute into TARGET.
8454	 Set TARGET to wherever the result comes back.  */
8455      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8456			    builtin_optab, op0, target, 0);
8457
8458      /* If we were unable to expand via the builtin, stop the
8459	 sequence (without outputting the insns) and break, causing
8460	 a call to the library function.  */
8461      if (target == 0)
8462	{
8463	  end_sequence ();
8464	  break;
8465        }
8466
8467      /* Check the results by default.  But if flag_fast_math is turned on,
8468	 then assume sqrt will always be called with valid arguments.  */
8469
8470      if (! flag_fast_math)
8471	{
8472	  /* Don't define the builtin FP instructions
8473	     if your machine is not IEEE.  */
8474	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8475	    abort ();
8476
8477	  lab1 = gen_label_rtx ();
8478
8479	  /* Test the result; if it is NaN, set errno=EDOM because
8480	     the argument was not in the domain.  */
8481	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8482	  emit_jump_insn (gen_beq (lab1));
8483
8484#ifdef TARGET_EDOM
8485	  {
8486#ifdef GEN_ERRNO_RTX
8487	    rtx errno_rtx = GEN_ERRNO_RTX;
8488#else
8489	    rtx errno_rtx
8490	      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8491#endif
8492
8493	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8494	  }
8495#else
8496	  /* We can't set errno=EDOM directly; let the library call do it.
8497	     Pop the arguments right away in case the call gets deleted.  */
8498	  NO_DEFER_POP;
8499	  expand_call (exp, target, 0);
8500	  OK_DEFER_POP;
8501#endif
8502
8503	  emit_label (lab1);
8504	}
8505
8506      /* Output the entire sequence.  */
8507      insns = get_insns ();
8508      end_sequence ();
8509      emit_insns (insns);
8510
8511      return target;
8512
8513    case BUILT_IN_FMOD:
8514      break;
8515
8516      /* __builtin_apply_args returns a block of memory allocated on
8517	 the stack into which are stored the arg pointer, structure
8518	 value address, static chain, and all the registers that might
8519	 possibly be used in performing a function call.  The code is
8520	 moved to the start of the function so the incoming values are
8521	 saved.  */
8522    case BUILT_IN_APPLY_ARGS:
8523      /* Don't do __builtin_apply_args more than once in a function.
8524	 Save the result of the first call and reuse it.  */
8525      if (apply_args_value != 0)
8526	return apply_args_value;
8527      {
8528	/* When this function is called, it means that registers must be
8529	   saved on entry to this function.  So we migrate the
8530	   call to the first insn of this function.  */
8531	rtx temp;
8532	rtx seq;
8533
8534	start_sequence ();
8535	temp = expand_builtin_apply_args ();
8536	seq = get_insns ();
8537	end_sequence ();
8538
8539	apply_args_value = temp;
8540
8541	/* Put the sequence after the NOTE that starts the function.
8542	   If this is inside a SEQUENCE, make the outer-level insn
8543	   chain current, so the code is placed at the start of the
8544	   function.  */
8545	push_topmost_sequence ();
8546	emit_insns_before (seq, NEXT_INSN (get_insns ()));
8547	pop_topmost_sequence ();
8548	return temp;
8549      }
8550
8551      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8552	 FUNCTION with a copy of the parameters described by
8553	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
8554	 allocated on the stack into which are stored all the registers
8555	 that might possibly be used for returning the result of a
8556	 function.  ARGUMENTS is the value returned by
8557	 __builtin_apply_args.  ARGSIZE is the number of bytes of
8558	 arguments that must be copied.  ??? How should this value be
8559	 computed?  We'll also need a safe worst case value for varargs
8560	 functions.  */
8561    case BUILT_IN_APPLY:
8562      if (arglist == 0
8563	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
8564	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8565	  || TREE_CHAIN (arglist) == 0
8566	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8567	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8568	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8569	return const0_rtx;
8570      else
8571	{
8572	  int i;
8573	  tree t;
8574	  rtx ops[3];
8575
8576	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8577	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8578
8579	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
8580	}
8581
8582      /* __builtin_return (RESULT) causes the function to return the
8583	 value described by RESULT.  RESULT is the address of the block of
8584	 memory returned by __builtin_apply.  */
8585    case BUILT_IN_RETURN:
8586      if (arglist
8587	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
8588	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8589	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8590					    NULL_RTX, VOIDmode, 0));
8591      return const0_rtx;
8592
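      /* Taken together, the three builtins above permit an untyped
	 forwarding function.  A user-level sketch (illustrative only;
	 the 64-byte argsize is an arbitrary guess, cf. the ??? comment
	 above):

	   double target (int, double);

	   double
	   forward (int a, double b)
	   {
	     void *args = __builtin_apply_args ();
	     void *result
	       = __builtin_apply ((void (*) ()) target, args, 64);
	     __builtin_return (result);
	   }  */
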
8593    case BUILT_IN_SAVEREGS:
8594      /* Don't do __builtin_saveregs more than once in a function.
8595	 Save the result of the first call and reuse it.  */
8596      if (saveregs_value != 0)
8597	return saveregs_value;
8598      {
8599	/* When this function is called, it means that registers must be
8600	   saved on entry to this function.  So we migrate the
8601	   call to the first insn of this function.  */
8602	rtx temp;
8603	rtx seq;
8604
8605	/* Now really call the function.  `expand_call' does not call
8606	   expand_builtin, so there is no danger of infinite recursion here.  */
8607	start_sequence ();
8608
8609#ifdef EXPAND_BUILTIN_SAVEREGS
8610	/* Do whatever the machine needs done in this case.  */
8611	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8612#else
8613	/* The register where the function returns its value
8614	   is likely to have something else in it, such as an argument.
8615	   So preserve that register around the call.  */
8616
8617	if (value_mode != VOIDmode)
8618	  {
8619	    rtx valreg = hard_libcall_value (value_mode);
8620	    rtx saved_valreg = gen_reg_rtx (value_mode);
8621
8622	    emit_move_insn (saved_valreg, valreg);
8623	    temp = expand_call (exp, target, ignore);
8624	    emit_move_insn (valreg, saved_valreg);
8625	  }
8626	else
8627	  /* Generate the call, putting the value in a pseudo.  */
8628	  temp = expand_call (exp, target, ignore);
8629#endif
8630
8631	seq = get_insns ();
8632	end_sequence ();
8633
8634	saveregs_value = temp;
8635
8636	/* Put the sequence after the NOTE that starts the function.
8637	   If this is inside a SEQUENCE, make the outer-level insn
8638	   chain current, so the code is placed at the start of the
8639	   function.  */
8640	push_topmost_sequence ();
8641	emit_insns_before (seq, NEXT_INSN (get_insns ()));
8642	pop_topmost_sequence ();
8643	return temp;
8644      }
8645
8646      /* __builtin_args_info (N) returns word N of the arg space info
8647	 for the current function.  The number and meanings of words
8648	 are controlled by the definition of CUMULATIVE_ARGS.  */
8649    case BUILT_IN_ARGS_INFO:
8650      {
8651	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8652	int *word_ptr = (int *) &current_function_args_info;
8653#if 0
8654	/* These are used by the code below that is #if 0'ed away.  */
8655	int i;
8656	tree type, elts, result;
8657#endif
8658
8659	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8660	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8661		 __FILE__, __LINE__);
8662
8663	if (arglist != 0)
8664	  {
8665	    tree arg = TREE_VALUE (arglist);
8666	    if (TREE_CODE (arg) != INTEGER_CST)
8667	      error ("argument of `__builtin_args_info' must be constant");
8668	    else
8669	      {
8670		int wordnum = TREE_INT_CST_LOW (arg);
8671
8672		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8673		  error ("argument of `__builtin_args_info' out of range");
8674		else
8675		  return GEN_INT (word_ptr[wordnum]);
8676	      }
8677	  }
8678	else
8679	  error ("missing argument in `__builtin_args_info'");
8680
8681	return const0_rtx;
8682
8683#if 0
8684	for (elts = NULL_TREE, i = 0; i < nwords; i++)
8685	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8686
8687	type = build_array_type (integer_type_node,
8688				 build_index_type (build_int_2 (nwords, 0)));
8689	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8690	TREE_CONSTANT (result) = 1;
8691	TREE_STATIC (result) = 1;
8692	result = build (INDIRECT_REF, build_pointer_type (type), result);
8693	TREE_CONSTANT (result) = 1;
8694	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8695#endif
8696      }
8697
8698      /* Return the address of the first anonymous stack arg.  */
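      /* In user terms (illustrative): for

	   int sum (int count, ...)
	   { va_list ap; va_start (ap, count); ... }

	 va_start expands to a use of __builtin_next_arg, and the code
	 below warns unless its argument is COUNT, the last named
	 parameter.  */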
8699    case BUILT_IN_NEXT_ARG:
8700      {
8701	tree fntype = TREE_TYPE (current_function_decl);
8702
8703	if ((TYPE_ARG_TYPES (fntype) == 0
8704	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8705		 == void_type_node))
8706	    && ! current_function_varargs)
8707	  {
8708	    error ("`va_start' used in function with fixed args");
8709	    return const0_rtx;
8710	  }
8711
8712	if (arglist)
8713	  {
8714	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8715	    tree arg = TREE_VALUE (arglist);
8716
8717	    /* Strip off all nops for the sake of the comparison.  This
8718	       is not quite the same as STRIP_NOPS.  It does more.
8719	       We must also strip off INDIRECT_EXPR for C++ reference
8720	       parameters.  */
8721	    while (TREE_CODE (arg) == NOP_EXPR
8722		   || TREE_CODE (arg) == CONVERT_EXPR
8723		   || TREE_CODE (arg) == NON_LVALUE_EXPR
8724		   || TREE_CODE (arg) == INDIRECT_REF)
8725	      arg = TREE_OPERAND (arg, 0);
8726	    if (arg != last_parm)
8727	      warning ("second parameter of `va_start' not last named argument");
8728	  }
8729	else if (! current_function_varargs)
8730	  /* Evidently an out of date version of <stdarg.h>; can't validate
8731	     va_start's second argument, but can still work as intended.  */
8732	  warning ("`__builtin_next_arg' called without an argument");
8733      }
8734
8735      return expand_binop (Pmode, add_optab,
8736			   current_function_internal_arg_pointer,
8737			   current_function_arg_offset_rtx,
8738			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
8739
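      /* __builtin_classify_type returns a small integer naming the
	 type class of its argument: e.g. (illustrative)
	 __builtin_classify_type (1) folds to integer_type_class and
	 __builtin_classify_type (1.5) to real_type_class.  */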
8740    case BUILT_IN_CLASSIFY_TYPE:
8741      if (arglist != 0)
8742	{
8743	  tree type = TREE_TYPE (TREE_VALUE (arglist));
8744	  enum tree_code code = TREE_CODE (type);
8745	  if (code == VOID_TYPE)
8746	    return GEN_INT (void_type_class);
8747	  if (code == INTEGER_TYPE)
8748	    return GEN_INT (integer_type_class);
8749	  if (code == CHAR_TYPE)
8750	    return GEN_INT (char_type_class);
8751	  if (code == ENUMERAL_TYPE)
8752	    return GEN_INT (enumeral_type_class);
8753	  if (code == BOOLEAN_TYPE)
8754	    return GEN_INT (boolean_type_class);
8755	  if (code == POINTER_TYPE)
8756	    return GEN_INT (pointer_type_class);
8757	  if (code == REFERENCE_TYPE)
8758	    return GEN_INT (reference_type_class);
8759	  if (code == OFFSET_TYPE)
8760	    return GEN_INT (offset_type_class);
8761	  if (code == REAL_TYPE)
8762	    return GEN_INT (real_type_class);
8763	  if (code == COMPLEX_TYPE)
8764	    return GEN_INT (complex_type_class);
8765	  if (code == FUNCTION_TYPE)
8766	    return GEN_INT (function_type_class);
8767	  if (code == METHOD_TYPE)
8768	    return GEN_INT (method_type_class);
8769	  if (code == RECORD_TYPE)
8770	    return GEN_INT (record_type_class);
8771	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8772	    return GEN_INT (union_type_class);
8773	  if (code == ARRAY_TYPE)
8774	    {
8775	      if (TYPE_STRING_FLAG (type))
8776		return GEN_INT (string_type_class);
8777	      else
8778		return GEN_INT (array_type_class);
8779	    }
8780	  if (code == SET_TYPE)
8781	    return GEN_INT (set_type_class);
8782	  if (code == FILE_TYPE)
8783	    return GEN_INT (file_type_class);
8784	  if (code == LANG_TYPE)
8785	    return GEN_INT (lang_type_class);
8786	}
8787      return GEN_INT (no_type_class);
8788
8789    case BUILT_IN_CONSTANT_P:
8790      if (arglist == 0)
8791	return const0_rtx;
8792      else
8793	{
8794	  tree arg = TREE_VALUE (arglist);
8795
8796	  STRIP_NOPS (arg);
8797	  if (really_constant_p (arg)
8798	      || (TREE_CODE (arg) == ADDR_EXPR
8799		  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8800	    return const1_rtx;
8801
8802	  /* Only emit CONSTANT_P_RTX if CSE will be run.
8803	     Moreover, we don't want to expand trees that have side effects,
8804	     as the original __builtin_constant_p did not evaluate its
8805	     argument at all, and we would break existing usage by changing
8806	     this.  This quirk was generally useful, eliminating a bit of hair
8807	     in the writing of the macros that use this function.  Now the
8808	     same thing can be better accomplished in an inline function.  */
8809
8810	  if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
8811	    {
8812	      /* Lazy fixup of old code: issue a warning and fail the test.  */
8813	      if (! can_handle_constant_p)
8814		{
8815		  warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
8816		  warning ("Please report this as a bug to egcs-bugs@egcs.cygnus.com.");
8817		  return const0_rtx;
8818		}
8819	      return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
8820				             expand_expr (arg, NULL_RTX,
8821							  VOIDmode, 0));
8822	    }
8823
8824	  return const0_rtx;
8825	}
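
      /* The case just above, in user terms (illustrative):
	 __builtin_constant_p (3 * 4) yields 1 immediately; a
	 side-effecting argument such as i++ yields 0 without being
	 evaluated; any other non-constant argument is deferred via
	 CONSTANT_P_RTX while CSE has still to run, so it may yet be
	 proven constant.  */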
8826
8827    case BUILT_IN_FRAME_ADDRESS:
8828      /* The argument must be a nonnegative integer constant.
8829	 It counts the number of frames to scan up the stack.
8830	 The value is the address of that frame.  */
8831    case BUILT_IN_RETURN_ADDRESS:
8832      /* The argument must be a nonnegative integer constant.
8833	 It counts the number of frames to scan up the stack.
8834	 The value is the return address saved in that frame.  */
8835      if (arglist == 0)
8836	/* Warning about missing arg was already issued.  */
8837	return const0_rtx;
8838      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8839	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8840	{
8841	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8842	    error ("invalid arg to `__builtin_frame_address'");
8843	  else
8844	    error ("invalid arg to `__builtin_return_address'");
8845	  return const0_rtx;
8846	}
8847      else
8848	{
8849	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8850						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8851						hard_frame_pointer_rtx);
8852
8853	  /* Some ports cannot access arbitrary stack frames.  */
8854	  if (tem == NULL)
8855	    {
8856	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8857		warning ("unsupported arg to `__builtin_frame_address'");
8858	      else
8859		warning ("unsupported arg to `__builtin_return_address'");
8860	      return const0_rtx;
8861	    }
8862
8863	  /* For __builtin_frame_address, return what we've got.  */
8864	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8865	    return tem;
8866
8867	  if (GET_CODE (tem) != REG)
8868	    tem = copy_to_reg (tem);
8869	  return tem;
8870	}
8871
8872    /* Returns the address of the area where the structure is returned.
8873       0 otherwise.  */
8874    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8875      if (arglist != 0
8876          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8877          || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8878        return const0_rtx;
8879      else
8880        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8881
8882    case BUILT_IN_ALLOCA:
8883      if (arglist == 0
8884	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
8885	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8886	break;
8887
8888      /* Compute the argument.  */
8889      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8890
8891      /* Allocate the desired space.  */
8892      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8893
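      /* __builtin_ffs (X) is one plus the index of the least
	 significant set bit of X, or zero if X is zero; e.g.
	 __builtin_ffs (0x18) is 4 (illustrative).  */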
8894    case BUILT_IN_FFS:
8895      /* If not optimizing, call the library function.  */
8896      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8897	break;
8898
8899      if (arglist == 0
8900	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
8901	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8902	break;
8903
8904      /* Compute the argument.  */
8905      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8906      /* Compute ffs, into TARGET if possible.
8907	 Set TARGET to wherever the result comes back.  */
8908      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8909			    ffs_optab, op0, target, 1);
8910      if (target == 0)
8911	abort ();
8912      return target;
8913
8914    case BUILT_IN_STRLEN:
8915      /* If not optimizing, call the library function.  */
8916      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8917	break;
8918
8919      if (arglist == 0
8920	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
8921	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8922	break;
8923      else
8924	{
8925	  tree src = TREE_VALUE (arglist);
8926	  tree len = c_strlen (src);
8927
8928	  int align
8929	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8930
8931	  rtx result, src_rtx, char_rtx;
8932	  enum machine_mode insn_mode = value_mode, char_mode;
8933	  enum insn_code icode;
8934
8935	  /* If the length is known, just return it.  */
8936	  if (len != 0)
8937	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8938
8939	  /* If SRC is not a pointer type, don't do this operation inline.  */
8940	  if (align == 0)
8941	    break;
8942
8943	  /* Call a function if we can't compute strlen in the right mode.  */
8944
8945	  while (insn_mode != VOIDmode)
8946	    {
8947	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8948	      if (icode != CODE_FOR_nothing)
8949		break;
8950
8951	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8952	    }
8953	  if (insn_mode == VOIDmode)
8954	    break;
8955
8956	  /* Make a place to write the result of the instruction.  */
8957	  result = target;
8958	  if (! (result != 0
8959		 && GET_CODE (result) == REG
8960		 && GET_MODE (result) == insn_mode
8961		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8962	    result = gen_reg_rtx (insn_mode);
8963
8964	  /* Make sure the operands are acceptable to the predicates.  */
8965
8966	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8967	    result = gen_reg_rtx (insn_mode);
8968	  src_rtx = memory_address (BLKmode,
8969				    expand_expr (src, NULL_RTX, ptr_mode,
8970						 EXPAND_NORMAL));
8971
8972	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8973	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8974
8975	  /* Check the string is readable and has an end.  */
8976	  if (flag_check_memory_usage)
8977	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8978			       src_rtx, ptr_mode,
8979			       GEN_INT (MEMORY_USE_RO),
8980			       TYPE_MODE (integer_type_node));
8981
8982	  char_rtx = const0_rtx;
8983	  char_mode = insn_operand_mode[(int)icode][2];
8984	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8985	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8986
8987	  emit_insn (GEN_FCN (icode) (result,
8988				      gen_rtx_MEM (BLKmode, src_rtx),
8989				      char_rtx, GEN_INT (align)));
8990
8991	  /* Return the value in the proper mode for this function.  */
8992	  if (GET_MODE (result) == value_mode)
8993	    return result;
8994	  else if (target != 0)
8995	    {
8996	      convert_move (target, result, 0);
8997	      return target;
8998	    }
8999	  else
9000	    return convert_to_mode (value_mode, result, 0);
9001	}
9002
9003    case BUILT_IN_STRCPY:
9004      /* If not optimizing, call the library function.  */
9005      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9006	break;
9007
9008      if (arglist == 0
9009	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9010	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9011	  || TREE_CHAIN (arglist) == 0
9012	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9013	break;
9014      else
9015	{
9016	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9017
9018	  if (len == 0)
9019	    break;
9020
9021	  len = size_binop (PLUS_EXPR, len, integer_one_node);
9022
9023	  chainon (arglist, build_tree_list (NULL_TREE, len));
9024	}
9025
9026      /* Drops in.  */
9027    case BUILT_IN_MEMCPY:
9028      /* If not optimizing, call the library function.  */
9029      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9030	break;
9031
9032      if (arglist == 0
9033	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9034	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9035	  || TREE_CHAIN (arglist) == 0
9036	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9037	      != POINTER_TYPE)
9038	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9039	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
9040				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
9041	      != INTEGER_TYPE))
9042	break;
9043      else
9044	{
9045	  tree dest = TREE_VALUE (arglist);
9046	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
9047	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9048
9049	  int src_align
9050	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9051	  int dest_align
9052	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9053	  rtx dest_mem, src_mem, dest_addr, len_rtx;
9054
9055	  /* If either SRC or DEST is not a pointer type, don't do
9056	     this operation in-line.  */
9057	  if (src_align == 0 || dest_align == 0)
9058	    {
9059	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9060		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9061	      break;
9062	    }
9063
9064	  dest_mem = get_memory_rtx (dest);
9065	  src_mem = get_memory_rtx (src);
9066	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9067
9068	  /* Just copy the access rights of SRC to DEST.  */
9069	  if (flag_check_memory_usage)
9070	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9071			       XEXP (dest_mem, 0), ptr_mode,
9072			       XEXP (src_mem, 0), ptr_mode,
9073			       len_rtx, TYPE_MODE (sizetype));
9074
9075	  /* Copy word part most expediently.  */
9076	  dest_addr
9077	    = emit_block_move (dest_mem, src_mem, len_rtx,
9078			       MIN (src_align, dest_align));
9079
9080	  if (dest_addr == 0)
9081	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9082
9083	  return dest_addr;
9084	}
9085
9086    case BUILT_IN_MEMSET:
9087      /* If not optimizing, call the library function.  */
9088      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9089	break;
9090
9091      if (arglist == 0
9092	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9093	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9094	  || TREE_CHAIN (arglist) == 0
9095	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9096	      != INTEGER_TYPE)
9097	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9098	  || (TREE_CODE (TREE_TYPE
9099			 (TREE_VALUE
9100			  (TREE_CHAIN (TREE_CHAIN (arglist)))))
9101	      != INTEGER_TYPE))
9102	break;
9103      else
9104	{
9105	  tree dest = TREE_VALUE (arglist);
9106	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
9107	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9108
9109	  int dest_align
9110	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9111	  rtx dest_mem, dest_addr, len_rtx;
9112
9113	  /* If DEST is not a pointer type, don't do this
9114	     operation in-line.  */
9115	  if (dest_align == 0)
9116	    break;
9117
9118	  /* If the arguments have side-effects, then we can only evaluate
9119	     them at most once.  The following code evaluates them twice if
9120	     they are not constants because we break out to expand_call
9121	     in that case.  They can't be constants if they have side-effects
9122	     so we can check for that first.  Alternatively, we could call
9123	     save_expr to make multiple evaluation safe.  */
9124	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9125	    break;
9126
9127	  /* If VAL is not 0, don't do this operation in-line.  */
9128	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9129	    break;
9130
9131	  /* If LEN does not expand to a constant, don't do this
9132	     operation in-line.  */
9133	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9134	  if (GET_CODE (len_rtx) != CONST_INT)
9135	    break;
9136
9137	  dest_mem = get_memory_rtx (dest);
9138
9139	  /* Just check that DEST is writable and mark it as readable.  */
9140	  if (flag_check_memory_usage)
9141	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9142			       XEXP (dest_mem, 0), ptr_mode,
9143			       len_rtx, TYPE_MODE (sizetype),
9144			       GEN_INT (MEMORY_USE_WO),
9145			       TYPE_MODE (integer_type_node));
9146
9147
9148	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9149
9150	  if (dest_addr == 0)
9151	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9152
9153	  return dest_addr;
9154	}
9155
9156/* These comparison functions need an instruction that returns an actual
9157   index.  An ordinary compare that just sets the condition codes
9158   is not enough.  */
9159#ifdef HAVE_cmpstrsi
9160    case BUILT_IN_STRCMP:
9161      /* If not optimizing, call the library function.  */
9162      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9163	break;
9164
9165      /* If we need to check memory accesses, call the library function.  */
9166      if (flag_check_memory_usage)
9167	break;
9168
9169      if (arglist == 0
9170	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9171	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9172	  || TREE_CHAIN (arglist) == 0
9173	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9174	break;
9175      else if (!HAVE_cmpstrsi)
9176	break;
9177      {
9178	tree arg1 = TREE_VALUE (arglist);
9179	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9180	tree len, len2;
9181
9182	len = c_strlen (arg1);
9183	if (len)
9184	  len = size_binop (PLUS_EXPR, integer_one_node, len);
9185	len2 = c_strlen (arg2);
9186	if (len2)
9187	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9188
9189	/* If we don't have a constant length for the first, use the length
9190	   of the second, if we know it.  We don't require a constant for
9191	   this case; some cost analysis could be done if both are available
9192	   but neither is constant.  For now, assume they're equally cheap.
9193
9194	   If both strings have constant lengths, use the smaller.  This
9195	   could arise if optimization results in strcpy being called with
9196	   two fixed strings, or if the code was machine-generated.  We should
9197	   add some code to the `memcmp' handler below to deal with such
9198	   situations, someday.  */
9199	if (!len || TREE_CODE (len) != INTEGER_CST)
9200	  {
9201	    if (len2)
9202	      len = len2;
9203	    else if (len == 0)
9204	      break;
9205	  }
9206	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9207	  {
9208	    if (tree_int_cst_lt (len2, len))
9209	      len = len2;
9210	  }
9211
9212	chainon (arglist, build_tree_list (NULL_TREE, len));
9213      }
9214
9215      /* Drops in.  */
9216    case BUILT_IN_MEMCMP:
9217      /* If not optimizing, call the library function.  */
9218      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9219	break;
9220
9221      /* If we need to check memory accesses, call the library function.  */
9222      if (flag_check_memory_usage)
9223	break;
9224
9225      if (arglist == 0
9226	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9227	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9228	  || TREE_CHAIN (arglist) == 0
9229	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9230	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9231	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9232	break;
9233      else if (!HAVE_cmpstrsi)
9234	break;
9235      {
9236	tree arg1 = TREE_VALUE (arglist);
9237	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9238	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9239	rtx result;
9240
9241	int arg1_align
9242	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9243	int arg2_align
9244	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9245	enum machine_mode insn_mode
9246	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9247
9248	/* If we don't have POINTER_TYPE, call the function.  */
9249	if (arg1_align == 0 || arg2_align == 0)
9250	  {
9251	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9252	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9253	    break;
9254	  }
9255
9256	/* Make a place to write the result of the instruction.  */
9257	result = target;
9258	if (! (result != 0
9259	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9260	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9261	  result = gen_reg_rtx (insn_mode);
9262
9263	emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9264				 get_memory_rtx (arg2),
9265				 expand_expr (len, NULL_RTX, VOIDmode, 0),
9266				 GEN_INT (MIN (arg1_align, arg2_align))));
9267
9268	/* Return the value in the proper mode for this function.  */
9269	mode = TYPE_MODE (TREE_TYPE (exp));
9270	if (GET_MODE (result) == mode)
9271	  return result;
9272	else if (target != 0)
9273	  {
9274	    convert_move (target, result, 0);
9275	    return target;
9276	  }
9277	else
9278	  return convert_to_mode (mode, result, 0);
9279      }
9280#else
9281    case BUILT_IN_STRCMP:
9282    case BUILT_IN_MEMCMP:
9283      break;
9284#endif
9285
9286    case BUILT_IN_SETJMP:
9287      if (arglist == 0
9288	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9289	break;
9290      else
9291	{
9292	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9293				      VOIDmode, 0);
9294	  rtx lab = gen_label_rtx ();
9295	  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9296	  emit_label (lab);
9297	  return ret;
9298	}
9299
9300      /* __builtin_longjmp is passed a pointer to an array of five words.
9301	 It's similar to the C library longjmp function but works with
9302	 __builtin_setjmp above.  */
9303    case BUILT_IN_LONGJMP:
9304      if (arglist == 0 || TREE_CHAIN (arglist) == 0
9305	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9306	break;
9307      else
9308	{
9309	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9310				      VOIDmode, 0);
9311	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9312				   NULL_RTX, VOIDmode, 0);
9313
9314	  if (value != const1_rtx)
9315	    {
9316	      error ("__builtin_longjmp second argument must be 1");
9317	      return const0_rtx;
9318	    }
9319
9320	  expand_builtin_longjmp (buf_addr, value);
9321	  return const0_rtx;
9322	}
9323
9324    case BUILT_IN_TRAP:
9325#ifdef HAVE_trap
9326      if (HAVE_trap)
9327	emit_insn (gen_trap ());
9328      else
9329#endif
9330	error ("__builtin_trap not supported by this target");
9331      emit_barrier ();
9332      return const0_rtx;
9333
9334      /* Various hooks for the DWARF 2 __throw routine.  */
9335    case BUILT_IN_UNWIND_INIT:
9336      expand_builtin_unwind_init ();
9337      return const0_rtx;
9338    case BUILT_IN_FP:
9339      return frame_pointer_rtx;
9340    case BUILT_IN_SP:
9341      return stack_pointer_rtx;
9342#ifdef DWARF2_UNWIND_INFO
9343    case BUILT_IN_DWARF_FP_REGNUM:
9344      return expand_builtin_dwarf_fp_regnum ();
9345    case BUILT_IN_DWARF_REG_SIZE:
9346      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9347#endif
9348    case BUILT_IN_FROB_RETURN_ADDR:
9349      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9350    case BUILT_IN_EXTRACT_RETURN_ADDR:
9351      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9352    case BUILT_IN_SET_RETURN_ADDR_REG:
9353      expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9354      return const0_rtx;
9355    case BUILT_IN_EH_STUB_OLD:
9356      return expand_builtin_eh_stub_old ();
9357    case BUILT_IN_EH_STUB:
9358      return expand_builtin_eh_stub ();
9359    case BUILT_IN_SET_EH_REGS:
9360      expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9361				  TREE_VALUE (TREE_CHAIN (arglist)));
9362      return const0_rtx;
9363
9364    default:			/* Just do a library call, if unknown builtin.  */
9365      error ("built-in function `%s' not currently supported",
9366	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9367    }
9368
9369  /* The switch statement above can drop through to cause the function
9370     to be called normally.  */
9371
9372  return expand_call (exp, target, ignore);
9373}
9374
9375/* Built-in functions to perform an untyped call and return.  */
9376
9377/* For each register that may be used for calling a function, this
9378   gives a mode used to copy the register's value.  VOIDmode indicates
9379   the register is not used for calling a function.  If the machine
9380   has register windows, this gives only the outbound registers.
9381   INCOMING_REGNO gives the corresponding inbound register.  */
9382static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9383
9384/* For each register that may be used for returning values, this gives
9385   a mode used to copy the register's value.  VOIDmode indicates the
9386   register is not used for returning values.  If the machine has
9387   register windows, this gives only the outbound registers.
9388   INCOMING_REGNO gives the corresponding inbound register.  */
9389static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9390
9391/* For each register that may be used for calling a function, this
9392   gives the offset of that register into the block returned by
9393   __builtin_apply_args.  0 indicates that the register is not
9394   used for calling a function.  */
9395static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9396
9397/* Return the offset of register REGNO into the block returned by
9398   __builtin_apply_args.  This is not declared static, since it is
9399   needed in objc-act.c.  */
9400
9401int
9402apply_args_register_offset (regno)
9403     int regno;
9404{
9405  apply_args_size ();
9406
9407  /* Arguments are always put in outgoing registers (in the argument
9408     block) when that makes sense.  */
9409#ifdef OUTGOING_REGNO
9410  regno = OUTGOING_REGNO(regno);
9411#endif
9412  return apply_args_reg_offset[regno];
9413}
9414
9415/* Return the size required for the block returned by __builtin_apply_args,
9416   and initialize apply_args_mode.  */
9417
9418static int
9419apply_args_size ()
9420{
9421  static int size = -1;
9422  int align, regno;
9423  enum machine_mode mode;
9424
9425  /* The values computed by this function never change.  */
9426  if (size < 0)
9427    {
9428      /* The first value is the incoming arg-pointer.  */
9429      size = GET_MODE_SIZE (Pmode);
9430
9431      /* The second value is the structure value address unless this is
9432	 passed as an "invisible" first argument.  */
9433      if (struct_value_rtx)
9434	size += GET_MODE_SIZE (Pmode);
9435
9436      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9437	if (FUNCTION_ARG_REGNO_P (regno))
9438	  {
9439	    /* Search for the proper mode for copying this register's
9440	       value.  I'm not sure this is right, but it works so far.  */
9441	    enum machine_mode best_mode = VOIDmode;
9442
9443	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9444		 mode != VOIDmode;
9445		 mode = GET_MODE_WIDER_MODE (mode))
9446	      if (HARD_REGNO_MODE_OK (regno, mode)
9447		  && HARD_REGNO_NREGS (regno, mode) == 1)
9448		best_mode = mode;
9449
9450	    if (best_mode == VOIDmode)
9451	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9452		   mode != VOIDmode;
9453		   mode = GET_MODE_WIDER_MODE (mode))
9454		if (HARD_REGNO_MODE_OK (regno, mode)
9455		    && (mov_optab->handlers[(int) mode].insn_code
9456			!= CODE_FOR_nothing))
9457		  best_mode = mode;
9458
9459	    mode = best_mode;
9460	    if (mode == VOIDmode)
9461	      abort ();
9462
9463	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9464	    if (size % align != 0)
9465	      size = CEIL (size, align) * align;
9466	    apply_args_reg_offset[regno] = size;
9467	    size += GET_MODE_SIZE (mode);
9468	    apply_args_mode[regno] = mode;
9469	  }
9470	else
9471	  {
9472	    apply_args_mode[regno] = VOIDmode;
9473	    apply_args_reg_offset[regno] = 0;
9474	  }
9475    }
9476  return size;
9477}
9478
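/* The rounding in the loop above is the usual align-up step: with
   SIZE == 4 and an 8-byte-aligned register mode, CEIL (4, 8) * 8 == 8,
   so that register's image is stored at offset 8 (illustrative).  */
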
9479/* Return the size required for the block returned by __builtin_apply,
9480   and initialize apply_result_mode.  */
9481
9482static int
9483apply_result_size ()
9484{
9485  static int size = -1;
9486  int align, regno;
9487  enum machine_mode mode;
9488
9489  /* The values computed by this function never change.  */
9490  if (size < 0)
9491    {
9492      size = 0;
9493
9494      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9495	if (FUNCTION_VALUE_REGNO_P (regno))
9496	  {
9497	    /* Search for the proper mode for copying this register's
9498	       value.  I'm not sure this is right, but it works so far.  */
9499	    enum machine_mode best_mode = VOIDmode;
9500
9501	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9502		 mode != TImode;
9503		 mode = GET_MODE_WIDER_MODE (mode))
9504	      if (HARD_REGNO_MODE_OK (regno, mode))
9505		best_mode = mode;
9506
9507	    if (best_mode == VOIDmode)
9508	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9509		   mode != VOIDmode;
9510		   mode = GET_MODE_WIDER_MODE (mode))
9511		if (HARD_REGNO_MODE_OK (regno, mode)
9512		    && (mov_optab->handlers[(int) mode].insn_code
9513			!= CODE_FOR_nothing))
9514		  best_mode = mode;
9515
9516	    mode = best_mode;
9517	    if (mode == VOIDmode)
9518	      abort ();
9519
9520	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9521	    if (size % align != 0)
9522	      size = CEIL (size, align) * align;
9523	    size += GET_MODE_SIZE (mode);
9524	    apply_result_mode[regno] = mode;
9525	  }
9526	else
9527	  apply_result_mode[regno] = VOIDmode;
9528
9529      /* Allow targets that use untyped_call and untyped_return to override
9530	 the size so that machine-specific information can be stored here.  */
9531#ifdef APPLY_RESULT_SIZE
9532      size = APPLY_RESULT_SIZE;
9533#endif
9534    }
9535  return size;
9536}
9537
9538#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9539/* Create a vector describing the result block RESULT.  If SAVEP is true,
9540   the result block is used to save the values; otherwise it is used to
9541   restore the values.  */
9542
9543static rtx
9544result_vector (savep, result)
9545     int savep;
9546     rtx result;
9547{
9548  int regno, size, align, nelts;
9549  enum machine_mode mode;
9550  rtx reg, mem;
9551  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9552
9553  size = nelts = 0;
9554  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9555    if ((mode = apply_result_mode[regno]) != VOIDmode)
9556      {
9557	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9558	if (size % align != 0)
9559	  size = CEIL (size, align) * align;
9560	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9561	mem = change_address (result, mode,
9562			      plus_constant (XEXP (result, 0), size));
9563	savevec[nelts++] = (savep
9564			    ? gen_rtx_SET (VOIDmode, mem, reg)
9565			    : gen_rtx_SET (VOIDmode, reg, mem));
9566	size += GET_MODE_SIZE (mode);
9567      }
9568  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9569}
9570#endif /* HAVE_untyped_call or HAVE_untyped_return */
9571
9572/* Save the state required to perform an untyped call with the same
9573   arguments as were passed to the current function.  */
9574
9575static rtx
9576expand_builtin_apply_args ()
9577{
9578  rtx registers;
9579  int size, align, regno;
9580  enum machine_mode mode;
9581
9582  /* Create a block where the arg-pointer, structure value address,
9583     and argument registers can be saved.  */
9584  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9585
9586  /* Walk past the arg-pointer and structure value address.  */
9587  size = GET_MODE_SIZE (Pmode);
9588  if (struct_value_rtx)
9589    size += GET_MODE_SIZE (Pmode);
9590
9591  /* Save each register used in calling a function to the block.  */
9592  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9593    if ((mode = apply_args_mode[regno]) != VOIDmode)
9594      {
9595	rtx tem;
9596
9597	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9598	if (size % align != 0)
9599	  size = CEIL (size, align) * align;
9600
9601	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9602
9603#ifdef STACK_REGS
9604        /* For reg-stack.c's stack register housekeeping.
9605	   Compare with a similar piece of code in function.c.  */
9606
9607        emit_insn (gen_rtx_USE (mode, tem));
9608#endif
9609
9610	emit_move_insn (change_address (registers, mode,
9611					plus_constant (XEXP (registers, 0),
9612						       size)),
9613			tem);
9614	size += GET_MODE_SIZE (mode);
9615      }
9616
9617  /* Save the arg pointer to the block.  */
9618  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9619		  copy_to_reg (virtual_incoming_args_rtx));
9620  size = GET_MODE_SIZE (Pmode);
9621
9622  /* Save the structure value address unless this is passed as an
9623     "invisible" first argument.  */
9624  if (struct_value_incoming_rtx)
9625    {
9626      emit_move_insn (change_address (registers, Pmode,
9627				      plus_constant (XEXP (registers, 0),
9628						     size)),
9629		      copy_to_reg (struct_value_incoming_rtx));
9630      size += GET_MODE_SIZE (Pmode);
9631    }
9632
9633  /* Return the address of the block.  */
9634  return copy_addr_to_reg (XEXP (registers, 0));
9635}
9636
9637/* Perform an untyped call and save the state required to perform an
9638   untyped return of whatever value was returned by the given function.  */
9639
9640static rtx
9641expand_builtin_apply (function, arguments, argsize)
9642     rtx function, arguments, argsize;
9643{
9644  int size, align, regno;
9645  enum machine_mode mode;
9646  rtx incoming_args, result, reg, dest, call_insn;
9647  rtx old_stack_level = 0;
9648  rtx call_fusage = 0;
9649
9650  /* Create a block where the return registers can be saved.  */
9651  result = assign_stack_local (BLKmode, apply_result_size (), -1);
9652
9653  /* ??? The argsize value should be adjusted here.  */
9654
9655  /* Fetch the arg pointer from the ARGUMENTS block.  */
9656  incoming_args = gen_reg_rtx (Pmode);
9657  emit_move_insn (incoming_args,
9658		  gen_rtx_MEM (Pmode, arguments));
9659#ifndef STACK_GROWS_DOWNWARD
9660  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9661				incoming_args, 0, OPTAB_LIB_WIDEN);
9662#endif
9663
9664  /* Perform postincrements before actually calling the function.  */
9665  emit_queue ();
9666
9667  /* Push a new argument block and copy the arguments.  */
9668  do_pending_stack_adjust ();
9669
9670  /* Save the stack with the nonlocal save mechanism, if available.  */
9671#ifdef HAVE_save_stack_nonlocal
9672  if (HAVE_save_stack_nonlocal)
9673    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9674  else
9675#endif
9676    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9677
9678  /* Push a block of memory onto the stack to store the memory arguments.
9679     Save the address in a register, and copy the memory arguments.  ??? I
9680     haven't figured out how the calling convention macros affect this,
9681     but it's likely that the source and/or destination addresses in
9682     the block copy will need updating in machine specific ways.  */
9683  dest = allocate_dynamic_stack_space (argsize, 0, 0);
9684  emit_block_move (gen_rtx_MEM (BLKmode, dest),
9685		   gen_rtx_MEM (BLKmode, incoming_args),
9686		   argsize,
9687		   PARM_BOUNDARY / BITS_PER_UNIT);
9688
9689  /* Refer to the argument block; apply_args_size initializes apply_args_mode.  */
9690  apply_args_size ();
9691  arguments = gen_rtx_MEM (BLKmode, arguments);
9692
9693  /* Walk past the arg-pointer and structure value address.  */
9694  size = GET_MODE_SIZE (Pmode);
9695  if (struct_value_rtx)
9696    size += GET_MODE_SIZE (Pmode);
9697
9698  /* Restore each of the registers previously saved.  Make USE insns
9699     for each of these registers for use in making the call.  */
9700  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9701    if ((mode = apply_args_mode[regno]) != VOIDmode)
9702      {
9703	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9704	if (size % align != 0)
9705	  size = CEIL (size, align) * align;
9706	reg = gen_rtx_REG (mode, regno);
9707	emit_move_insn (reg,
9708			change_address (arguments, mode,
9709					plus_constant (XEXP (arguments, 0),
9710						       size)));
9711
9712	use_reg (&call_fusage, reg);
9713	size += GET_MODE_SIZE (mode);
9714      }
9715
9716  /* Restore the structure value address unless this is passed as an
9717     "invisible" first argument.  */
9718  size = GET_MODE_SIZE (Pmode);
9719  if (struct_value_rtx)
9720    {
9721      rtx value = gen_reg_rtx (Pmode);
9722      emit_move_insn (value,
9723		      change_address (arguments, Pmode,
9724				      plus_constant (XEXP (arguments, 0),
9725						     size)));
9726      emit_move_insn (struct_value_rtx, value);
9727      if (GET_CODE (struct_value_rtx) == REG)
9728	  use_reg (&call_fusage, struct_value_rtx);
9729      size += GET_MODE_SIZE (Pmode);
9730    }
9731
9732  /* All arguments and registers used for the call are set up by now!  */
9733  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9734
9735  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
9736     and we don't want to load it into a register as an optimization,
9737     because prepare_call_address already did it if it should be done.  */
9738  if (GET_CODE (function) != SYMBOL_REF)
9739    function = memory_address (FUNCTION_MODE, function);
9740
9741  /* Generate the actual call instruction and save the return value.  */
9742#ifdef HAVE_untyped_call
9743  if (HAVE_untyped_call)
9744    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9745				      result, result_vector (1, result)));
9746  else
9747#endif
9748#ifdef HAVE_call_value
9749  if (HAVE_call_value)
9750    {
9751      rtx valreg = 0;
9752
9753      /* Locate the unique return register.  It is not possible to
9754	 express a call that sets more than one return register using
9755	 call_value; use untyped_call for that.  In fact, untyped_call
9756	 only needs to save the return registers in the given block.  */
9757      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9758	if ((mode = apply_result_mode[regno]) != VOIDmode)
9759	  {
9760	    if (valreg)
9761	      abort (); /* HAVE_untyped_call required.  */
9762	    valreg = gen_rtx_REG (mode, regno);
9763	  }
9764
9765      emit_call_insn (gen_call_value (valreg,
9766				      gen_rtx_MEM (FUNCTION_MODE, function),
9767				      const0_rtx, NULL_RTX, const0_rtx));
9768
9769      emit_move_insn (change_address (result, GET_MODE (valreg),
9770				      XEXP (result, 0)),
9771		      valreg);
9772    }
9773  else
9774#endif
9775    abort ();
9776
9777  /* Find the CALL insn we just emitted.  */
9778  for (call_insn = get_last_insn ();
9779       call_insn && GET_CODE (call_insn) != CALL_INSN;
9780       call_insn = PREV_INSN (call_insn))
9781    ;
9782
9783  if (! call_insn)
9784    abort ();
9785
9786  /* Put the register usage information on the CALL.  If there is already
9787     some usage information, put ours at the end.  */
9788  if (CALL_INSN_FUNCTION_USAGE (call_insn))
9789    {
9790      rtx link;
9791
9792      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9793	   link = XEXP (link, 1))
9794	;
9795
9796      XEXP (link, 1) = call_fusage;
9797    }
9798  else
9799    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9800
9801  /* Restore the stack.  */
9802#ifdef HAVE_save_stack_nonlocal
9803  if (HAVE_save_stack_nonlocal)
9804    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9805  else
9806#endif
9807    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9808
9809  /* Return the address of the result block.  */
9810  return copy_addr_to_reg (XEXP (result, 0));
9811}
9812
9813/* Perform an untyped return.  */
9814
9815static void
9816expand_builtin_return (result)
9817     rtx result;
9818{
9819  int size, align, regno;
9820  enum machine_mode mode;
9821  rtx reg;
9822  rtx call_fusage = 0;
9823
9824  apply_result_size ();	/* Called to initialize apply_result_mode.  */
9825  result = gen_rtx_MEM (BLKmode, result);
9826
9827#ifdef HAVE_untyped_return
9828  if (HAVE_untyped_return)
9829    {
9830      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9831      emit_barrier ();
9832      return;
9833    }
9834#endif
9835
9836  /* Restore the return value and note that each value is used.  */
9837  size = 0;
9838  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9839    if ((mode = apply_result_mode[regno]) != VOIDmode)
9840      {
9841	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9842	if (size % align != 0)
9843	  size = CEIL (size, align) * align;
9844	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9845	emit_move_insn (reg,
9846			change_address (result, mode,
9847					plus_constant (XEXP (result, 0),
9848						       size)));
9849
9850	push_to_sequence (call_fusage);
9851	emit_insn (gen_rtx_USE (VOIDmode, reg));
9852	call_fusage = get_insns ();
9853	end_sequence ();
9854	size += GET_MODE_SIZE (mode);
9855      }
9856
9857  /* Put the USE insns before the return.  */
9858  emit_insns (call_fusage);
9859
9860  /* Return whatever value was restored by jumping directly to the end
9861     of the function.  */
9862  expand_null_return ();
9863}
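
/* Taken together, the three expanders above implement GCC's untyped
   call mechanism.  A minimal sketch of the intended source-level use;
   `target' and the argument-block size 128 are assumptions for the
   example, not taken from this file:

	extern void (*target) ();

	void
	forward ()
	{
	  void *args = __builtin_apply_args ();
	  void *result = __builtin_apply (target, args, 128);
	  __builtin_return (result);
	}

   __builtin_apply_args saves the incoming registers through
   expand_builtin_apply_args, __builtin_apply replays them around a
   call to TARGET through expand_builtin_apply, and __builtin_return
   hands the callee's return registers back through
   expand_builtin_return.  */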
9864
9865/* Expand code for a post- or pre- increment or decrement
9866   and return the RTX for the result.
9867   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
9868
9869static rtx
9870expand_increment (exp, post, ignore)
9871     register tree exp;
9872     int post, ignore;
9873{
9874  register rtx op0, op1;
9875  register rtx temp, value;
9876  register tree incremented = TREE_OPERAND (exp, 0);
9877  optab this_optab = add_optab;
9878  int icode;
9879  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9880  int op0_is_copy = 0;
9881  int single_insn = 0;
9882  /* 1 means we can't store into OP0 directly,
9883     because it is a subreg narrower than a word,
9884     and we don't dare clobber the rest of the word.  */
9885  int bad_subreg = 0;
9886
9887  /* Stabilize any component ref that might need to be
9888     evaluated more than once below.  */
9889  if (!post
9890      || TREE_CODE (incremented) == BIT_FIELD_REF
9891      || (TREE_CODE (incremented) == COMPONENT_REF
9892	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9893	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9894    incremented = stabilize_reference (incremented);
9895  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9896     ones into save exprs so that they don't accidentally get evaluated
9897     more than once by the code below.  */
9898  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9899      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9900    incremented = save_expr (incremented);
9901
9902  /* Compute the operands as RTX.
9903     Note whether OP0 is the actual lvalue or a copy of it:
9904     I believe it is a copy iff it is a register or subreg
9905     and insns were generated in computing it.  */
9906
9907  temp = get_last_insn ();
9908  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9909
9910  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9911     in place but instead must do sign- or zero-extension during assignment,
9912     so we copy it into a new register and let the code below use it as
9913     a copy.
9914
9915     Note that we can safely modify this SUBREG since it is known not to be
9916     shared (it was made by the expand_expr call above).  */
9917
9918  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9919    {
9920      if (post)
9921	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9922      else
9923	bad_subreg = 1;
9924    }
9925  else if (GET_CODE (op0) == SUBREG
9926	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9927    {
9928      /* We cannot increment this SUBREG in place.  If we are
9929	 post-incrementing, get a copy of the old value.  Otherwise,
9930	 just mark that we cannot increment in place.  */
9931      if (post)
9932	op0 = copy_to_reg (op0);
9933      else
9934	bad_subreg = 1;
9935    }
9936
9937  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9938		 && temp != get_last_insn ());
9939  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9940		     EXPAND_MEMORY_USE_BAD);
9941
9942  /* Decide whether incrementing or decrementing.  */
9943  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9944      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9945    this_optab = sub_optab;
9946
9947  /* Convert decrement by a constant into a negative increment.  */
9948  if (this_optab == sub_optab
9949      && GET_CODE (op1) == CONST_INT)
9950    {
9951      op1 = GEN_INT (- INTVAL (op1));
9952      this_optab = add_optab;
9953    }
9954
9955  /* For a preincrement, see if we can do this with a single instruction.  */
9956  if (!post)
9957    {
9958      icode = (int) this_optab->handlers[(int) mode].insn_code;
9959      if (icode != (int) CODE_FOR_nothing
9960	  /* Make sure that OP0 is valid for operands 0 and 1
9961	     of the insn we want to queue.  */
9962	  && (*insn_operand_predicate[icode][0]) (op0, mode)
9963	  && (*insn_operand_predicate[icode][1]) (op0, mode)
9964	  && (*insn_operand_predicate[icode][2]) (op1, mode))
9965	single_insn = 1;
9966    }
9967
9968  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9969     then we cannot just increment OP0.  We must therefore contrive to
9970     increment the original value.  Then, for postincrement, we can return
9971     OP0 since it is a copy of the old value.  For preincrement, expand here
9972     unless we can do it with a single insn.
9973
9974     Likewise if storing directly into OP0 would clobber high bits
9975     we need to preserve (bad_subreg).  */
9976  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9977    {
9978      /* This is the easiest way to increment the value wherever it is.
9979	 Problems with multiple evaluation of INCREMENTED are prevented
9980	 because either (1) it is a component_ref or preincrement,
9981	 in which case it was stabilized above, or (2) it is an array_ref
9982	 with constant index in an array in a register, which is
9983	 safe to reevaluate.  */
9984      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9985			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9986			    ? MINUS_EXPR : PLUS_EXPR),
9987			   TREE_TYPE (exp),
9988			   incremented,
9989			   TREE_OPERAND (exp, 1));
9990
9991      while (TREE_CODE (incremented) == NOP_EXPR
9992	     || TREE_CODE (incremented) == CONVERT_EXPR)
9993	{
9994	  newexp = convert (TREE_TYPE (incremented), newexp);
9995	  incremented = TREE_OPERAND (incremented, 0);
9996	}
9997
9998      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9999      return post ? op0 : temp;
10000    }
10001
10002  if (post)
10003    {
10004      /* We have a true reference to the value in OP0.
10005	 If there is an insn to add or subtract in this mode, queue it.
10006	 Queueing the increment insn avoids the register shuffling
10007	 that often results if we must increment now and first save
10008	 the old value for subsequent use.  */
10009
10010#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
10011      op0 = stabilize (op0);
10012#endif
10013
10014      icode = (int) this_optab->handlers[(int) mode].insn_code;
10015      if (icode != (int) CODE_FOR_nothing
10016	  /* Make sure that OP0 is valid for operands 0 and 1
10017	     of the insn we want to queue.  */
10018	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10019	  && (*insn_operand_predicate[icode][1]) (op0, mode))
10020	{
10021	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10022	    op1 = force_reg (mode, op1);
10023
10024	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10025	}
10026      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10027	{
10028	  rtx addr = (general_operand (XEXP (op0, 0), mode)
10029		      ? force_reg (Pmode, XEXP (op0, 0))
10030		      : copy_to_reg (XEXP (op0, 0)));
10031	  rtx temp, result;
10032
10033	  op0 = change_address (op0, VOIDmode, addr);
10034	  temp = force_reg (GET_MODE (op0), op0);
10035	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10036	    op1 = force_reg (mode, op1);
10037
10038	  /* The increment queue is LIFO, thus we have to `queue'
10039	     the instructions in reverse order.  */
10040	  enqueue_insn (op0, gen_move_insn (op0, temp));
10041	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10042	  return result;
10043	}
10044    }
10045
10046  /* Preincrement, or we can't increment with one simple insn.  */
10047  if (post)
10048    /* Save a copy of the value before inc or dec, to return it later.  */
10049    temp = value = copy_to_reg (op0);
10050  else
10051    /* Arrange to return the incremented value.  */
10052    /* Copy the rtx because expand_binop will protect from the queue,
10053       and the results of that would be invalid for us to return
10054       if our caller does emit_queue before using our result.  */
10055    temp = copy_rtx (value = op0);
10056
10057  /* Increment however we can.  */
10058  op1 = expand_binop (mode, this_optab, value, op1,
10059  		      flag_check_memory_usage ? NULL_RTX : op0,
10060		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10061  /* Make sure the value is stored into OP0.  */
10062  if (op1 != op0)
10063    emit_move_insn (op0, op1);
10064
10065  return temp;
10066}
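
/* A sketch of what the expansion above amounts to, assuming the
   incremented object lives in a register and an add insn exists for
   its mode (illustrative only; the real insns depend on the target):

	pre-increment  ++x:	(set x (plus x 1))	result is X
	post-increment x++:	copy X into TEMP
				(set x (plus x 1))	queued
				result is TEMP, the old value

   In the post case the add is registered with enqueue_insn, so the
   containing expression sees the old value and the increment is only
   emitted when emit_queue runs.  */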
10067
10068/* Expand all function calls contained within EXP, innermost ones first.
10069   But don't look within expressions that have sequence points.
10070   For each CALL_EXPR, record the rtx for its value
10071   in the CALL_EXPR_RTL field.  */
10072
10073static void
10074preexpand_calls (exp)
10075     tree exp;
10076{
10077  register int nops, i;
10078  int type = TREE_CODE_CLASS (TREE_CODE (exp));
10079
10080  if (! do_preexpand_calls)
10081    return;
10082
10083  /* Only expressions and references can contain calls.  */
10084
10085  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10086    return;
10087
10088  switch (TREE_CODE (exp))
10089    {
10090    case CALL_EXPR:
10091      /* Do nothing if already expanded.  */
10092      if (CALL_EXPR_RTL (exp) != 0
10093	  /* Do nothing if the call returns a variable-sized object.  */
10094	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10095	  /* Do nothing to built-in functions.  */
10096	  || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10097	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10098		  == FUNCTION_DECL)
10099	      && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10100	return;
10101
10102      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10103      return;
10104
10105    case COMPOUND_EXPR:
10106    case COND_EXPR:
10107    case TRUTH_ANDIF_EXPR:
10108    case TRUTH_ORIF_EXPR:
10109      /* If we find one of these, then we can be sure
10110	 the adjust will be done for it (since it makes jumps).
10111	 Do it now, so that if this is inside an argument
10112	 of a function, we don't get the stack adjustment
10113	 after some other args have already been pushed.  */
10114      do_pending_stack_adjust ();
10115      return;
10116
10117    case BLOCK:
10118    case RTL_EXPR:
10119    case WITH_CLEANUP_EXPR:
10120    case CLEANUP_POINT_EXPR:
10121    case TRY_CATCH_EXPR:
10122      return;
10123
10124    case SAVE_EXPR:
10125      if (SAVE_EXPR_RTL (exp) != 0)
10126	return;
10127
10128    default:
10129      break;
10130    }
10131
10132  nops = tree_code_length[(int) TREE_CODE (exp)];
10133  for (i = 0; i < nops; i++)
10134    if (TREE_OPERAND (exp, i) != 0)
10135      {
10136	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10137	if (type == 'e' || type == '<' || type == '1' || type == '2'
10138	    || type == 'r')
10139	  preexpand_calls (TREE_OPERAND (exp, i));
10140      }
10141}
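
/* For example (a sketch; f, g and h are hypothetical functions),
   given the expression

	f (g (x)) + h (y)

   preexpand_calls expands the calls before the PLUS_EXPR itself is
   expanded, recording each value in CALL_EXPR_RTL; g (x) is expanded
   in the course of expanding f's arguments, so innermost calls come
   first.  The walk deliberately stops at COND_EXPR and the TRUTH_*
   codes above, which have sequence points and may never evaluate
   their later operands at all.  */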
10142
10143/* At the start of a function, record that we have no previously-pushed
10144   arguments waiting to be popped.  */
10145
10146void
10147init_pending_stack_adjust ()
10148{
10149  pending_stack_adjust = 0;
10150}
10151
10152/* When exiting from function, if safe, clear out any pending stack adjust
10153   so the adjustment won't get done.
10154
10155   Note, if the current function calls alloca, then it must have a
10156   frame pointer regardless of the value of flag_omit_frame_pointer.  */
10157
10158void
10159clear_pending_stack_adjust ()
10160{
10161#ifdef EXIT_IGNORE_STACK
10162  if (optimize > 0
10163      && (! flag_omit_frame_pointer || current_function_calls_alloca)
10164      && EXIT_IGNORE_STACK
10165      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10166      && ! flag_inline_functions)
10167    pending_stack_adjust = 0;
10168#endif
10169}
10170
10171/* Pop any previously-pushed arguments that have not been popped yet.  */
10172
10173void
10174do_pending_stack_adjust ()
10175{
10176  if (inhibit_defer_pop == 0)
10177    {
10178      if (pending_stack_adjust != 0)
10179	adjust_stack (GEN_INT (pending_stack_adjust));
10180      pending_stack_adjust = 0;
10181    }
10182}
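
/* As an illustration with invented numbers: if two calls in a row
   each push 8 bytes of arguments and their pops are deferred,
   pending_stack_adjust accumulates to 16, and the adjust_stack call
   above then reclaims both argument blocks with a single stack
   adjustment instead of one per call.  */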
10183
10184/* Expand conditional expressions.  */
10185
10186/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10187   LABEL is an rtx of code CODE_LABEL, in this function and all the
10188   functions here.  */
10189
10190void
10191jumpifnot (exp, label)
10192     tree exp;
10193     rtx label;
10194{
10195  do_jump (exp, label, NULL_RTX);
10196}
10197
10198/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
10199
10200void
10201jumpif (exp, label)
10202     tree exp;
10203     rtx label;
10204{
10205  do_jump (exp, NULL_RTX, label);
10206}
10207
10208/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10209   the result is zero, or IF_TRUE_LABEL if the result is one.
10210   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10211   meaning fall through in that case.
10212
10213   do_jump always does any pending stack adjust except when it does not
10214   actually perform a jump.  An example where there is no jump
10215   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10216
10217   This function is responsible for optimizing cases such as
10218   &&, || and comparison operators in EXP.  */
10219
10220void
10221do_jump (exp, if_false_label, if_true_label)
10222     tree exp;
10223     rtx if_false_label, if_true_label;
10224{
10225  register enum tree_code code = TREE_CODE (exp);
10226  /* Some cases need to create a label to jump to
10227     in order to properly fall through.
10228     These cases set DROP_THROUGH_LABEL nonzero.  */
10229  rtx drop_through_label = 0;
10230  rtx temp;
10231  rtx comparison = 0;
10232  int i;
10233  tree type;
10234  enum machine_mode mode;
10235
10236#ifdef MAX_INTEGER_COMPUTATION_MODE
10237  check_max_integer_computation_mode (exp);
10238#endif
10239
10240  emit_queue ();
10241
10242  switch (code)
10243    {
10244    case ERROR_MARK:
10245      break;
10246
10247    case INTEGER_CST:
10248      temp = integer_zerop (exp) ? if_false_label : if_true_label;
10249      if (temp)
10250	emit_jump (temp);
10251      break;
10252
10253#if 0
10254      /* This is not true with #pragma weak  */
10255    case ADDR_EXPR:
10256      /* The address of something can never be zero.  */
10257      if (if_true_label)
10258	emit_jump (if_true_label);
10259      break;
10260#endif
10261
10262    case NOP_EXPR:
10263      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10264	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10265	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10266	goto normal;
10267    case CONVERT_EXPR:
10268      /* If we are narrowing the operand, we have to do the compare in the
10269	 narrower mode.  */
10270      if ((TYPE_PRECISION (TREE_TYPE (exp))
10271	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10272	goto normal;
10273    case NON_LVALUE_EXPR:
10274    case REFERENCE_EXPR:
10275    case ABS_EXPR:
10276    case NEGATE_EXPR:
10277    case LROTATE_EXPR:
10278    case RROTATE_EXPR:
10279      /* These cannot change zero->non-zero or vice versa.  */
10280      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10281      break;
10282
10283#if 0
10284      /* This is never less insns than evaluating the PLUS_EXPR followed by
10285	 a test and can be longer if the test is eliminated.  */
10286    case PLUS_EXPR:
10287      /* Reduce to minus.  */
10288      exp = build (MINUS_EXPR, TREE_TYPE (exp),
10289		   TREE_OPERAND (exp, 0),
10290		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10291				 TREE_OPERAND (exp, 1))));
10292      /* Process as MINUS.  */
10293#endif
10294
10295    case MINUS_EXPR:
10296      /* Non-zero iff operands of minus differ.  */
10297      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10298				   TREE_OPERAND (exp, 0),
10299				   TREE_OPERAND (exp, 1)),
10300			    NE, NE);
10301      break;
10302
10303    case BIT_AND_EXPR:
10304      /* If we are AND'ing with a small constant, do this comparison in the
10305	 smallest type that fits.  If the machine doesn't have comparisons
10306	 that small, it will be converted back to the wider comparison.
10307	 This helps if we are testing the sign bit of a narrower object.
10308	 combine can't do this for us because it can't know whether a
10309	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
10310
10311      if (! SLOW_BYTE_ACCESS
10312	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10313	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10314	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10315	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10316	  && (type = type_for_mode (mode, 1)) != 0
10317	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10318	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10319	      != CODE_FOR_nothing))
10320	{
10321	  do_jump (convert (type, exp), if_false_label, if_true_label);
10322	  break;
10323	}
10324      goto normal;
10325
10326    case TRUTH_NOT_EXPR:
10327      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10328      break;
10329
10330    case TRUTH_ANDIF_EXPR:
10331      if (if_false_label == 0)
10332	if_false_label = drop_through_label = gen_label_rtx ();
10333      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10334      start_cleanup_deferral ();
10335      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10336      end_cleanup_deferral ();
10337      break;
10338
10339    case TRUTH_ORIF_EXPR:
10340      if (if_true_label == 0)
10341	if_true_label = drop_through_label = gen_label_rtx ();
10342      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10343      start_cleanup_deferral ();
10344      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10345      end_cleanup_deferral ();
10346      break;
10347
10348    case COMPOUND_EXPR:
10349      push_temp_slots ();
10350      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10351      preserve_temp_slots (NULL_RTX);
10352      free_temp_slots ();
10353      pop_temp_slots ();
10354      emit_queue ();
10355      do_pending_stack_adjust ();
10356      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10357      break;
10358
10359    case COMPONENT_REF:
10360    case BIT_FIELD_REF:
10361    case ARRAY_REF:
10362      {
10363	int bitsize, bitpos, unsignedp;
10364	enum machine_mode mode;
10365	tree type;
10366	tree offset;
10367	int volatilep = 0;
10368	int alignment;
10369
10370	/* Get description of this reference.  We don't actually care
10371	   about the underlying object here.  */
10372	get_inner_reference (exp, &bitsize, &bitpos, &offset,
10373			     &mode, &unsignedp, &volatilep,
10374			     &alignment);
10375
10376	type = type_for_size (bitsize, unsignedp);
10377	if (! SLOW_BYTE_ACCESS
10378	    && type != 0 && bitsize >= 0
10379	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10380	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10381		!= CODE_FOR_nothing))
10382	  {
10383	    do_jump (convert (type, exp), if_false_label, if_true_label);
10384	    break;
10385	  }
10386	goto normal;
10387      }
10388
10389    case COND_EXPR:
10390      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
10391      if (integer_onep (TREE_OPERAND (exp, 1))
10392	  && integer_zerop (TREE_OPERAND (exp, 2)))
10393	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10394
10395      else if (integer_zerop (TREE_OPERAND (exp, 1))
10396	       && integer_onep (TREE_OPERAND (exp, 2)))
10397	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10398
10399      else
10400	{
10401	  register rtx label1 = gen_label_rtx ();
10402	  drop_through_label = gen_label_rtx ();
10403
10404	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10405
10406	  start_cleanup_deferral ();
10407	  /* Now the THEN-expression.  */
10408	  do_jump (TREE_OPERAND (exp, 1),
10409		   if_false_label ? if_false_label : drop_through_label,
10410		   if_true_label ? if_true_label : drop_through_label);
10411	  /* In case the do_jump just above never jumps.  */
10412	  do_pending_stack_adjust ();
10413	  emit_label (label1);
10414
10415	  /* Now the ELSE-expression.  */
10416	  do_jump (TREE_OPERAND (exp, 2),
10417		   if_false_label ? if_false_label : drop_through_label,
10418		   if_true_label ? if_true_label : drop_through_label);
10419	  end_cleanup_deferral ();
10420	}
10421      break;
10422
10423    case EQ_EXPR:
10424      {
10425	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10426
10427	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10428	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10429	  {
10430	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10431	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10432	    do_jump
10433	      (fold
10434	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10435		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10436				    fold (build1 (REALPART_EXPR,
10437						  TREE_TYPE (inner_type),
10438						  exp0)),
10439				    fold (build1 (REALPART_EXPR,
10440						  TREE_TYPE (inner_type),
10441						  exp1)))),
10442		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10443				    fold (build1 (IMAGPART_EXPR,
10444						  TREE_TYPE (inner_type),
10445						  exp0)),
10446				    fold (build1 (IMAGPART_EXPR,
10447						  TREE_TYPE (inner_type),
10448						  exp1)))))),
10449	       if_false_label, if_true_label);
10450	  }
10451
10452	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10453	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10454
10455	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10456		 && !can_compare_p (TYPE_MODE (inner_type)))
10457	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10458	else
10459	  comparison = compare (exp, EQ, EQ);
10460	break;
10461      }
10462
10463    case NE_EXPR:
10464      {
10465	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10466
10467	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10468	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10469	  {
10470	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10471	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10472	    do_jump
10473	      (fold
10474	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10475		       fold (build (NE_EXPR, TREE_TYPE (exp),
10476				    fold (build1 (REALPART_EXPR,
10477						  TREE_TYPE (inner_type),
10478						  exp0)),
10479				    fold (build1 (REALPART_EXPR,
10480						  TREE_TYPE (inner_type),
10481						  exp1)))),
10482		       fold (build (NE_EXPR, TREE_TYPE (exp),
10483				    fold (build1 (IMAGPART_EXPR,
10484						  TREE_TYPE (inner_type),
10485						  exp0)),
10486				    fold (build1 (IMAGPART_EXPR,
10487						  TREE_TYPE (inner_type),
10488						  exp1)))))),
10489	       if_false_label, if_true_label);
10490	  }
10491
10492	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10493	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10494
10495	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10496		 && !can_compare_p (TYPE_MODE (inner_type)))
10497	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10498	else
10499	  comparison = compare (exp, NE, NE);
10500	break;
10501      }
10502
10503    case LT_EXPR:
10504      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10505	   == MODE_INT)
10506	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10507	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10508      else
10509	comparison = compare (exp, LT, LTU);
10510      break;
10511
10512    case LE_EXPR:
10513      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10514	   == MODE_INT)
10515	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10516	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10517      else
10518	comparison = compare (exp, LE, LEU);
10519      break;
10520
10521    case GT_EXPR:
10522      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10523	   == MODE_INT)
10524	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10525	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10526      else
10527	comparison = compare (exp, GT, GTU);
10528      break;
10529
10530    case GE_EXPR:
10531      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10532	   == MODE_INT)
10533	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10534	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10535      else
10536	comparison = compare (exp, GE, GEU);
10537      break;
10538
10539    default:
10540    normal:
10541      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10542#if 0
10543      /* This is not needed any more and causes poor code since it causes
10544	 comparisons and tests from non-SI objects to have different code
10545	 sequences.  */
10546      /* Copy to register to avoid generating bad insns by cse
10547	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
10548      if (!cse_not_expected && GET_CODE (temp) == MEM)
10549	temp = copy_to_reg (temp);
10550#endif
10551      do_pending_stack_adjust ();
10552      if (GET_CODE (temp) == CONST_INT)
10553	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10554      else if (GET_CODE (temp) == LABEL_REF)
10555	comparison = const_true_rtx;
10556      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10557	       && !can_compare_p (GET_MODE (temp)))
10558	/* Note swapping the labels gives us not-equal.  */
10559	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10560      else if (GET_MODE (temp) != VOIDmode)
10561	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10562				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10563				       GET_MODE (temp), NULL_RTX, 0);
10564      else
10565	abort ();
10566    }
10567
10568  /* Do any postincrements in the expression that was tested.  */
10569  emit_queue ();
10570
10571  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10572     straight into a conditional jump instruction as the jump condition.
10573     Otherwise, all the work has been done already.  */
10574
10575  if (comparison == const_true_rtx)
10576    {
10577      if (if_true_label)
10578	emit_jump (if_true_label);
10579    }
10580  else if (comparison == const0_rtx)
10581    {
10582      if (if_false_label)
10583	emit_jump (if_false_label);
10584    }
10585  else if (comparison)
10586    do_jump_for_compare (comparison, if_false_label, if_true_label);
10587
10588  if (drop_through_label)
10589    {
10590      /* If do_jump produces code that might be jumped around,
10591	 do any stack adjusts from that code, before the place
10592	 where control merges in.  */
10593      do_pending_stack_adjust ();
10594      emit_label (drop_through_label);
10595    }
10596}
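
/* A sketch of the control flow do_jump builds for a short-circuit
   operator.  For the C fragment

	if (a && b)
	  stmt;

   the TRUTH_ANDIF_EXPR case above produces, in outline,

	do_jump (a, else_label, NULL_RTX);	jump away if A is zero
	do_jump (b, else_label, NULL_RTX);	jump away if B is zero
	... code for STMT ...
     else_label:

   so neither operand is computed as a value, and B is never
   evaluated when A is zero.  */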
10597
10598/* Given a comparison expression EXP for values too wide to be compared
10599   with one insn, test the comparison and jump to the appropriate label.
10600   The code of EXP is ignored; we always test GT if SWAP is 0,
10601   and LT if SWAP is 1.  */
10602
10603static void
10604do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10605     tree exp;
10606     int swap;
10607     rtx if_false_label, if_true_label;
10608{
10609  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10610  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10611  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10612  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10613  rtx drop_through_label = 0;
10614  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10615  int i;
10616
10617  if (! if_true_label || ! if_false_label)
10618    drop_through_label = gen_label_rtx ();
10619  if (! if_true_label)
10620    if_true_label = drop_through_label;
10621  if (! if_false_label)
10622    if_false_label = drop_through_label;
10623
10624  /* Compare a word at a time, high order first.  */
10625  for (i = 0; i < nwords; i++)
10626    {
10627      rtx comp;
10628      rtx op0_word, op1_word;
10629
10630      if (WORDS_BIG_ENDIAN)
10631	{
10632	  op0_word = operand_subword_force (op0, i, mode);
10633	  op1_word = operand_subword_force (op1, i, mode);
10634	}
10635      else
10636	{
10637	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10638	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10639	}
10640
10641      /* All but high-order word must be compared as unsigned.  */
10642      comp = compare_from_rtx (op0_word, op1_word,
10643			       (unsignedp || i > 0) ? GTU : GT,
10644			       unsignedp, word_mode, NULL_RTX, 0);
10645      if (comp == const_true_rtx)
10646	emit_jump (if_true_label);
10647      else if (comp != const0_rtx)
10648	do_jump_for_compare (comp, NULL_RTX, if_true_label);
10649
10650      /* Consider lower words only if these are equal.  */
10651      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10652			       NULL_RTX, 0);
10653      if (comp == const_true_rtx)
10654	emit_jump (if_false_label);
10655      else if (comp != const0_rtx)
10656	do_jump_for_compare (comp, NULL_RTX, if_false_label);
10657    }
10658
10659  if (if_false_label)
10660    emit_jump (if_false_label);
10661  if (drop_through_label)
10662    emit_label (drop_through_label);
10663}
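
/* A worked example of the loop above, assuming a 32-bit target on
   which DImode occupies two words.  For a signed `op0 > op1' the
   emitted tests are, in order:

	high words:	GT  -> if_true_label	(signed, high word only)
	high words:	NE  -> if_false_label	(unequal means not greater)
	low words:	GTU -> if_true_label	(lower words are unsigned)
	low words:	NE  -> if_false_label

   followed by an unconditional jump to if_false_label, reached only
   when every word has compared equal.  */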
10664
10665/* Compare OP0 with OP1, word at a time, in mode MODE.
10666   UNSIGNEDP says to do unsigned comparison.
10667   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
10668
10669void
10670do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10671     enum machine_mode mode;
10672     int unsignedp;
10673     rtx op0, op1;
10674     rtx if_false_label, if_true_label;
10675{
10676  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10677  rtx drop_through_label = 0;
10678  int i;
10679
10680  if (! if_true_label || ! if_false_label)
10681    drop_through_label = gen_label_rtx ();
10682  if (! if_true_label)
10683    if_true_label = drop_through_label;
10684  if (! if_false_label)
10685    if_false_label = drop_through_label;
10686
10687  /* Compare a word at a time, high order first.  */
10688  for (i = 0; i < nwords; i++)
10689    {
10690      rtx comp;
10691      rtx op0_word, op1_word;
10692
10693      if (WORDS_BIG_ENDIAN)
10694	{
10695	  op0_word = operand_subword_force (op0, i, mode);
10696	  op1_word = operand_subword_force (op1, i, mode);
10697	}
10698      else
10699	{
10700	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10701	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10702	}
10703
10704      /* All but high-order word must be compared as unsigned.  */
10705      comp = compare_from_rtx (op0_word, op1_word,
10706			       (unsignedp || i > 0) ? GTU : GT,
10707			       unsignedp, word_mode, NULL_RTX, 0);
10708      if (comp == const_true_rtx)
10709	emit_jump (if_true_label);
10710      else if (comp != const0_rtx)
10711	do_jump_for_compare (comp, NULL_RTX, if_true_label);
10712
10713      /* Consider lower words only if these are equal.  */
10714      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10715			       NULL_RTX, 0);
10716      if (comp == const_true_rtx)
10717	emit_jump (if_false_label);
10718      else if (comp != const0_rtx)
10719	do_jump_for_compare (comp, NULL_RTX, if_false_label);
10720    }
10721
10722  if (if_false_label)
10723    emit_jump (if_false_label);
10724  if (drop_through_label)
10725    emit_label (drop_through_label);
10726}
10727
10728/* Given an EQ_EXPR expression EXP for values too wide to be compared
10729   with one insn, test the comparison and jump to the appropriate label.  */
10730
10731static void
10732do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10733     tree exp;
10734     rtx if_false_label, if_true_label;
10735{
10736  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10737  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10738  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10739  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10740  int i;
10741  rtx drop_through_label = 0;
10742
10743  if (! if_false_label)
10744    drop_through_label = if_false_label = gen_label_rtx ();
10745
10746  for (i = 0; i < nwords; i++)
10747    {
10748      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10749				   operand_subword_force (op1, i, mode),
10750				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10751				   word_mode, NULL_RTX, 0);
10752      if (comp == const_true_rtx)
10753	emit_jump (if_false_label);
10754      else if (comp != const0_rtx)
10755	do_jump_for_compare (comp, if_false_label, NULL_RTX);
10756    }
10757
10758  if (if_true_label)
10759    emit_jump (if_true_label);
10760  if (drop_through_label)
10761    emit_label (drop_through_label);
10762}
10763
10764/* Jump according to whether OP0 is 0.
10765   We assume that OP0 has an integer mode that is too wide
10766   for the available compare insns.  */
10767
10768void
10769do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10770     rtx op0;
10771     rtx if_false_label, if_true_label;
10772{
10773  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10774  rtx part;
10775  int i;
10776  rtx drop_through_label = 0;
10777
10778  /* The fastest way of doing this comparison on almost any machine is to
10779     "or" all the words and compare the result.  If all have to be loaded
10780     from memory and this is a very wide item, it's possible this may
10781     be slower, but that's highly unlikely.  */
10782
10783  part = gen_reg_rtx (word_mode);
10784  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10785  for (i = 1; i < nwords && part != 0; i++)
10786    part = expand_binop (word_mode, ior_optab, part,
10787			 operand_subword_force (op0, i, GET_MODE (op0)),
10788			 part, 1, OPTAB_WIDEN);
10789
10790  if (part != 0)
10791    {
10792      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10793				   NULL_RTX, 0);
10794
10795      if (comp == const_true_rtx)
10796	emit_jump (if_false_label);
10797      else if (comp == const0_rtx)
10798	emit_jump (if_true_label);
10799      else
10800	do_jump_for_compare (comp, if_false_label, if_true_label);
10801
10802      return;
10803    }
10804
10805  /* If we couldn't do the "or" simply, do this with a series of compares.  */
10806  if (! if_false_label)
10807    drop_through_label = if_false_label = gen_label_rtx ();
10808
10809  for (i = 0; i < nwords; i++)
10810    {
10811      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10812							  GET_MODE (op0)),
10813				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10814      if (comp == const_true_rtx)
10815	emit_jump (if_false_label);
10816      else if (comp != const0_rtx)
10817	do_jump_for_compare (comp, if_false_label, NULL_RTX);
10818    }
10819
10820  if (if_true_label)
10821    emit_jump (if_true_label);
10822
10823  if (drop_through_label)
10824    emit_label (drop_through_label);
10825}
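
/* The "or all the words" strategy above is the multiword form of a
   familiar source-level trick; as a sketch, with a double-word value
   split into 32-bit halves HI and LO:

	if ((hi | lo) == 0)
	  goto zero_case;

   One IOR per additional word plus a single compare replaces a chain
   of per-word compares and branches; the series-of-compares fallback
   below is only used if the IOR cannot be expanded.  */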
10826
10827/* Given a comparison expression in rtl form, output conditional branches to
10828   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */
10829
10830static void
10831do_jump_for_compare (comparison, if_false_label, if_true_label)
10832     rtx comparison, if_false_label, if_true_label;
10833{
10834  if (if_true_label)
10835    {
10836      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10837	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10838      else
10839	abort ();
10840
10841      if (if_false_label)
10842	emit_jump (if_false_label);
10843    }
10844  else if (if_false_label)
10845    {
10846      rtx insn;
10847      rtx prev = get_last_insn ();
10848      rtx branch = 0;
10849
10850      /* Output the branch with the opposite condition.  Then try to invert
10851	 what is generated.  If more than one insn is a branch, or if the
10852	 branch is not the last insn written, abort.  If we can't invert
10853	 the branch, make a true label, redirect this jump to that,
10854	 emit a jump to the false label and define the true label.  */
10855
10856      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10857	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10858      else
10859	abort ();
10860
10861      /* Here we get the first insn that was just emitted.  It used to be the
10862	 case that, on some machines, emitting the branch would discard
10863	 the previous compare insn and emit a replacement.  This isn't
10864	 done anymore, but abort if we see that PREV is deleted.  */
10865
10866      if (prev == 0)
10867	insn = get_insns ();
10868      else if (INSN_DELETED_P (prev))
10869	abort ();
10870      else
10871	insn = NEXT_INSN (prev);
10872
10873      for (; insn; insn = NEXT_INSN (insn))
10874	if (GET_CODE (insn) == JUMP_INSN)
10875	  {
10876	    if (branch)
10877	      abort ();
10878	    branch = insn;
10879	  }
10880
10881      if (branch != get_last_insn ())
10882	abort ();
10883
10884      JUMP_LABEL (branch) = if_false_label;
10885      if (! invert_jump (branch, if_false_label))
10886	{
10887	  if_true_label = gen_label_rtx ();
10888	  redirect_jump (branch, if_true_label);
10889	  emit_jump (if_false_label);
10890	  emit_label (if_true_label);
10891	}
10892    }
10893}
10894
10895/* Generate code for a comparison expression EXP
10896   (including code to compute the values to be compared)
10897   and set (CC0) according to the result.
10898   SIGNED_CODE should be the rtx operation for this comparison for
10899   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10900
10901   We force a stack adjustment unless there are currently
10902   things pushed on the stack that aren't yet used.  */
10903
10904static rtx
10905compare (exp, signed_code, unsigned_code)
10906     register tree exp;
10907     enum rtx_code signed_code, unsigned_code;
10908{
10909  register rtx op0
10910    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10911  register rtx op1
10912    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10913  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10914  register enum machine_mode mode = TYPE_MODE (type);
10915  int unsignedp = TREE_UNSIGNED (type);
10916  enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10917
10918#ifdef HAVE_canonicalize_funcptr_for_compare
10919  /* If function pointers need to be "canonicalized" before they can
10920     be reliably compared, then canonicalize them.  */
10921  if (HAVE_canonicalize_funcptr_for_compare
10922      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10923      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10924	  == FUNCTION_TYPE))
10925    {
10926      rtx new_op0 = gen_reg_rtx (mode);
10927
10928      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10929      op0 = new_op0;
10930    }
10931
10932  if (HAVE_canonicalize_funcptr_for_compare
10933      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10934      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10935	  == FUNCTION_TYPE))
10936    {
10937      rtx new_op1 = gen_reg_rtx (mode);
10938
10939      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10940      op1 = new_op1;
10941    }
10942#endif
10943
10944  return compare_from_rtx (op0, op1, code, unsignedp, mode,
10945			   ((mode == BLKmode)
10946			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10947			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10948}
10949
10950/* Like compare but expects the values to compare as two rtx's.
10951   The decision as to signed or unsigned comparison must be made by the caller.
10952
10953   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10954   compared.
10955
10956   If ALIGN is non-zero, it is the alignment of this type; if zero, the
10957   size of MODE should be used.  */
10958
10959rtx
10960compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10961     register rtx op0, op1;
10962     enum rtx_code code;
10963     int unsignedp;
10964     enum machine_mode mode;
10965     rtx size;
10966     int align;
10967{
10968  rtx tem;
10969
10970  /* If one operand is constant, make it the second one.  Only do this
10971     if the other operand is not constant as well.  */
10972
10973  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10974      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10975    {
10976      tem = op0;
10977      op0 = op1;
10978      op1 = tem;
10979      code = swap_condition (code);
10980    }
10981
10982  if (flag_force_mem)
10983    {
10984      op0 = force_not_mem (op0);
10985      op1 = force_not_mem (op1);
10986    }
10987
10988  do_pending_stack_adjust ();
10989
10990  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10991      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10992    return tem;
10993
10994#if 0
10995  /* There's no need to do this now that combine.c can eliminate lots of
10996     sign extensions.  This can be less efficient in certain cases on other
10997     machines.  */
10998
10999  /* If this is a signed equality comparison, we can do it as an
11000     unsigned comparison since zero-extension is cheaper than sign
11001     extension and comparisons with zero are done as unsigned.  This is
11002     the case even on machines that can do fast sign extension, since
11003     zero-extension is easier to combine with other operations than
11004     sign-extension is.  If we are comparing against a constant, we must
11005     convert it to what it would look like unsigned.  */
11006  if ((code == EQ || code == NE) && ! unsignedp
11007      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11008    {
11009      if (GET_CODE (op1) == CONST_INT
11010	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11011	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11012      unsignedp = 1;
11013    }
11014#endif
11015
11016  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11017
11018  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11019}
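
/* Note that moving a constant into the second operand above must also
   swap the condition.  As a small illustration: comparing
   (const_int 5) against a register R under code GT becomes comparing
   R against (const_int 5) under code LT, because swap_condition
   reverses the roles of the operands, not the sense of the test.  */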
11020
11021/* Generate code to calculate EXP using a store-flag instruction
11022   and return an rtx for the result.  EXP is either a comparison
11023   or a TRUTH_NOT_EXPR whose operand is a comparison.
11024
11025   If TARGET is nonzero, store the result there if convenient.
11026
11027   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11028   cheap.
11029
11030   Return zero if there is no suitable set-flag instruction
11031   available on this machine.
11032
11033   Once expand_expr has been called on the arguments of the comparison,
11034   we are committed to doing the store flag, since it is not safe to
11035   re-evaluate the expression.  We emit the store-flag insn by calling
11036   emit_store_flag, but only expand the arguments if we have a reason
11037   to believe that emit_store_flag will be successful.  If we think that
11038   it will, but it isn't, we have to simulate the store-flag with a
11039   set/jump/set sequence.  */
11040
11041static rtx
11042do_store_flag (exp, target, mode, only_cheap)
11043     tree exp;
11044     rtx target;
11045     enum machine_mode mode;
11046     int only_cheap;
11047{
11048  enum rtx_code code;
11049  tree arg0, arg1, type;
11050  tree tem;
11051  enum machine_mode operand_mode;
11052  int invert = 0;
11053  int unsignedp;
11054  rtx op0, op1;
11055  enum insn_code icode;
11056  rtx subtarget = target;
11057  rtx result, label;
11058
11059  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11060     result at the end.  We can't simply invert the test since it would
11061     have already been inverted if it were valid.  This case occurs for
11062     some floating-point comparisons.  */
11063
11064  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11065    invert = 1, exp = TREE_OPERAND (exp, 0);
11066
11067  arg0 = TREE_OPERAND (exp, 0);
11068  arg1 = TREE_OPERAND (exp, 1);
11069  type = TREE_TYPE (arg0);
11070  operand_mode = TYPE_MODE (type);
11071  unsignedp = TREE_UNSIGNED (type);
11072
11073  /* We won't bother with BLKmode store-flag operations because it would mean
11074     passing a lot of information to emit_store_flag.  */
11075  if (operand_mode == BLKmode)
11076    return 0;
11077
11078  /* We won't bother with store-flag operations involving function pointers
11079     when function pointers must be canonicalized before comparisons.  */
11080#ifdef HAVE_canonicalize_funcptr_for_compare
11081  if (HAVE_canonicalize_funcptr_for_compare
11082      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11083	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11084	       == FUNCTION_TYPE))
11085	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11086	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11087		  == FUNCTION_TYPE))))
11088    return 0;
11089#endif
11090
11091  STRIP_NOPS (arg0);
11092  STRIP_NOPS (arg1);
11093
11094  /* Get the rtx comparison code to use.  We know that EXP is a comparison
11095     operation of some type.  Some comparisons against 1 and -1 can be
11096     converted to comparisons with zero.  Do so here so that the tests
11097     below will be aware that we have a comparison with zero.  These
11098     tests will not catch constants in the first operand, but constants
11099     are rarely passed as the first operand.  */
11100
11101  switch (TREE_CODE (exp))
11102    {
11103    case EQ_EXPR:
11104      code = EQ;
11105      break;
11106    case NE_EXPR:
11107      code = NE;
11108      break;
11109    case LT_EXPR:
11110      if (integer_onep (arg1))
11111	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11112      else
11113	code = unsignedp ? LTU : LT;
11114      break;
11115    case LE_EXPR:
11116      if (! unsignedp && integer_all_onesp (arg1))
11117	arg1 = integer_zero_node, code = LT;
11118      else
11119	code = unsignedp ? LEU : LE;
11120      break;
11121    case GT_EXPR:
11122      if (! unsignedp && integer_all_onesp (arg1))
11123	arg1 = integer_zero_node, code = GE;
11124      else
11125	code = unsignedp ? GTU : GT;
11126      break;
11127    case GE_EXPR:
11128      if (integer_onep (arg1))
11129	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11130      else
11131	code = unsignedp ? GEU : GE;
11132      break;
11133    default:
11134      abort ();
11135    }
11136
11137  /* Put a constant second.  */
11138  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11139    {
11140      tem = arg0; arg0 = arg1; arg1 = tem;
11141      code = swap_condition (code);
11142    }
11143
11144  /* If this is an equality or inequality test of a single bit, we can
11145     do this by shifting the bit being tested to the low-order bit and
11146     masking the result with the constant 1.  If the condition was EQ,
11147     we xor it with 1.  This does not require an scc insn and is faster
11148     than an scc insn even if we have it.  */
11149
11150  if ((code == NE || code == EQ)
11151      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11152      && integer_pow2p (TREE_OPERAND (arg0, 1)))
11153    {
11154      tree inner = TREE_OPERAND (arg0, 0);
11155      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11156      int ops_unsignedp;
11157
11158      /* If INNER is a right shift of a constant and it plus BITNUM does
11159	 not overflow, adjust BITNUM and INNER.  */
11160
11161      if (TREE_CODE (inner) == RSHIFT_EXPR
11162	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11163	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11164	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11165	      < TYPE_PRECISION (type)))
11166	{
11167	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11168	  inner = TREE_OPERAND (inner, 0);
11169	}
11170
11171      /* If we are going to be able to omit the AND below, we must do our
11172	 operations as unsigned.  If we must use the AND, we have a choice.
11173	 Normally unsigned is faster, but for some machines signed is.  */
11174      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11175#ifdef LOAD_EXTEND_OP
11176		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11177#else
11178		       : 1
11179#endif
11180		       );
11181
11182      if (subtarget == 0 || GET_CODE (subtarget) != REG
11183	  || GET_MODE (subtarget) != operand_mode
11184	  || ! safe_from_p (subtarget, inner, 1))
11185	subtarget = 0;
11186
11187      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11188
11189      if (bitnum != 0)
11190	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11191			    size_int (bitnum), subtarget, ops_unsignedp);
11192
11193      if (GET_MODE (op0) != mode)
11194	op0 = convert_to_mode (mode, op0, ops_unsignedp);
11195
11196      if ((code == EQ && ! invert) || (code == NE && invert))
11197	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11198			    ops_unsignedp, OPTAB_LIB_WIDEN);
11199
11200      /* Put the AND last so it can combine with more things.  */
11201      if (bitnum != TYPE_PRECISION (type) - 1)
11202	op0 = expand_and (op0, const1_rtx, subtarget);
11203
11204      return op0;
11205    }
11206
11207  /* Now see if we are likely to be able to do this.  Return if not.  */
11208  if (! can_compare_p (operand_mode))
11209    return 0;
11210  icode = setcc_gen_code[(int) code];
11211  if (icode == CODE_FOR_nothing
11212      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11213    {
11214      /* We can only do this if it is one of the special cases that
11215	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

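  /* The code below emits: target = 1; if (cond) goto label;
     target = 0; label:  -- with the two constants interchanged
     when INVERT is set.  */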
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
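
/* The effect is: if ((unsigned) INDEX > RANGE) goto DEFAULT_LABEL;
   otherwise jump through TABLE_LABEL[INDEX].  */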

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

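  /* For example, if the case values were 5 through 10, the lower bound 5
     has already been subtracted, so RANGE is 5 and a valid INDEX is 0..5.
     An original value of 3 has become (unsigned) -2, which compares
     greater than 5, so it takes the branch to DEFAULT_LABEL.  */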
  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then the INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address,
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
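  /* Fetch the table entry into a register.  The table itself is constant,
     so the MEM is marked unchanging.  */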
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */