/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Under some ABIs, it is the caller's responsibility to pop arguments
   pushed for function calls.  A naive implementation would simply pop
   the arguments immediately after each call.  However, if several
   function calls are made in a row, it is typically cheaper to pop
   all the arguments after all of the calls are complete since a
   single pop instruction can be used.  Therefore, GCC attempts to
   defer popping the arguments until absolutely necessary.  (For
   example, at the end of a conditional, the arguments must be popped,
   since code outside the conditional won't know whether or not the
   arguments need to be popped.)

   When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
   attempt to defer pops.  Instead, the stack is popped immediately
   after each call.  Rather than setting this variable directly, use
   NO_DEFER_POP and OK_DEFER_POP.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static void init_queue		PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					   tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_setjmp PROTO((tree, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns        \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif
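
/* Worked example (illustrative numbers only): a 16-byte, word-aligned
   copy on a 32-bit target takes four SImode moves, so
   MOVE_BY_PIECES_P (16, 4) holds under the generic default MOVE_RATIO
   of 15 and the copy is done inline; with -Os (ratio 3), or on a
   target that provides a movstr pattern (ratio 2), a block-move insn
   or library call is preferred instead.  */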

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (body, 0, 0);
	  emit_insn (body);
	}
      else
	QUEUED_INSN (p) = emit_insn (body);
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Sanity check that the queue is empty at the start of a function.  */

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
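
/* Illustrative sketch, not part of the compiler proper: how the queue
   primitives above combine when expanding something like a
   postincrement.  The helper below is hypothetical; real callers
   (e.g. expand_increment) build the increment with the proper optab.  */
#if 0
static rtx
expand_postinc_sketch (var)
     rtx var;
{
  /* Queue "VAR = VAR + 1" for later; the QUEUED rtx remembers VAR.  */
  rtx queued = enqueue_insn (var, gen_add2_insn (var, const1_rtx));

  /* Get an rtx guaranteed to yield the pre-increment value of VAR,
     whether or not the queue has been flushed by the time it is used.  */
  rtx value = protect_from_queue (queued, 0);

  emit_queue ();		/* The queued increment is emitted here.  */
  return value;
}
#endif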

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no-conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
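
/* Illustrative sketch (hypothetical helper, not part of this file):
   a minimal use of convert_move, widening a signed SImode pseudo into
   a DImode pseudo.  UNSIGNEDP == 0 requests sign extension; passing 1
   would request zero extension instead.  */
#if 0
static rtx
widen_si_to_di_sketch (src)
     rtx src;			/* An SImode value.  */
{
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 0);	/* Emits a sign-extending move.  */
  return dst;
}
#endif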

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
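
/* Illustrative sketch (hypothetical, for exposition): convert_modes
   exists so that the caller can name OLDMODE explicitly, which matters
   for a CONST_INT, whose mode is always VOIDmode.  Reinterpreting -1
   as an unsigned QImode value and widening it to SImode yields 255,
   not -1.  */
#if 0
static rtx
widen_qi_constant_sketch ()
{
  return convert_modes (SImode, QImode, GEN_INT (-1), 1);
  /* Result: (const_int 255).  */
}
#endif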

/* MOVE_MAX_PIECES determines the largest unit size that move_by_pieces
   can use: the number of bytes we can move at a time efficiently, as
   opposed to MOVE_MAX, which is the maximum number of bytes we can move
   with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode.)
   The caller must pass FROM and TO through protect_from_queue
   before calling.  ALIGN (in bytes) is the maximum alignment we can
   assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
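
/* Illustrative sketch (hypothetical helper): copying a small
   constant-size block inline with move_by_pieces.  Both MEMs go
   through protect_from_queue first, as required above; the last
   argument is the alignment in bytes the caller can guarantee.  */
#if 0
static void
copy_16_bytes_sketch (to_addr, from_addr)
     rtx to_addr, from_addr;	/* Pmode addresses.  */
{
  rtx to = protect_from_queue (gen_rtx_MEM (BLKmode, to_addr), 1);
  rtx from = protect_from_queue (gen_rtx_MEM (BLKmode, from_addr), 0);

  move_by_pieces (to, from, 16, UNITS_PER_WORD);
}
#endif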
1535
1536/* Return number of insns required to move L bytes by pieces.
1537   ALIGN (in bytes) is maximum alignment we can assume.  */
1538
1539static int
1540move_by_pieces_ninsns (l, align)
1541     unsigned int l;
1542     int align;
1543{
1544  register int n_insns = 0;
1545  int max_size = MOVE_MAX + 1;
1546
1547  if (! SLOW_UNALIGNED_ACCESS
1548      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1549    align = MOVE_MAX;
1550
1551  while (max_size > 1)
1552    {
1553      enum machine_mode mode = VOIDmode, tmode;
1554      enum insn_code icode;
1555
1556      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1557	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1558	if (GET_MODE_SIZE (tmode) < max_size)
1559	  mode = tmode;
1560
1561      if (mode == VOIDmode)
1562	break;
1563
1564      icode = mov_optab->handlers[(int) mode].insn_code;
1565      if (icode != CODE_FOR_nothing
1566	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1567			   GET_MODE_SIZE (mode)))
1568	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1569
1570      max_size = GET_MODE_SIZE (mode);
1571    }
1572
1573  return n_insns;
1574}
1575
1576/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1577   with move instructions for mode MODE.  GENFUN is the gen_... function
1578   to make a move insn for that mode.  DATA has all the other info.  */
1579
1580static void
1581move_by_pieces_1 (genfun, mode, data)
1582     rtx (*genfun) PROTO ((rtx, ...));
1583     enum machine_mode mode;
1584     struct move_by_pieces *data;
1585{
1586  register int size = GET_MODE_SIZE (mode);
1587  register rtx to1, from1;
1588
1589  while (data->len >= size)
1590    {
1591      if (data->reverse) data->offset -= size;
1592
1593      to1 = (data->autinc_to
1594	     ? gen_rtx_MEM (mode, data->to_addr)
1595	     : copy_rtx (change_address (data->to, mode,
1596					 plus_constant (data->to_addr,
1597							data->offset))));
1598      MEM_IN_STRUCT_P (to1) = data->to_struct;
1599
1600      from1
1601	= (data->autinc_from
1602	   ? gen_rtx_MEM (mode, data->from_addr)
1603	   : copy_rtx (change_address (data->from, mode,
1604				       plus_constant (data->from_addr,
1605						      data->offset))));
1606      MEM_IN_STRUCT_P (from1) = data->from_struct;
1607
1608      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1609	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1610      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1611	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1612
1613      emit_insn ((*genfun) (to1, from1));
1614      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1615	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1616      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1617	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1618
1619      if (! data->reverse) data->offset += size;
1620
1621      data->len -= size;
1622    }
1623}
1624
1625/* Emit code to move a block Y to a block X.
1626   This may be done with string-move instructions,
1627   with multiple scalar move instructions, or with a library call.
1628
1629   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1630   with mode BLKmode.
1631   SIZE is an rtx that says how long they are.
1632   ALIGN is the maximum alignment we can assume they have,
1633   measured in bytes.
1634
1635   Return the address of the new block, if memcpy is called and returns it,
1636   0 otherwise.  */
1637
1638rtx
1639emit_block_move (x, y, size, align)
1640     rtx x, y;
1641     rtx size;
1642     int align;
1643{
1644  rtx retval = 0;
1645#ifdef TARGET_MEM_FUNCTIONS
1646  static tree fn;
1647  tree call_expr, arg_list;
1648#endif
1649
1650  if (GET_MODE (x) != BLKmode)
1651    abort ();
1652
1653  if (GET_MODE (y) != BLKmode)
1654    abort ();
1655
1656  x = protect_from_queue (x, 1);
1657  y = protect_from_queue (y, 0);
1658  size = protect_from_queue (size, 0);
1659
1660  if (GET_CODE (x) != MEM)
1661    abort ();
1662  if (GET_CODE (y) != MEM)
1663    abort ();
1664  if (size == 0)
1665    abort ();
1666
1667  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1668    move_by_pieces (x, y, INTVAL (size), align);
1669  else
1670    {
1671      /* Try the most limited insn first, because there's no point
1672	 including more than one in the machine description unless
1673	 the more limited one has some advantage.  */
1674
1675      rtx opalign = GEN_INT (align);
1676      enum machine_mode mode;
1677
1678      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1679	   mode = GET_MODE_WIDER_MODE (mode))
1680	{
1681	  enum insn_code code = movstr_optab[(int) mode];
1682
1683	  if (code != CODE_FOR_nothing
1684	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1685		 here because if SIZE is less than the mode mask, as it is
1686		 returned by the macro, it will definitely be less than the
1687		 actual mode mask.  */
1688	      && ((GET_CODE (size) == CONST_INT
1689		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1690		       <= (GET_MODE_MASK (mode) >> 1)))
1691		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1692	      && (insn_operand_predicate[(int) code][0] == 0
1693		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1694	      && (insn_operand_predicate[(int) code][1] == 0
1695		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1696	      && (insn_operand_predicate[(int) code][3] == 0
1697		  || (*insn_operand_predicate[(int) code][3]) (opalign,
1698							       VOIDmode)))
1699	    {
1700	      rtx op2;
1701	      rtx last = get_last_insn ();
1702	      rtx pat;
1703
1704	      op2 = convert_to_mode (mode, size, 1);
1705	      if (insn_operand_predicate[(int) code][2] != 0
1706		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1707		op2 = copy_to_mode_reg (mode, op2);
1708
1709	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1710	      if (pat)
1711		{
1712		  emit_insn (pat);
1713		  return 0;
1714		}
1715	      else
1716		delete_insns_since (last);
1717	    }
1718	}
1719
1720      /* X, Y, or SIZE may have been passed through protect_from_queue.
1721
1722	 It is unsafe to save the value generated by protect_from_queue
1723	 and reuse it later.  Consider what happens if emit_queue is
1724	 called before the return value from protect_from_queue is used.
1725
1726	 Expansion of the CALL_EXPR below will call emit_queue before
1727	 we are finished emitting RTL for argument setup.  So if we are
1728	 not careful we could get the wrong value for an argument.
1729
1730	 To avoid this problem we go ahead and emit code to copy X, Y &
1731	 SIZE into new pseudos.  We can then place those new pseudos
1732	 into an RTL_EXPR and use them later, even after a call to
1733	 emit_queue.
1734
1735	 Note this is not strictly needed for library calls since they
1736	 do not call emit_queue before loading their arguments.  However,
1737	 we may need to have library calls call emit_queue in the future
1738	 since failing to do so could cause problems for targets which
1739	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1740      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1741      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1742
1743#ifdef TARGET_MEM_FUNCTIONS
1744      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1745#else
1746      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1747			      TREE_UNSIGNED (integer_type_node));
1748      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1749#endif
1750
1751#ifdef TARGET_MEM_FUNCTIONS
1752      /* It is incorrect to use the libcall calling conventions to call
1753	 memcpy in this context.
1754
1755	 This could be a user call to memcpy and the user may wish to
1756	 examine the return value from memcpy.
1757
1758	 For targets where libcalls and normal calls have different conventions
1759	 for returning pointers, we could end up generating incorrect code.
1760
1761	 So instead of using a libcall sequence we build up a suitable
1762	 CALL_EXPR and expand the call in the normal fashion.  */
1763      if (fn == NULL_TREE)
1764	{
1765	  tree fntype;
1766
1767	  /* This was copied from except.c, I don't know if all this is
1768	     necessary in this context or not.  */
1769	  fn = get_identifier ("memcpy");
1770	  push_obstacks_nochange ();
1771	  end_temporary_allocation ();
1772	  fntype = build_pointer_type (void_type_node);
1773	  fntype = build_function_type (fntype, NULL_TREE);
1774	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1775	  DECL_EXTERNAL (fn) = 1;
1776	  TREE_PUBLIC (fn) = 1;
1777	  DECL_ARTIFICIAL (fn) = 1;
1778	  make_decl_rtl (fn, NULL_PTR, 1);
1779	  assemble_external (fn);
1780	  pop_obstacks ();
1781	}
1782
1783      /* We need to make an argument list for the function call.
1784
	 memcpy has three arguments: the first two are void * addresses and
1786	 the last is a size_t byte count for the copy.  */
1787      arg_list
1788	= build_tree_list (NULL_TREE,
1789			   make_tree (build_pointer_type (void_type_node), x));
1790      TREE_CHAIN (arg_list)
1791	= build_tree_list (NULL_TREE,
1792			   make_tree (build_pointer_type (void_type_node), y));
1793      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
1795      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1796
1797      /* Now we have to build up the CALL_EXPR itself.  */
1798      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1799      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1800			 call_expr, arg_list, NULL_TREE);
1801      TREE_SIDE_EFFECTS (call_expr) = 1;
1802
1803      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1804#else
1805      emit_library_call (bcopy_libfunc, 0,
1806			 VOIDmode, 3, y, Pmode, x, Pmode,
1807			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1808					  TREE_UNSIGNED (integer_type_node)),
1809			 TYPE_MODE (integer_type_node));
1810#endif
1811    }
1812
1813  return retval;
1814}
1815
1816/* Copy all or part of a value X into registers starting at REGNO.
1817   The number of registers to be filled is NREGS.  */
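/* For instance, move_block_to_reg (4, x, 2, DImode) copies the two words
   of a DImode X into hard registers 4 and 5; the register numbers here
   are purely illustrative.  */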
1818
1819void
1820move_block_to_reg (regno, x, nregs, mode)
1821     int regno;
1822     rtx x;
1823     int nregs;
1824     enum machine_mode mode;
1825{
1826  int i;
1827#ifdef HAVE_load_multiple
1828  rtx pat;
1829  rtx last;
1830#endif
1831
1832  if (nregs == 0)
1833    return;
1834
1835  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1836    x = validize_mem (force_const_mem (mode, x));
1837
1838  /* See if the machine can do this with a load multiple insn.  */
1839#ifdef HAVE_load_multiple
1840  if (HAVE_load_multiple)
1841    {
1842      last = get_last_insn ();
1843      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1844			       GEN_INT (nregs));
1845      if (pat)
1846	{
1847	  emit_insn (pat);
1848	  return;
1849	}
1850      else
1851	delete_insns_since (last);
1852    }
1853#endif
1854
1855  for (i = 0; i < nregs; i++)
1856    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1857		    operand_subword_force (x, i, mode));
1858}
1859
1860/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1861   The number of registers to be filled is NREGS.  SIZE indicates the number
1862   of bytes in the object X.  */
1863
1865void
1866move_block_from_reg (regno, x, nregs, size)
1867     int regno;
1868     rtx x;
1869     int nregs;
1870     int size;
1871{
1872  int i;
1873#ifdef HAVE_store_multiple
1874  rtx pat;
1875  rtx last;
1876#endif
1877  enum machine_mode mode;
1878
1879  /* If SIZE is that of a mode no bigger than a word, just use that
1880     mode's store operation.  */
1881  if (size <= UNITS_PER_WORD
1882      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1883    {
1884      emit_move_insn (change_address (x, mode, NULL),
1885		      gen_rtx_REG (mode, regno));
1886      return;
1887    }
1888
1889  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1890     to the left before storing to memory.  Note that the previous test
1891     doesn't handle all cases (e.g. SIZE == 3).  */
1892  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1893    {
1894      rtx tem = operand_subword (x, 0, 1, BLKmode);
1895      rtx shift;
1896
1897      if (tem == 0)
1898	abort ();
1899
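      /* For example (assuming UNITS_PER_WORD == 4), a block of SIZE == 3
	 must be shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits so that
	 its three significant bytes occupy the high-order end of the word
	 before it is stored.  */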
1900      shift = expand_shift (LSHIFT_EXPR, word_mode,
1901			    gen_rtx_REG (word_mode, regno),
1902			    build_int_2 ((UNITS_PER_WORD - size)
1903					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1904      emit_move_insn (tem, shift);
1905      return;
1906    }
1907
1908  /* See if the machine can do this with a store multiple insn.  */
1909#ifdef HAVE_store_multiple
1910  if (HAVE_store_multiple)
1911    {
1912      last = get_last_insn ();
1913      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1914				GEN_INT (nregs));
1915      if (pat)
1916	{
1917	  emit_insn (pat);
1918	  return;
1919	}
1920      else
1921	delete_insns_since (last);
1922    }
1923#endif
1924
1925  for (i = 0; i < nregs; i++)
1926    {
1927      rtx tem = operand_subword (x, i, 1, BLKmode);
1928
1929      if (tem == 0)
1930	abort ();
1931
1932      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1933    }
1934}
1935
1936/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1937   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bytes.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1941   the balance will be in what would be the low-order memory addresses, i.e.
1942   left justified for big endian, right justified for little endian.  This
1943   happens to be true for the targets currently using this support.  If this
1944   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1945   would be needed.  */
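/* For illustration only, a PARALLEL DST describing a 16-byte block split
   across two registers might look roughly like

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   block; the register numbers and modes are hypothetical.  */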
1946
1947void
1948emit_group_load (dst, orig_src, ssize, align)
1949     rtx dst, orig_src;
1950     int align, ssize;
1951{
1952  rtx *tmps, src;
1953  int start, i;
1954
1955  if (GET_CODE (dst) != PARALLEL)
1956    abort ();
1957
1958  /* Check for a NULL entry, used to indicate that the parameter goes
1959     both on the stack and in registers.  */
1960  if (XEXP (XVECEXP (dst, 0, 0), 0))
1961    start = 0;
1962  else
1963    start = 1;
1964
  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1966
1967  /* If we won't be loading directly from memory, protect the real source
1968     from strange tricks we might play.  */
1969  src = orig_src;
1970  if (GET_CODE (src) != MEM)
1971    {
1972      src = gen_reg_rtx (GET_MODE (orig_src));
1973      emit_move_insn (src, orig_src);
1974    }
1975
1976  /* Process the pieces.  */
1977  for (i = start; i < XVECLEN (dst, 0); i++)
1978    {
1979      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1980      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1981      int bytelen = GET_MODE_SIZE (mode);
1982      int shift = 0;
1983
1984      /* Handle trailing fragments that run over the size of the struct.  */
1985      if (ssize >= 0 && bytepos + bytelen > ssize)
1986	{
1987	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1988	  bytelen = ssize - bytepos;
1989	  if (bytelen <= 0)
	    abort ();
1991	}
1992
1993      /* Optimize the access just a bit.  */
1994      if (GET_CODE (src) == MEM
	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1997	  && bytelen == GET_MODE_SIZE (mode))
1998	{
1999	  tmps[i] = gen_reg_rtx (mode);
2000	  emit_move_insn (tmps[i],
2001			  change_address (src, mode,
2002					  plus_constant (XEXP (src, 0),
2003							 bytepos)));
2004	}
2005      else
2006	{
	  tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				       bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2009				       mode, mode, align, ssize);
2010	}
2011
2012      if (BYTES_BIG_ENDIAN && shift)
2013	{
2014	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2015			tmps[i], 0, OPTAB_WIDEN);
2016	}
2017    }
  emit_queue ();
2019
2020  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2021  for (i = start; i < XVECLEN (dst, 0); i++)
2022    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2023}
2024
2025/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2026   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST,
   in bytes.  */
2028
2029void
2030emit_group_store (orig_dst, src, ssize, align)
2031     rtx orig_dst, src;
2032     int ssize, align;
2033{
2034  rtx *tmps, dst;
2035  int start, i;
2036
2037  if (GET_CODE (src) != PARALLEL)
2038    abort ();
2039
2040  /* Check for a NULL entry, used to indicate that the parameter goes
2041     both on the stack and in registers.  */
2042  if (XEXP (XVECEXP (src, 0, 0), 0))
2043    start = 0;
2044  else
2045    start = 1;
2046
  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2048
2049  /* Copy the (probable) hard regs into pseudos.  */
2050  for (i = start; i < XVECLEN (src, 0); i++)
2051    {
2052      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2053      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2054      emit_move_insn (tmps[i], reg);
2055    }
  emit_queue ();
2057
2058  /* If we won't be storing directly into memory, protect the real destination
2059     from strange tricks we might play.  */
2060  dst = orig_dst;
2061  if (GET_CODE (dst) == PARALLEL)
2062    {
2063      rtx temp;
2064
2065      /* We can get a PARALLEL dst if there is a conditional expression in
2066	 a return statement.  In that case, the dst and src are the same,
2067	 so no action is necessary.  */
2068      if (rtx_equal_p (dst, src))
2069	return;
2070
2071      /* It is unclear if we can ever reach here, but we may as well handle
2072	 it.  Allocate a temporary, and split this into a store/load to/from
2073	 the temporary.  */
2074
2075      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2076      emit_group_store (temp, src, ssize, align);
2077      emit_group_load (dst, temp, ssize, align);
2078      return;
2079    }
2080  else if (GET_CODE (dst) != MEM)
2081    {
2082      dst = gen_reg_rtx (GET_MODE (orig_dst));
2083      /* Make life a bit easier for combine.  */
2084      emit_move_insn (dst, const0_rtx);
2085    }
2086  else if (! MEM_IN_STRUCT_P (dst))
2087    {
      /* store_bit_field requires that memory operations have
	 MEM_IN_STRUCT_P set; DST might not.  */
2090
2091      dst = copy_rtx (orig_dst);
2092      MEM_SET_IN_STRUCT_P (dst, 1);
2093    }
2094
2095  /* Process the pieces.  */
2096  for (i = start; i < XVECLEN (src, 0); i++)
2097    {
2098      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2099      enum machine_mode mode = GET_MODE (tmps[i]);
2100      int bytelen = GET_MODE_SIZE (mode);
2101
2102      /* Handle trailing fragments that run over the size of the struct.  */
2103      if (ssize >= 0 && bytepos + bytelen > ssize)
2104	{
2105	  if (BYTES_BIG_ENDIAN)
2106	    {
2107	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2108	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2109			    tmps[i], 0, OPTAB_WIDEN);
2110	    }
2111	  bytelen = ssize - bytepos;
2112	}
2113
2114      /* Optimize the access just a bit.  */
2115      if (GET_CODE (dst) == MEM
	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2118	  && bytelen == GET_MODE_SIZE (mode))
2119	{
2120	  emit_move_insn (change_address (dst, mode,
2121					  plus_constant (XEXP (dst, 0),
2122							 bytepos)),
2123			  tmps[i]);
2124	}
2125      else
2126	{
	  store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2128			   mode, tmps[i], align, ssize);
2129	}
2130    }
  emit_queue ();
2132
2133  /* Copy from the pseudo into the (probable) hard reg.  */
2134  if (GET_CODE (dst) == REG)
2135    emit_move_insn (orig_dst, dst);
2136}
2137
2138/* Generate code to copy a BLKmode object of TYPE out of a
2139   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2140   is null, a stack temporary is created.  TGTBLK is returned.
2141
2142   The primary purpose of this routine is to handle functions
2143   that return BLKmode structures in registers.  Some machines
2144   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
2147
2148rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
2150     rtx tgtblk;
2151     rtx srcreg;
2152     tree type;
2153{
2154      int bytes = int_size_in_bytes (type);
2155      rtx src = NULL, dst = NULL;
2156      int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2157      int bitpos, xbitpos, big_endian_correction = 0;
2158
2159      if (tgtblk == 0)
2160	{
2161	  tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2162	  MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2163	  preserve_temp_slots (tgtblk);
2164	}
2165
2166      /* This code assumes srcreg is at least a full word.  If it isn't,
2167	 copy it into a new pseudo which is a full word.  */
2168      if (GET_MODE (srcreg) != BLKmode
2169	  && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2170	srcreg = convert_to_mode (word_mode, srcreg,
2171				  TREE_UNSIGNED (type));
2172
2173      /* Structures whose size is not a multiple of a word are aligned
2174	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2175	 machine, this means we must skip the empty high order bytes when
2176	 calculating the bit offset.  */
2177      if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2179						  * BITS_PER_UNIT));
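      /* For example (assuming UNITS_PER_WORD == 4 and BITS_PER_WORD == 32),
	 a 6-byte structure has bytes % UNITS_PER_WORD == 2, so the
	 correction is 32 - 2 * 8 == 16 bits of empty high-order space to
	 skip in the final word.  */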
2180
      /* Copy the structure BITSIZE bits at a time.
2182
2183	 We could probably emit more efficient code for machines
2184	 which do not use strict alignment, but it doesn't seem
2185	 worth the effort at the current time.  */
2186      for (bitpos = 0, xbitpos = big_endian_correction;
2187	   bitpos < bytes * BITS_PER_UNIT;
2188	   bitpos += bitsize, xbitpos += bitsize)
2189	{
2191	  /* We need a new source operand each time xbitpos is on a
2192	     word boundary and when xbitpos == big_endian_correction
2193	     (the first time through).  */
2194	  if (xbitpos % BITS_PER_WORD == 0
2195	      || xbitpos == big_endian_correction)
2196	    src = operand_subword_force (srcreg,
2197					 xbitpos / BITS_PER_WORD,
2198					 BLKmode);
2199
2200	  /* We need a new destination operand each time bitpos is on
2201	     a word boundary.  */
2202	  if (bitpos % BITS_PER_WORD == 0)
2203	    dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2204
	  /* Use xbitpos for the source extraction (right justified) and
	     bitpos for the destination store (left justified).  */
2207	  store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2208			   extract_bit_field (src, bitsize,
2209					      xbitpos % BITS_PER_WORD, 1,
2210					      NULL_RTX, word_mode,
2211					      word_mode,
2212					      bitsize / BITS_PER_UNIT,
2213					      BITS_PER_WORD),
2214			   bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2215	}
2216      return tgtblk;
2217}
2218
2220/* Add a USE expression for REG to the (possibly empty) list pointed
2221   to by CALL_FUSAGE.  REG must denote a hard register.  */
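/* For example, on targets that define STATIC_CHAIN_REGNUM, a caller might
   record that a call uses the static chain register with
   use_reg (&call_fusage, gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM));
   the EXPR_LIST chain built here is eventually attached to the
   CALL_INSN.  */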
2222
2223void
2224use_reg (call_fusage, reg)
2225     rtx *call_fusage, reg;
2226{
2227  if (GET_CODE (reg) != REG
2228      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();
2230
2231  *call_fusage
2232    = gen_rtx_EXPR_LIST (VOIDmode,
2233			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234}
2235
2236/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2237   starting at REGNO.  All of these registers must be hard registers.  */
2238
2239void
2240use_regs (call_fusage, regno, nregs)
2241     rtx *call_fusage;
2242     int regno;
2243     int nregs;
2244{
2245  int i;
2246
2247  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2248    abort ();
2249
2250  for (i = 0; i < nregs; i++)
2251    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2252}
2253
2254/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2255   PARALLEL REGS.  This is for calls that pass values in multiple
2256   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2257
2258void
2259use_group_regs (call_fusage, regs)
2260     rtx *call_fusage;
2261     rtx regs;
2262{
2263  int i;
2264
2265  for (i = 0; i < XVECLEN (regs, 0); i++)
2266    {
2267      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2268
2269      /* A NULL entry means the parameter goes both on the stack and in
2270	 registers.  This can also be a MEM for targets that pass values
2271	 partially on the stack and partially in registers.  */
2272      if (reg != 0 && GET_CODE (reg) == REG)
2273	use_reg (call_fusage, reg);
2274    }
2275}
2276
2277/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is the maximum
   alignment we can assume.  */
2281
2282static void
2283clear_by_pieces (to, len, align)
2284     rtx to;
2285     int len, align;
2286{
2287  struct clear_by_pieces data;
2288  rtx to_addr = XEXP (to, 0);
2289  int max_size = MOVE_MAX_PIECES + 1;
2290  enum machine_mode mode = VOIDmode, tmode;
2291  enum insn_code icode;
2292
2293  data.offset = 0;
2294  data.to_addr = to_addr;
2295  data.to = to;
2296  data.autinc_to
2297    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2298       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2299
2300  data.explicit_inc_to = 0;
2301  data.reverse
2302    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2303  if (data.reverse) data.offset = len;
2304  data.len = len;
2305
2306  data.to_struct = MEM_IN_STRUCT_P (to);
2307
  /* If clearing requires more than two move insns,
2309     copy addresses to registers (to make displacements shorter)
2310     and use post-increment if available.  */
2311  if (!data.autinc_to
2312      && move_by_pieces_ninsns (len, align) > 2)
2313    {
      /* Determine the main mode we'll be using.  */
2315      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317	if (GET_MODE_SIZE (tmode) < max_size)
2318	  mode = tmode;
2319
2320      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2321	{
2322	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2323	  data.autinc_to = 1;
2324	  data.explicit_inc_to = -1;
2325	}
2326      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2327	{
2328	  data.to_addr = copy_addr_to_reg (to_addr);
2329	  data.autinc_to = 1;
2330	  data.explicit_inc_to = 1;
2331	}
2332      if (!data.autinc_to && CONSTANT_P (to_addr))
2333	data.to_addr = copy_addr_to_reg (to_addr);
2334    }
2335
2336  if (! SLOW_UNALIGNED_ACCESS
2337      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2338    align = MOVE_MAX;
2339
2340  /* First move what we can in the largest integer mode, then go to
2341     successively smaller modes.  */
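  /* For example (assuming 32-bit words, a sufficiently aligned TO and
     move patterns for SImode, HImode and QImode), a LEN of 7 bytes is
     cleared with one SImode store, then one HImode store, then one
     QImode store.  */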
2342
2343  while (max_size > 1)
2344    {
2345      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2346	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2347	if (GET_MODE_SIZE (tmode) < max_size)
2348	  mode = tmode;
2349
2350      if (mode == VOIDmode)
2351	break;
2352
2353      icode = mov_optab->handlers[(int) mode].insn_code;
2354      if (icode != CODE_FOR_nothing
2355	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2356			   GET_MODE_SIZE (mode)))
2357	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2358
2359      max_size = GET_MODE_SIZE (mode);
2360    }
2361
2362  /* The code above should have handled everything.  */
2363  if (data.len != 0)
2364    abort ();
2365}
2366
2367/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
2368   with move instructions for mode MODE.  GENFUN is the gen_... function
2369   to make a move insn for that mode.  DATA has all the other info.  */
2370
2371static void
2372clear_by_pieces_1 (genfun, mode, data)
2373     rtx (*genfun) PROTO ((rtx, ...));
2374     enum machine_mode mode;
2375     struct clear_by_pieces *data;
2376{
2377  register int size = GET_MODE_SIZE (mode);
2378  register rtx to1;
2379
2380  while (data->len >= size)
2381    {
2382      if (data->reverse) data->offset -= size;
2383
2384      to1 = (data->autinc_to
2385	     ? gen_rtx_MEM (mode, data->to_addr)
2386	     : copy_rtx (change_address (data->to, mode,
2387					 plus_constant (data->to_addr,
2388							data->offset))));
2389      MEM_IN_STRUCT_P (to1) = data->to_struct;
2390
2391      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2392	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2393
2394      emit_insn ((*genfun) (to1, const0_rtx));
2395      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2396	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2397
2398      if (! data->reverse) data->offset += size;
2399
2400      data->len -= size;
2401    }
2402}
2403
2404/* Write zeros through the storage of OBJECT.
2405   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.
2407
2408   If we call a function that returns the length of the block, return it.  */
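/* For instance, clear_storage (object, GEN_INT (16), 4) zeros a 16-byte
   BLKmode OBJECT known to be 4-byte aligned; the operand values here are
   purely illustrative.  */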
2409
2410rtx
2411clear_storage (object, size, align)
2412     rtx object;
2413     rtx size;
2414     int align;
2415{
2416#ifdef TARGET_MEM_FUNCTIONS
2417  static tree fn;
2418  tree call_expr, arg_list;
2419#endif
2420  rtx retval = 0;
2421
2422  if (GET_MODE (object) == BLKmode)
2423    {
2424      object = protect_from_queue (object, 1);
2425      size = protect_from_queue (size, 0);
2426
2427      if (GET_CODE (size) == CONST_INT
2428	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2429	clear_by_pieces (object, INTVAL (size), align);
2430
2431      else
2432	{
2433	  /* Try the most limited insn first, because there's no point
2434	     including more than one in the machine description unless
2435	     the more limited one has some advantage.  */
2436
2437	  rtx opalign = GEN_INT (align);
2438	  enum machine_mode mode;
2439
2440	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2441	       mode = GET_MODE_WIDER_MODE (mode))
2442	    {
2443	      enum insn_code code = clrstr_optab[(int) mode];
2444
2445	      if (code != CODE_FOR_nothing
2446		  /* We don't need MODE to be narrower than
2447		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2448		     the mode mask, as it is returned by the macro, it will
2449		     definitely be less than the actual mode mask.  */
2450		  && ((GET_CODE (size) == CONST_INT
2451		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2452			   <= (GET_MODE_MASK (mode) >> 1)))
2453		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2454		  && (insn_operand_predicate[(int) code][0] == 0
2455		      || (*insn_operand_predicate[(int) code][0]) (object,
2456								   BLKmode))
2457		  && (insn_operand_predicate[(int) code][2] == 0
2458		      || (*insn_operand_predicate[(int) code][2]) (opalign,
2459								   VOIDmode)))
2460		{
2461		  rtx op1;
2462		  rtx last = get_last_insn ();
2463		  rtx pat;
2464
2465		  op1 = convert_to_mode (mode, size, 1);
2466		  if (insn_operand_predicate[(int) code][1] != 0
2467		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
2468								     mode))
2469		    op1 = copy_to_mode_reg (mode, op1);
2470
2471		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2472		  if (pat)
2473		    {
2474		      emit_insn (pat);
2475		      return 0;
2476		    }
2477		  else
2478		    delete_insns_since (last);
2479		}
2480	    }
2481
2482	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2483
2484	     It is unsafe to save the value generated by protect_from_queue
2485	     and reuse it later.  Consider what happens if emit_queue is
2486	     called before the return value from protect_from_queue is used.
2487
2488	     Expansion of the CALL_EXPR below will call emit_queue before
2489	     we are finished emitting RTL for argument setup.  So if we are
2490	     not careful we could get the wrong value for an argument.
2491
2492	     To avoid this problem we go ahead and emit code to copy OBJECT
2493	     and SIZE into new pseudos.  We can then place those new pseudos
2494	     into an RTL_EXPR and use them later, even after a call to
2495	     emit_queue.
2496
2497	     Note this is not strictly needed for library calls since they
2498	     do not call emit_queue before loading their arguments.  However,
2499	     we may need to have library calls call emit_queue in the future
2500	     since failing to do so could cause problems for targets which
2501	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2502	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2503
2504#ifdef TARGET_MEM_FUNCTIONS
2505	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2506#else
2507	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2508				  TREE_UNSIGNED (integer_type_node));
2509	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2510#endif
2511
2513#ifdef TARGET_MEM_FUNCTIONS
2514	  /* It is incorrect to use the libcall calling conventions to call
2515	     memset in this context.
2516
2517	     This could be a user call to memset and the user may wish to
2518	     examine the return value from memset.
2519
2520	     For targets where libcalls and normal calls have different
2521	     conventions for returning pointers, we could end up generating
	     incorrect code.
2523
2524	     So instead of using a libcall sequence we build up a suitable
2525	     CALL_EXPR and expand the call in the normal fashion.  */
2526	  if (fn == NULL_TREE)
2527	    {
2528	      tree fntype;
2529
	      /* This was copied from except.c; I don't know whether all of
		 this is necessary in this context.  */
2532	      fn = get_identifier ("memset");
2533	      push_obstacks_nochange ();
2534	      end_temporary_allocation ();
2535	      fntype = build_pointer_type (void_type_node);
2536	      fntype = build_function_type (fntype, NULL_TREE);
2537	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2538	      DECL_EXTERNAL (fn) = 1;
2539	      TREE_PUBLIC (fn) = 1;
2540	      DECL_ARTIFICIAL (fn) = 1;
2541	      make_decl_rtl (fn, NULL_PTR, 1);
2542	      assemble_external (fn);
2543	      pop_obstacks ();
2544	    }
2545
2546	  /* We need to make an argument list for the function call.
2547
	     memset has three arguments: the first is a void * address, the
	     second an integer with the initialization value, and the last a
	     size_t byte count for the copy.  */
2551	  arg_list
2552	    = build_tree_list (NULL_TREE,
2553			       make_tree (build_pointer_type (void_type_node),
2554					  object));
2555	  TREE_CHAIN (arg_list)
2556	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
2558	  TREE_CHAIN (TREE_CHAIN (arg_list))
2559	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2560	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2561
2562	  /* Now we have to build up the CALL_EXPR itself.  */
2563	  call_expr = build1 (ADDR_EXPR,
2564			      build_pointer_type (TREE_TYPE (fn)), fn);
2565	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2566			     call_expr, arg_list, NULL_TREE);
2567	  TREE_SIDE_EFFECTS (call_expr) = 1;
2568
2569	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2570#else
2571	  emit_library_call (bzero_libfunc, 0,
2572			     VOIDmode, 2, object, Pmode, size,
2573			     TYPE_MODE (integer_type_node));
2574#endif
2575	}
2576    }
2577  else
2578    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2579
2580  return retval;
2581}
2582
2583/* Generate code to copy Y into X.
2584   Both Y and X must have the same mode, except that
2585   Y can be a constant with VOIDmode.
2586   This mode cannot be BLKmode; use emit_block_move for that.
2587
2588   Return the last instruction emitted.  */
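/* For instance, emit_move_insn (target_reg, const0_rtx) emits whatever
   instruction or sequence the target needs to load zero into TARGET_REG;
   the name TARGET_REG here is illustrative.  */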
2589
2590rtx
2591emit_move_insn (x, y)
2592     rtx x, y;
2593{
2594  enum machine_mode mode = GET_MODE (x);
2595
2596  x = protect_from_queue (x, 1);
2597  y = protect_from_queue (y, 0);
2598
2599  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2600    abort ();
2601
2602  /* Never force constant_p_rtx to memory.  */
2603  if (GET_CODE (y) == CONSTANT_P_RTX)
2604    ;
2605  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2606    y = force_const_mem (mode, y);
2607
2608  /* If X or Y are memory references, verify that their addresses are valid
2609     for the machine.  */
2610  if (GET_CODE (x) == MEM
2611      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2612	   && ! push_operand (x, GET_MODE (x)))
2613	  || (flag_force_addr
2614	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2615    x = change_address (x, VOIDmode, XEXP (x, 0));
2616
2617  if (GET_CODE (y) == MEM
2618      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2619	  || (flag_force_addr
2620	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2621    y = change_address (y, VOIDmode, XEXP (y, 0));
2622
2623  if (mode == BLKmode)
2624    abort ();
2625
2626  return emit_move_insn_1 (x, y);
2627}
2628
2629/* Low level part of emit_move_insn.
2630   Called just like emit_move_insn, but assumes X and Y
2631   are basically valid.  */
2632
2633rtx
2634emit_move_insn_1 (x, y)
2635     rtx x, y;
2636{
2637  enum machine_mode mode = GET_MODE (x);
2638  enum machine_mode submode;
2639  enum mode_class class = GET_MODE_CLASS (mode);
2640  int i;
2641
2642  if (mode >= MAX_MACHINE_MODE)
    abort ();
2644
2645  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2646    return
2647      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2648
2649  /* Expand complex moves by moving real part and imag part, if possible.  */
2650  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2651	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2652						    * BITS_PER_UNIT),
2653						   (class == MODE_COMPLEX_INT
2654						    ? MODE_INT : MODE_FLOAT),
2655						   0))
2656	   && (mov_optab->handlers[(int) submode].insn_code
2657	       != CODE_FOR_nothing))
2658    {
2659      /* Don't split destination if it is a stack push.  */
2660      int stack = push_operand (x, GET_MODE (x));
2661
      /* If this is a stack push, push the highpart first, so it
2663	 will be in the argument order.
2664
2665	 In that case, change_address is used only to convert
2666	 the mode, not to change the address.  */
2667      if (stack)
2668	{
2669	  /* Note that the real part always precedes the imag part in memory
2670	     regardless of machine's endianness.  */
2671#ifdef STACK_GROWS_DOWNWARD
2672	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2673		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2674		      gen_imagpart (submode, y)));
2675	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2676		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2677		      gen_realpart (submode, y)));
2678#else
2679	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2680		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2681		      gen_realpart (submode, y)));
2682	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2683		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2684		      gen_imagpart (submode, y)));
2685#endif
2686	}
2687      else
2688	{
2689	  rtx realpart_x, realpart_y;
2690	  rtx imagpart_x, imagpart_y;
2691
2692	  /* If this is a complex value with each part being smaller than a
2693	     word, the usual calling sequence will likely pack the pieces into
2694	     a single register.  Unfortunately, SUBREG of hard registers only
2695	     deals in terms of words, so we have a problem converting input
2696	     arguments to the CONCAT of two registers that is used elsewhere
2697	     for complex values.  If this is before reload, we can copy it into
2698	     memory and reload.  FIXME, we should see about using extract and
2699	     insert on integer registers, but complex short and complex char
2700	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2702	      && (reload_in_progress | reload_completed) == 0)
2703	    {
2704	      int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2705	      int packed_src_p  = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2706
2707	      if (packed_dest_p || packed_src_p)
2708		{
2709		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2710					       ? MODE_FLOAT : MODE_INT);
2711
2712		  enum machine_mode reg_mode =
2713		    mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2714
2715		  if (reg_mode != BLKmode)
2716		    {
2717		      rtx mem = assign_stack_temp (reg_mode,
2718						   GET_MODE_SIZE (mode), 0);
2719
2720		      rtx cmem = change_address (mem, mode, NULL_RTX);
2721
2722		      current_function_cannot_inline
2723			= "function using short complex types cannot be inline";
2724
2725		      if (packed_dest_p)
2726			{
2727			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2728			  emit_move_insn_1 (cmem, y);
2729			  return emit_move_insn_1 (sreg, mem);
2730			}
2731		      else
2732			{
2733			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2734			  emit_move_insn_1 (mem, sreg);
2735			  return emit_move_insn_1 (x, cmem);
2736			}
2737		    }
2738		}
2739	    }
2740
2741	  realpart_x = gen_realpart (submode, x);
2742	  realpart_y = gen_realpart (submode, y);
2743	  imagpart_x = gen_imagpart (submode, x);
2744	  imagpart_y = gen_imagpart (submode, y);
2745
2746	  /* Show the output dies here.  This is necessary for SUBREGs
2747	     of pseudos since we cannot track their lifetimes correctly;
2748	     hard regs shouldn't appear here except as return values.
2749	     We never want to emit such a clobber after reload.  */
2750	  if (x != y
2751	      && ! (reload_in_progress || reload_completed)
2752	      && (GET_CODE (realpart_x) == SUBREG
2753		  || GET_CODE (imagpart_x) == SUBREG))
2754	    {
2755	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2756	    }
2757
2758	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2759		     (realpart_x, realpart_y));
2760	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2761		     (imagpart_x, imagpart_y));
2762	}
2763
2764      return get_last_insn ();
2765    }
2766
2767  /* This will handle any multi-word mode that lacks a move_insn pattern.
2768     However, you will get better code if you define such patterns,
2769     even if they must turn into multiple assembler instructions.  */
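  /* For example, on a 32-bit target with no DImode move pattern, a
     DImode copy is decomposed below into two word_mode moves through
     operand_subword.  */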
2770  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2771    {
2772      rtx last_insn = 0;
2773      rtx seq;
2774      int need_clobber;
2775
2776#ifdef PUSH_ROUNDING
2777
2778      /* If X is a push on the stack, do the push now and replace
2779	 X with a reference to the stack pointer.  */
2780      if (push_operand (x, GET_MODE (x)))
2781	{
2782	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2783	  x = change_address (x, VOIDmode, stack_pointer_rtx);
2784	}
2785#endif
2786
2787      start_sequence ();
2788
2789      need_clobber = 0;
2790      for (i = 0;
2791	   i < (GET_MODE_SIZE (mode)  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2792	   i++)
2793	{
2794	  rtx xpart = operand_subword (x, i, 1, mode);
2795	  rtx ypart = operand_subword (y, i, 1, mode);
2796
2797	  /* If we can't get a part of Y, put Y into memory if it is a
2798	     constant.  Otherwise, force it into a register.  If we still
2799	     can't get a part of Y, abort.  */
2800	  if (ypart == 0 && CONSTANT_P (y))
2801	    {
2802	      y = force_const_mem (mode, y);
2803	      ypart = operand_subword (y, i, 1, mode);
2804	    }
2805	  else if (ypart == 0)
2806	    ypart = operand_subword_force (y, i, mode);
2807
2808	  if (xpart == 0 || ypart == 0)
2809	    abort ();
2810
2811	  need_clobber |= (GET_CODE (xpart) == SUBREG);
2812
2813	  last_insn = emit_move_insn (xpart, ypart);
2814	}
2815
2816      seq = gen_sequence ();
2817      end_sequence ();
2818
2819      /* Show the output dies here.  This is necessary for SUBREGs
2820	 of pseudos since we cannot track their lifetimes correctly;
2821	 hard regs shouldn't appear here except as return values.
2822	 We never want to emit such a clobber after reload.  */
2823      if (x != y
2824	  && ! (reload_in_progress || reload_completed)
2825	  && need_clobber != 0)
2826	{
2827	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2828	}
2829
2830      emit_insn (seq);
2831
2832      return last_insn;
2833    }
2834  else
2835    abort ();
2836}
2837
2838/* Pushing data onto the stack.  */
2839
2840/* Push a block of length SIZE (perhaps variable)
2841   and return an rtx to address the beginning of the block.
2842   Note that it is not possible for the value returned to be a QUEUED.
2843   The value may be virtual_outgoing_args_rtx.
2844
2845   EXTRA is the number of bytes of padding to push in addition to SIZE.
2846   BELOW nonzero means this padding comes at low addresses;
2847   otherwise, the padding comes at high addresses.  */
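/* For example, push_block (GEN_INT (32), 0, 0) adjusts the stack to make
   room for 32 bytes and returns an address for the beginning of that
   block.  */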
2848
2849rtx
2850push_block (size, extra, below)
2851     rtx size;
2852     int extra, below;
2853{
2854  register rtx temp;
2855
2856  size = convert_modes (Pmode, ptr_mode, size, 1);
2857  if (CONSTANT_P (size))
2858    anti_adjust_stack (plus_constant (size, extra));
2859  else if (GET_CODE (size) == REG && extra == 0)
2860    anti_adjust_stack (size);
2861  else
2862    {
2863      rtx temp = copy_to_mode_reg (Pmode, size);
2864      if (extra != 0)
2865	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2866			     temp, 0, OPTAB_LIB_WIDEN);
2867      anti_adjust_stack (temp);
2868    }
2869
2870#if defined (STACK_GROWS_DOWNWARD) \
2871    || (defined (ARGS_GROW_DOWNWARD) \
2872	&& !defined (ACCUMULATE_OUTGOING_ARGS))
2873
2874  /* Return the lowest stack address when STACK or ARGS grow downward and
     we are not accumulating outgoing arguments (the c4x port uses such
2876     conventions).  */
2877  temp = virtual_outgoing_args_rtx;
2878  if (extra != 0 && below)
2879    temp = plus_constant (temp, extra);
2880#else
2881  if (GET_CODE (size) == CONST_INT)
2882    temp = plus_constant (virtual_outgoing_args_rtx,
2883			  - INTVAL (size) - (below ? 0 : extra));
2884  else if (extra != 0 && !below)
2885    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2886		    negate_rtx (Pmode, plus_constant (size, extra)));
2887  else
2888    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2889		    negate_rtx (Pmode, size));
2890#endif
2891
2892  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2893}
2894
2895rtx
2896gen_push_operand ()
2897{
2898  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2899}
2900
/* Return an rtx for the address of the beginning of an as-if-it-were-pushed
   block of SIZE bytes.  */
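/* With a post-update push code the stack pointer is adjusted after the
   store, so the beginning of the just-pushed block lies at sp + SIZE for
   POST_DEC and at sp - SIZE for POST_INC; for pre-update codes the stack
   pointer itself already addresses the block.  */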
2903
2904static rtx
2905get_push_address (size)
     int size;
2907{
2908  register rtx temp;
2909
2910  if (STACK_PUSH_CODE == POST_DEC)
2911    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2912  else if (STACK_PUSH_CODE == POST_INC)
2913    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2914  else
2915    temp = stack_pointer_rtx;
2916
2917  return copy_to_reg (temp);
2918}
2919
2920/* Generate code to push X onto the stack, assuming it has mode MODE and
2921   type TYPE.
2922   MODE is redundant except when X is a CONST_INT (since they don't
2923   carry mode info).
2924   SIZE is an rtx for the size of data to be copied (in bytes),
2925   needed only if X is BLKmode.
2926
   ALIGN (in bytes) is the maximum alignment we can assume.
2928
2929   If PARTIAL and REG are both nonzero, then copy that many of the first
2930   words of X into registers starting with REG, and push the rest of X.
2931   The amount of space pushed is decreased by PARTIAL words,
2932   rounded *down* to a multiple of PARM_BOUNDARY.
2933   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
2935   argument partially in registers, but do not actually load any
2936   registers.
2937
2938   EXTRA is the amount in bytes of extra space to leave next to this arg.
2939   This is ignored if an argument block has already been allocated.
2940
2941   On a machine that lacks real push insns, ARGS_ADDR is the address of
2942   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2944   argument block has not been preallocated.
2945
2946   ARGS_SO_FAR is the size of args previously pushed for this call.
2947
2948   REG_PARM_STACK_SPACE is nonzero if functions require stack space
2949   for arguments passed in registers.  If nonzero, it will be the number
2950   of bytes required.  */
2951
2952void
2953emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2954		args_addr, args_so_far, reg_parm_stack_space)
2955     register rtx x;
2956     enum machine_mode mode;
2957     tree type;
2958     rtx size;
2959     int align;
2960     int partial;
2961     rtx reg;
2962     int extra;
2963     rtx args_addr;
2964     rtx args_so_far;
2965     int reg_parm_stack_space;
2966{
2967  rtx xinner;
2968  enum direction stack_direction
2969#ifdef STACK_GROWS_DOWNWARD
2970    = downward;
2971#else
2972    = upward;
2973#endif
2974
2975  /* Decide where to pad the argument: `downward' for below,
2976     `upward' for above, or `none' for don't pad it.
2977     Default is below for small data on big-endian machines; else above.  */
2978  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2979
2980  /* Invert direction if stack is post-update.  */
2981  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2982    if (where_pad != none)
2983      where_pad = (where_pad == downward ? upward : downward);
2984
2985  xinner = x = protect_from_queue (x, 0);
2986
2987  if (mode == BLKmode)
2988    {
2989      /* Copy a block into the stack, entirely or partially.  */
2990
2991      register rtx temp;
2992      int used = partial * UNITS_PER_WORD;
2993      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2994      int skip;
2995
2996      if (size == 0)
2997	abort ();
2998
2999      used -= offset;
3000
3001      /* USED is now the # of bytes we need not copy to the stack
3002	 because registers will take care of them.  */
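      /* For example (assuming UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64),
	 PARTIAL == 3 gives USED == 12 at first; OFFSET is then 12 % 8 == 4
	 and USED drops to 8, i.e. the register part rounded down to a
	 multiple of PARM_BOUNDARY as described above.  */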
3003
3004      if (partial != 0)
3005	xinner = change_address (xinner, BLKmode,
3006				 plus_constant (XEXP (xinner, 0), used));
3007
3008      /* If the partial register-part of the arg counts in its stack size,
3009	 skip the part of stack space corresponding to the registers.
3010	 Otherwise, start copying to the beginning of the stack space,
3011	 by setting SKIP to 0.  */
3012      skip = (reg_parm_stack_space == 0) ? 0 : used;
3013
3014#ifdef PUSH_ROUNDING
3015      /* Do it with several push insns if that doesn't take lots of insns
3016	 and if there is no difficulty with push insns that skip bytes
3017	 on the stack for alignment purposes.  */
3018      if (args_addr == 0
3019	  && GET_CODE (size) == CONST_INT
3020	  && skip == 0
3021	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3022	  /* Here we avoid the case of a structure whose weak alignment
3023	     forces many pushes of a small amount of data,
3024	     and such small pushes do rounding that causes trouble.  */
3025	  && ((! SLOW_UNALIGNED_ACCESS)
3026	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
3027	      || PUSH_ROUNDING (align) == align)
3028	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3029	{
3030	  /* Push padding now if padding above and stack grows down,
3031	     or if padding below and stack grows up.
3032	     But if space already allocated, this has already been done.  */
3033	  if (extra && args_addr == 0
3034	      && where_pad != none && where_pad != stack_direction)
3035	    anti_adjust_stack (GEN_INT (extra));
3036
3037	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3038			  INTVAL (size) - used, align);
3039
3040	  if (current_function_check_memory_usage && ! in_check_memory_usage)
3041	    {
3042	      rtx temp;
3043
3044	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
3046	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3047		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3048				   temp, Pmode,
3049				   XEXP (xinner, 0), Pmode,
				   GEN_INT (INTVAL (size) - used),
3051				   TYPE_MODE (sizetype));
3052	      else
3053		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3054				   temp, Pmode,
				   GEN_INT (INTVAL (size) - used),
3056				   TYPE_MODE (sizetype),
3057				   GEN_INT (MEMORY_USE_RW),
3058				   TYPE_MODE (integer_type_node));
3059	      in_check_memory_usage = 0;
3060	    }
3061	}
3062      else
3063#endif /* PUSH_ROUNDING */
3064	{
3065	  /* Otherwise make space on the stack and copy the data
3066	     to the address of that space.  */
3067
3068	  /* Deduct words put into registers from the size we must copy.  */
3069	  if (partial != 0)
3070	    {
3071	      if (GET_CODE (size) == CONST_INT)
3072		size = GEN_INT (INTVAL (size) - used);
3073	      else
3074		size = expand_binop (GET_MODE (size), sub_optab, size,
3075				     GEN_INT (used), NULL_RTX, 0,
3076				     OPTAB_LIB_WIDEN);
3077	    }
3078
3079	  /* Get the address of the stack space.
3080	     In this case, we do not deal with EXTRA separately.
3081	     A single stack adjust will do.  */
3082	  if (! args_addr)
3083	    {
3084	      temp = push_block (size, extra, where_pad == downward);
3085	      extra = 0;
3086	    }
3087	  else if (GET_CODE (args_so_far) == CONST_INT)
3088	    temp = memory_address (BLKmode,
3089				   plus_constant (args_addr,
3090						  skip + INTVAL (args_so_far)));
3091	  else
3092	    temp = memory_address (BLKmode,
3093				   plus_constant (gen_rtx_PLUS (Pmode,
3094								args_addr,
3095								args_so_far),
3096						  skip));
3097	  if (current_function_check_memory_usage && ! in_check_memory_usage)
3098	    {
3099	      rtx target;
3100
3101	      in_check_memory_usage = 1;
3102	      target = copy_to_reg (temp);
3103	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3104		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3105				   target, Pmode,
3106				   XEXP (xinner, 0), Pmode,
3107				   size, TYPE_MODE (sizetype));
3108	      else
3109	        emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3110				   target, Pmode,
				   size, TYPE_MODE (sizetype),
3112				   GEN_INT (MEMORY_USE_RW),
3113				   TYPE_MODE (integer_type_node));
3114	      in_check_memory_usage = 0;
3115	    }
3116
3117	  /* TEMP is the address of the block.  Copy the data there.  */
3118	  if (GET_CODE (size) == CONST_INT
3119	      && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3120	    {
3121	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3122			      INTVAL (size), align);
3123	      goto ret;
3124	    }
3125	  else
3126	    {
3127	      rtx opalign = GEN_INT (align);
3128	      enum machine_mode mode;
3129	      rtx target = gen_rtx_MEM (BLKmode, temp);
3130
3131	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3132		   mode != VOIDmode;
3133		   mode = GET_MODE_WIDER_MODE (mode))
3134		{
3135		  enum insn_code code = movstr_optab[(int) mode];
3136
3137		  if (code != CODE_FOR_nothing
3138		      && ((GET_CODE (size) == CONST_INT
3139			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3140			       <= (GET_MODE_MASK (mode) >> 1)))
3141			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3142		      && (insn_operand_predicate[(int) code][0] == 0
3143			  || ((*insn_operand_predicate[(int) code][0])
3144			      (target, BLKmode)))
3145		      && (insn_operand_predicate[(int) code][1] == 0
3146			  || ((*insn_operand_predicate[(int) code][1])
3147			      (xinner, BLKmode)))
3148		      && (insn_operand_predicate[(int) code][3] == 0
3149			  || ((*insn_operand_predicate[(int) code][3])
3150			      (opalign, VOIDmode))))
3151		    {
3152		      rtx op2 = convert_to_mode (mode, size, 1);
3153		      rtx last = get_last_insn ();
3154		      rtx pat;
3155
3156		      if (insn_operand_predicate[(int) code][2] != 0
3157			  && ! ((*insn_operand_predicate[(int) code][2])
3158				(op2, mode)))
3159			op2 = copy_to_mode_reg (mode, op2);
3160
3161		      pat = GEN_FCN ((int) code) (target, xinner,
3162						  op2, opalign);
3163		      if (pat)
3164			{
3165			  emit_insn (pat);
3166			  goto ret;
3167			}
3168		      else
3169			delete_insns_since (last);
3170		    }
3171		}
3172	    }
3173
3174#ifndef ACCUMULATE_OUTGOING_ARGS
3175	  /* If the source is referenced relative to the stack pointer,
3176	     copy it to another register to stabilize it.  We do not need
3177	     to do this if we know that we won't be changing sp.  */
3178
3179	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3180	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3181	    temp = copy_to_reg (temp);
3182#endif
3183
3184	  /* Make inhibit_defer_pop nonzero around the library call
3185	     to force it to pop the bcopy-arguments right away.  */
3186	  NO_DEFER_POP;
3187#ifdef TARGET_MEM_FUNCTIONS
3188	  emit_library_call (memcpy_libfunc, 0,
3189			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3190			     convert_to_mode (TYPE_MODE (sizetype),
3191					      size, TREE_UNSIGNED (sizetype)),
3192			     TYPE_MODE (sizetype));
3193#else
3194	  emit_library_call (bcopy_libfunc, 0,
3195			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3196			     convert_to_mode (TYPE_MODE (integer_type_node),
3197					      size,
3198					      TREE_UNSIGNED (integer_type_node)),
3199			     TYPE_MODE (integer_type_node));
3200#endif
3201	  OK_DEFER_POP;
3202	}
3203    }
3204  else if (partial > 0)
3205    {
3206      /* Scalar partly in registers.  */
3207
3208      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3209      int i;
3210      int not_stack;
3211      /* # words of start of argument
3212	 that we must make space for but need not store.  */
3213      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3214      int args_offset = INTVAL (args_so_far);
3215      int skip;
3216
3217      /* Push padding now if padding above and stack grows down,
3218	 or if padding below and stack grows up.
3219	 But if space already allocated, this has already been done.  */
3220      if (extra && args_addr == 0
3221	  && where_pad != none && where_pad != stack_direction)
3222	anti_adjust_stack (GEN_INT (extra));
3223
3224      /* If we make space by pushing it, we might as well push
3225	 the real data.  Otherwise, we can leave OFFSET nonzero
3226	 and leave the space uninitialized.  */
3227      if (args_addr == 0)
3228	offset = 0;
3229
3230      /* Now NOT_STACK gets the number of words that we don't need to
3231	 allocate on the stack.  */
3232      not_stack = partial - offset;
3233
3234      /* If the partial register-part of the arg counts in its stack size,
3235	 skip the part of stack space corresponding to the registers.
3236	 Otherwise, start copying to the beginning of the stack space,
3237	 by setting SKIP to 0.  */
3238      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3239
3240      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3241	x = validize_mem (force_const_mem (mode, x));
3242
3243      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3244	 SUBREGs of such registers are not allowed.  */
3245      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3246	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3247	x = copy_to_reg (x);
3248
3249      /* Loop over all the words allocated on the stack for this arg.  */
3250      /* We can do it by words, because any scalar bigger than a word
3251	 has a size a multiple of a word.  */
3252#ifndef PUSH_ARGS_REVERSED
3253      for (i = not_stack; i < size; i++)
3254#else
3255      for (i = size - 1; i >= not_stack; i--)
3256#endif
3257	if (i >= not_stack + offset)
3258	  emit_push_insn (operand_subword_force (x, i, mode),
3259			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3260			  0, args_addr,
3261			  GEN_INT (args_offset + ((i - not_stack + skip)
3262						  * UNITS_PER_WORD)),
3263			  reg_parm_stack_space);
3264    }
3265  else
3266    {
3267      rtx addr;
3268      rtx target = NULL_RTX;
3269
3270      /* Push padding now if padding above and stack grows down,
3271	 or if padding below and stack grows up.
3272	 But if space already allocated, this has already been done.  */
3273      if (extra && args_addr == 0
3274	  && where_pad != none && where_pad != stack_direction)
3275	anti_adjust_stack (GEN_INT (extra));
3276
3277#ifdef PUSH_ROUNDING
3278      if (args_addr == 0)
3279	addr = gen_push_operand ();
3280      else
3281#endif
3282	{
3283	  if (GET_CODE (args_so_far) == CONST_INT)
3284	    addr
3285	      = memory_address (mode,
3286				plus_constant (args_addr,
3287					       INTVAL (args_so_far)));
	  else
3289	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3290						       args_so_far));
3291	  target = addr;
3292	}
3293
3294      emit_move_insn (gen_rtx_MEM (mode, addr), x);
3295
3296      if (current_function_check_memory_usage && ! in_check_memory_usage)
3297	{
3298	  in_check_memory_usage = 1;
3299	  if (target == 0)
3300	    target = get_push_address (GET_MODE_SIZE (mode));
3301
3302	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3303	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3304			       target, Pmode,
3305			       XEXP (x, 0), Pmode,
3306			       GEN_INT (GET_MODE_SIZE (mode)),
3307			       TYPE_MODE (sizetype));
3308	  else
3309	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3310			       target, Pmode,
3311			       GEN_INT (GET_MODE_SIZE (mode)),
3312			       TYPE_MODE (sizetype),
3313			       GEN_INT (MEMORY_USE_RW),
3314			       TYPE_MODE (integer_type_node));
3315	  in_check_memory_usage = 0;
3316	}
3317    }
3318
3319 ret:
3320  /* If part should go in registers, copy that part
3321     into the appropriate registers.  Do this now, at the end,
3322     since mem-to-mem copies above may do function calls.  */
3323  if (partial > 0 && reg != 0)
3324    {
3325      /* Handle calls that pass values in multiple non-contiguous locations.
3326	 The Irix 6 ABI has examples of this.  */
3327      if (GET_CODE (reg) == PARALLEL)
3328	emit_group_load (reg, x, -1, align);  /* ??? size? */
3329      else
3330	move_block_to_reg (REGNO (reg), x, partial, mode);
3331    }
3332
3333  if (extra && args_addr == 0 && where_pad == stack_direction)
3334    anti_adjust_stack (GEN_INT (extra));
3335}
3336
3337/* Expand an assignment that stores the value of FROM into TO.
3338   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3339   (This may contain a QUEUED rtx;
3340   if the value is constant, this rtx is a constant.)
3341   Otherwise, the returned value is NULL_RTX.
3342
3343   SUGGEST_REG is no longer actually used.
3344   It used to mean, copy the value through a register
3345   and return that register, if that is possible.
3346   We now use WANT_VALUE to decide whether to do this.  */
3347
3348rtx
3349expand_assignment (to, from, want_value, suggest_reg)
3350     tree to, from;
3351     int want_value;
3352     int suggest_reg;
3353{
3354  register rtx to_rtx = 0;
3355  rtx result;
3356
3357  /* Don't crash if the lhs of the assignment was erroneous.  */
3358
3359  if (TREE_CODE (to) == ERROR_MARK)
3360    {
3361      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3362      return want_value ? result : NULL_RTX;
3363    }
3364
3365  /* Assignment of a structure component needs special treatment
3366     if the structure component's rtx is not simply a MEM.
3367     Assignment of an array element at a constant index, and assignment of
3368     an array element in an unaligned packed structure field, has the same
3369     problem.  */
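  /* For example, an assignment to a bit-field member such as "s.b = x"
     cannot be expanded as a simple MEM store; get_inner_reference below
     decomposes the reference into a base object plus bit position and
     size so the store can be done piecewise.  */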
3370
3371  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3372      || TREE_CODE (to) == ARRAY_REF)
3373    {
3374      enum machine_mode mode1;
3375      int bitsize;
3376      int bitpos;
3377      tree offset;
3378      int unsignedp;
3379      int volatilep = 0;
3380      tree tem;
3381      int alignment;
3382
3383      push_temp_slots ();
3384      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3385				 &unsignedp, &volatilep, &alignment);
3386
3387      /* If we are going to use store_bit_field and extract_bit_field,
3388	 make sure to_rtx will be safe for multiple use.  */
3389
3390      if (mode1 == VOIDmode && want_value)
3391	tem = stabilize_reference (tem);
3392
3393      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3394      if (offset != 0)
3395	{
3396	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3397
3398	  if (GET_CODE (to_rtx) != MEM)
3399	    abort ();
3400
3401	  if (GET_MODE (offset_rtx) != ptr_mode)
3402	    {
3403#ifdef POINTERS_EXTEND_UNSIGNED
3404	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3405#else
3406	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3407#endif
3408	    }
3409
3410	  /* A constant address in TO_RTX can have VOIDmode; we must not
3411	     call force_reg in that case.  */
3412	  if (GET_CODE (to_rtx) == MEM
3413	      && GET_MODE (to_rtx) == BLKmode
3414	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3415	      && bitsize
3416	      && (bitpos % bitsize) == 0
3417	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3418	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3419	    {
3420	      rtx temp = change_address (to_rtx, mode1,
3421				         plus_constant (XEXP (to_rtx, 0),
3422						        (bitpos /
3423						         BITS_PER_UNIT)));
3424	      if (GET_CODE (XEXP (temp, 0)) == REG)
3425	        to_rtx = temp;
3426	      else
3427		to_rtx = change_address (to_rtx, mode1,
3428				         force_reg (GET_MODE (XEXP (temp, 0)),
3429						    XEXP (temp, 0)));
3430	      bitpos = 0;
3431	    }
3432
3433	  to_rtx = change_address (to_rtx, VOIDmode,
3434				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3435						 force_reg (ptr_mode, offset_rtx)));
3436	}
3437      if (volatilep)
3438	{
3439	  if (GET_CODE (to_rtx) == MEM)
3440	    {
3441	      /* When the offset is zero, to_rtx is the address of the
3442		 structure we are storing into, and hence may be shared.
3443		 We must make a new MEM before setting the volatile bit.  */
3444	      if (offset == 0)
3445		to_rtx = copy_rtx (to_rtx);
3446
3447	      MEM_VOLATILE_P (to_rtx) = 1;
3448	    }
3449#if 0  /* This was turned off because, when a field is volatile
3450	  in an object which is not volatile, the object may be in a register,
3451	  and then we would abort here.  */
3452	  else
3453	    abort ();
3454#endif
3455	}
3456
3457      if (TREE_CODE (to) == COMPONENT_REF
3458	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3459	{
3460	  if (offset == 0)
3461	    to_rtx = copy_rtx (to_rtx);
3462
3463	  RTX_UNCHANGING_P (to_rtx) = 1;
3464	}
3465
3466      /* Check the access.  */
3467      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3468	{
3469	  rtx to_addr;
3470	  int size;
3471	  int best_mode_size;
3472	  enum machine_mode best_mode;
3473
3474	  best_mode = get_best_mode (bitsize, bitpos,
3475	  			     TYPE_ALIGN (TREE_TYPE (tem)),
3476	  			     mode1, volatilep);
3477	  if (best_mode == VOIDmode)
3478	    best_mode = QImode;
3479
3480	  best_mode_size = GET_MODE_BITSIZE (best_mode);
3481	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3482	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3483	  size *= GET_MODE_SIZE (best_mode);
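	  /* Worked example (editorial): with BITPOS == 10, BITSIZE == 20
	     and BEST_MODE == SImode (32 bits, 4 bytes), the field spans
	     CEIL (10 % 32 + 20, 32) == CEIL (30, 32) == 1 word of
	     BEST_MODE, so SIZE == 1 * 4 == 4 bytes.  */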
3484
3485	  /* Check the access rights of the pointer.  */
3486	  if (size)
3487	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3488			       to_addr, Pmode,
3489			       GEN_INT (size), TYPE_MODE (sizetype),
3490			       GEN_INT (MEMORY_USE_WO),
3491			       TYPE_MODE (integer_type_node));
3492	}
3493
3494      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3495			    (want_value
3496			     /* Spurious cast makes HPUX compiler happy.  */
3497			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3498			     : VOIDmode),
3499			    unsignedp,
3500			    /* Required alignment of containing datum.  */
3501			    alignment,
3502			    int_size_in_bytes (TREE_TYPE (tem)),
3503			    get_alias_set (to));
3504      preserve_temp_slots (result);
3505      free_temp_slots ();
3506      pop_temp_slots ();
3507
3508      /* If the value is meaningful, convert RESULT to the proper mode.
3509	 Otherwise, return nothing.  */
3510      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3511					  TYPE_MODE (TREE_TYPE (from)),
3512					  result,
3513					  TREE_UNSIGNED (TREE_TYPE (to)))
3514	      : NULL_RTX);
3515    }
3516
3517  /* If the rhs is a function call and its value is not an aggregate,
3518     call the function before we start to compute the lhs.
3519     This is needed for correct code for cases such as
3520     val = setjmp (buf) on machines where reference to val
3521     requires loading up part of an address in a separate insn.
3522
3523     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3524     a promoted variable where the zero- or sign-extension needs to be done.
3525     Handling this in the normal way is safe because no computation is done
3526     before the call.  */
3527  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3528      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3529      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3530    {
3531      rtx value;
3532
3533      push_temp_slots ();
3534      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3535      if (to_rtx == 0)
3536	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3537
3538      /* Handle calls that return values in multiple non-contiguous locations.
3539	 The Irix 6 ABI has examples of this.  */
3540      if (GET_CODE (to_rtx) == PARALLEL)
3541	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3542			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3543      else if (GET_MODE (to_rtx) == BLKmode)
3544	emit_block_move (to_rtx, value, expr_size (from),
3545			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3546      else
3547	{
3548#ifdef POINTERS_EXTEND_UNSIGNED
3549	  if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3550	     || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3551	    value = convert_memory_address (GET_MODE (to_rtx), value);
3552#endif
3553	  emit_move_insn (to_rtx, value);
3554	}
3555      preserve_temp_slots (to_rtx);
3556      free_temp_slots ();
3557      pop_temp_slots ();
3558      return want_value ? to_rtx : NULL_RTX;
3559    }
3560
3561  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3562     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3563
3564  if (to_rtx == 0)
3565    {
3566      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3567      if (GET_CODE (to_rtx) == MEM)
3568	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3569    }
3570
3571  /* Don't move directly into a return register.  */
3572  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3573    {
3574      rtx temp;
3575
3576      push_temp_slots ();
3577      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3578      emit_move_insn (to_rtx, temp);
3579      preserve_temp_slots (to_rtx);
3580      free_temp_slots ();
3581      pop_temp_slots ();
3582      return want_value ? to_rtx : NULL_RTX;
3583    }
3584
3585  /* In case we are returning the contents of an object which overlaps
3586     the place the value is being stored, use a safe function when copying
3587     a value through a pointer into a structure value return block.  */
3588  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3589      && current_function_returns_struct
3590      && !current_function_returns_pcc_struct)
3591    {
3592      rtx from_rtx, size;
3593
3594      push_temp_slots ();
3595      size = expr_size (from);
3596      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3597			      EXPAND_MEMORY_USE_DONT);
3598
3599      /* Copy the rights of the bitmap.  */
3600      if (current_function_check_memory_usage)
3601	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3602			   XEXP (to_rtx, 0), Pmode,
3603			   XEXP (from_rtx, 0), Pmode,
3604			   convert_to_mode (TYPE_MODE (sizetype),
3605					    size, TREE_UNSIGNED (sizetype)),
3606			   TYPE_MODE (sizetype));
3607
3608#ifdef TARGET_MEM_FUNCTIONS
3609      emit_library_call (memcpy_libfunc, 0,
3610			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3611			 XEXP (from_rtx, 0), Pmode,
3612			 convert_to_mode (TYPE_MODE (sizetype),
3613					  size, TREE_UNSIGNED (sizetype)),
3614			 TYPE_MODE (sizetype));
3615#else
3616      emit_library_call (bcopy_libfunc, 0,
3617			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3618			 XEXP (to_rtx, 0), Pmode,
3619			 convert_to_mode (TYPE_MODE (integer_type_node),
3620					  size, TREE_UNSIGNED (integer_type_node)),
3621			 TYPE_MODE (integer_type_node));
3622#endif
3623
3624      preserve_temp_slots (to_rtx);
3625      free_temp_slots ();
3626      pop_temp_slots ();
3627      return want_value ? to_rtx : NULL_RTX;
3628    }
3629
3630  /* Compute FROM and store the value in the rtx we got.  */
3631
3632  push_temp_slots ();
3633  result = store_expr (from, to_rtx, want_value);
3634  preserve_temp_slots (result);
3635  free_temp_slots ();
3636  pop_temp_slots ();
3637  return want_value ? result : NULL_RTX;
3638}
3639
3640/* Generate code for computing expression EXP,
3641   and storing the value into TARGET.
3642   TARGET may contain a QUEUED rtx.
3643
3644   If WANT_VALUE is nonzero, return a copy of the value
3645   not in TARGET, so that we can be sure to use the proper
3646   value in a containing expression even if TARGET has something
3647   else stored in it.  If possible, we copy the value through a pseudo
3648   and return that pseudo.  Or, if the value is constant, we try to
3649   return the constant.  In some cases, we return a pseudo
3650   copied *from* TARGET.
3651
3652   If the mode is BLKmode then we may return TARGET itself.
3653   It turns out that in BLKmode it doesn't cause a problem,
3654   because C has no operators that could combine two different
3655   assignments into the same BLKmode object with different values
3656   with no sequence point.  Will other languages need this to
3657   be more thorough?
3658
3659   If WANT_VALUE is 0, we return NULL, to make sure
3660   to catch quickly any cases where the caller uses the value
3661   and fails to set WANT_VALUE.  */
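/* Editorial illustration (hypothetical C fragment): in

	x = (y = z);

   the inner assignment is expanded with WANT_VALUE nonzero so that its
   result can feed the outer store; a plain statement `y = z;' passes
   WANT_VALUE == 0 and gets NULL_RTX back.  */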
3662
3663rtx
3664store_expr (exp, target, want_value)
3665     register tree exp;
3666     register rtx target;
3667     int want_value;
3668{
3669  register rtx temp;
3670  int dont_return_target = 0;
3671
3672  if (TREE_CODE (exp) == COMPOUND_EXPR)
3673    {
3674      /* Perform first part of compound expression, then assign from second
3675	 part.  */
3676      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3677      emit_queue ();
3678      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3679    }
3680  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3681    {
3682      /* For a conditional expression, get a safe form of the target.  Then
3683	 test the condition, doing the appropriate assignment on either
3684	 side.  This avoids the creation of unnecessary temporaries.
3685	 For non-BLKmode, it is more efficient not to do this.  */
3686
3687      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3688
3689      emit_queue ();
3690      target = protect_from_queue (target, 1);
3691
3692      do_pending_stack_adjust ();
3693      NO_DEFER_POP;
3694      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3695      start_cleanup_deferral ();
3696      store_expr (TREE_OPERAND (exp, 1), target, 0);
3697      end_cleanup_deferral ();
3698      emit_queue ();
3699      emit_jump_insn (gen_jump (lab2));
3700      emit_barrier ();
3701      emit_label (lab1);
3702      start_cleanup_deferral ();
3703      store_expr (TREE_OPERAND (exp, 2), target, 0);
3704      end_cleanup_deferral ();
3705      emit_queue ();
3706      emit_label (lab2);
3707      OK_DEFER_POP;
3708
3709      return want_value ? target : NULL_RTX;
3710    }
3711  else if (queued_subexp_p (target))
3712    /* If target contains a postincrement, let's not risk
3713       using it as the place to generate the rhs.  */
3714    {
3715      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3716	{
3717	  /* Expand EXP into a new pseudo.  */
3718	  temp = gen_reg_rtx (GET_MODE (target));
3719	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3720	}
3721      else
3722	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3723
3724      /* If target is volatile, ANSI requires accessing the value
3725	 *from* the target, if it is accessed.  So make that happen.
3726	 In no case return the target itself.  */
3727      if (! MEM_VOLATILE_P (target) && want_value)
3728	dont_return_target = 1;
3729    }
3730  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3731	   && GET_MODE (target) != BLKmode)
3732    /* If target is in memory and caller wants value in a register instead,
3733       arrange that.  Pass TARGET as target for expand_expr so that,
3734       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3735       We know expand_expr will not use the target in that case.
3736       Don't do this if TARGET is volatile because we are supposed
3737       to write it and then read it.  */
3738    {
3739      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3740			  GET_MODE (target), 0);
3741      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3742	temp = copy_to_reg (temp);
3743      dont_return_target = 1;
3744    }
3745  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3746    /* If this is a scalar in a register that is stored in a wider mode
3747       than the declared mode, compute the result into its declared mode
3748       and then convert to the wider mode.  Our value is the computed
3749       expression.  */
3750    {
3751      /* If we don't want a value, we can do the conversion inside EXP,
3752	 which will often result in some optimizations.  Do the conversion
3753	 in two steps: first change the signedness, if needed, then
3754	 the extend.  But don't do this if the type of EXP is a subtype
3755	 of something else since then the conversion might involve
3756	 more than just converting modes.  */
3757      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3758	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
3759	{
3760	  if (TREE_UNSIGNED (TREE_TYPE (exp))
3761	      != SUBREG_PROMOTED_UNSIGNED_P (target))
3762	    exp
3763	      = convert
3764		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3765					  TREE_TYPE (exp)),
3766		 exp);
3767
3768	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3769					SUBREG_PROMOTED_UNSIGNED_P (target)),
3770			 exp);
3771	}
3772
3773      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3774
3775      /* If TEMP is a volatile MEM and we want a result value, make
3776	 the access now so it gets done only once.  Likewise if
3777	 it contains TARGET.  */
3778      if (GET_CODE (temp) == MEM && want_value
3779	  && (MEM_VOLATILE_P (temp)
3780	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3781	temp = copy_to_reg (temp);
3782
3783      /* If TEMP is a VOIDmode constant, use convert_modes to make
3784	 sure that we properly convert it.  */
3785      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3786	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3787			      TYPE_MODE (TREE_TYPE (exp)), temp,
3788			      SUBREG_PROMOTED_UNSIGNED_P (target));
3789
3790      convert_move (SUBREG_REG (target), temp,
3791		    SUBREG_PROMOTED_UNSIGNED_P (target));
3792      return want_value ? temp : NULL_RTX;
3793    }
3794  else
3795    {
3796      temp = expand_expr (exp, target, GET_MODE (target), 0);
3797      /* Return TARGET if it's a specified hardware register.
3798	 If TARGET is a volatile mem ref, either return TARGET
3799	 or return a reg copied *from* TARGET; ANSI requires this.
3800
3801	 Otherwise, if TEMP is not TARGET, return TEMP
3802	 if it is constant (for efficiency),
3803	 or if we really want the correct value.  */
3804      if (!(target && GET_CODE (target) == REG
3805	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
3806	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3807	  && ! rtx_equal_p (temp, target)
3808	  && (CONSTANT_P (temp) || want_value))
3809	dont_return_target = 1;
3810    }
3811
3812  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3813     the same as that of TARGET, adjust the constant.  This is needed, for
3814     example, in case it is a CONST_DOUBLE and we want only a word-sized
3815     value.  */
3816  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3817      && TREE_CODE (exp) != ERROR_MARK
3818      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3819    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3820			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3821
3822  if (current_function_check_memory_usage
3823      && GET_CODE (target) == MEM
3824      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3825    {
3826      if (GET_CODE (temp) == MEM)
3827        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3828			   XEXP (target, 0), Pmode,
3829			   XEXP (temp, 0), Pmode,
3830			   expr_size (exp), TYPE_MODE (sizetype));
3831      else
3832        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3833			   XEXP (target, 0), Pmode,
3834			   expr_size (exp), TYPE_MODE (sizetype),
3835			   GEN_INT (MEMORY_USE_WO),
3836			   TYPE_MODE (integer_type_node));
3837    }
3838
3839  /* If value was not generated in the target, store it there.
3840     Convert the value to TARGET's type first if necessary.  */
3841  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3842     one or both of them are volatile memory refs, we have to distinguish
3843     two cases:
3844     - expand_expr has used TARGET.  In this case, we must not generate
3845       another copy.  This can be detected by TARGET being equal according
3846       to == .
3847     - expand_expr has not used TARGET - that means that the source just
3848       happens to have the same RTX form.  Since temp will have been created
3849       by expand_expr, it will compare unequal according to == .
3850       We must generate a copy in this case, to reach the correct number
3851       of volatile memory references.  */
3852
3853  if ((! rtx_equal_p (temp, target)
3854       || (temp != target && (side_effects_p (temp)
3855			      || side_effects_p (target))))
3856      && TREE_CODE (exp) != ERROR_MARK)
3857    {
3858      target = protect_from_queue (target, 1);
3859      if (GET_MODE (temp) != GET_MODE (target)
3860	  && GET_MODE (temp) != VOIDmode)
3861	{
3862	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3863	  if (dont_return_target)
3864	    {
3865	      /* In this case, we will return TEMP,
3866		 so make sure it has the proper mode.
3867		 But don't forget to store the value into TARGET.  */
3868	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3869	      emit_move_insn (target, temp);
3870	    }
3871	  else
3872	    convert_move (target, temp, unsignedp);
3873	}
3874
3875      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3876	{
3877	  /* Handle copying a string constant into an array.
3878	     The string constant may be shorter than the array.
3879	     So copy just the string's actual length, and clear the rest.  */
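	  /* Editorial example (hypothetical declaration): for

		char buf[10] = "abc";

	     TREE_STRING_LENGTH is 4, counting the terminating null, so
	     4 bytes are block-copied and the remaining 6 bytes of the
	     target are cleared below.  */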
3880	  rtx size;
3881	  rtx addr;
3882
3883	  /* Get the size of the data type of the string,
3884	     which is actually the size of the target.  */
3885	  size = expr_size (exp);
3886	  if (GET_CODE (size) == CONST_INT
3887	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
3888	    emit_block_move (target, temp, size,
3889			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3890	  else
3891	    {
3892	      /* Compute the size of the data to copy from the string.  */
3893	      tree copy_size
3894		= size_binop (MIN_EXPR,
3895			      make_tree (sizetype, size),
3896			      convert (sizetype,
3897				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3898	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3899					       VOIDmode, 0);
3900	      rtx label = 0;
3901
3902	      /* Copy that much.  */
3903	      emit_block_move (target, temp, copy_size_rtx,
3904			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3905
3906	      /* Figure out how much is left in TARGET that we have to clear.
3907		 Do all calculations in ptr_mode.  */
3908
3909	      addr = XEXP (target, 0);
3910	      addr = convert_modes (ptr_mode, Pmode, addr, 1);
3911
3912	      if (GET_CODE (copy_size_rtx) == CONST_INT)
3913		{
3914		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3915		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3916		}
3917	      else
3918		{
3919		  addr = force_reg (ptr_mode, addr);
3920		  addr = expand_binop (ptr_mode, add_optab, addr,
3921				       copy_size_rtx, NULL_RTX, 0,
3922				       OPTAB_LIB_WIDEN);
3923
3924		  size = expand_binop (ptr_mode, sub_optab, size,
3925				       copy_size_rtx, NULL_RTX, 0,
3926				       OPTAB_LIB_WIDEN);
3927
3928		  label = gen_label_rtx ();
3929		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3930					   GET_MODE (size), 0, 0, label);
3931		}
3932
3933	      if (size != const0_rtx)
3934		{
3935		  /* Be sure we can write on ADDR.  */
3936		  if (current_function_check_memory_usage)
3937		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3938				       addr, Pmode,
3939				       size, TYPE_MODE (sizetype),
3940 				       GEN_INT (MEMORY_USE_WO),
3941				       TYPE_MODE (integer_type_node));
3942#ifdef TARGET_MEM_FUNCTIONS
3943		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3944				     addr, ptr_mode,
3945				     const0_rtx, TYPE_MODE (integer_type_node),
3946				     convert_to_mode (TYPE_MODE (sizetype),
3947						      size,
3948						      TREE_UNSIGNED (sizetype)),
3949				     TYPE_MODE (sizetype));
3950#else
3951		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3952				     addr, ptr_mode,
3953				     convert_to_mode (TYPE_MODE (integer_type_node),
3954						      size,
3955						      TREE_UNSIGNED (integer_type_node)),
3956				     TYPE_MODE (integer_type_node));
3957#endif
3958		}
3959
3960	      if (label)
3961		emit_label (label);
3962	    }
3963	}
3964      /* Handle calls that return values in multiple non-contiguous locations.
3965	 The Irix 6 ABI has examples of this.  */
3966      else if (GET_CODE (target) == PARALLEL)
3967	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3968			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3969      else if (GET_MODE (temp) == BLKmode)
3970	emit_block_move (target, temp, expr_size (exp),
3971			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3972      else
3973	emit_move_insn (target, temp);
3974    }
3975
3976  /* If we don't want a value, return NULL_RTX.  */
3977  if (! want_value)
3978    return NULL_RTX;
3979
3980  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3981     ??? The latter test doesn't seem to make sense.  */
3982  else if (dont_return_target && GET_CODE (temp) != MEM)
3983    return temp;
3984
3985  /* Return TARGET itself if it is a hard register.  */
3986  else if (want_value && GET_MODE (target) != BLKmode
3987	   && ! (GET_CODE (target) == REG
3988		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3989    return copy_to_reg (target);
3990
3991  else
3992    return target;
3993}
3994
3995/* Return 1 if EXP just contains zeros.  */
3996
3997static int
3998is_zeros_p (exp)
3999     tree exp;
4000{
4001  tree elt;
4002
4003  switch (TREE_CODE (exp))
4004    {
4005    case CONVERT_EXPR:
4006    case NOP_EXPR:
4007    case NON_LVALUE_EXPR:
4008      return is_zeros_p (TREE_OPERAND (exp, 0));
4009
4010    case INTEGER_CST:
4011      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
4012
4013    case COMPLEX_CST:
4014      return
4015	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4016
4017    case REAL_CST:
4018      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4019
4020    case CONSTRUCTOR:
4021      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4022	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4023      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4024	if (! is_zeros_p (TREE_VALUE (elt)))
4025	  return 0;
4026
4027      return 1;
4028
4029    default:
4030      return 0;
4031    }
4032}
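/* Editorial examples: is_zeros_p returns 1 for the integer constant 0,
   the real constant 0.0, a complex constant with zero real and
   imaginary parts, and an aggregate CONSTRUCTOR all of whose elements
   are themselves zeros, e.g. the hypothetical initializer
   { 0, { 0, 0 }, 0.0 }.  */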
4033
4034/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4035
4036static int
4037mostly_zeros_p (exp)
4038     tree exp;
4039{
4040  if (TREE_CODE (exp) == CONSTRUCTOR)
4041    {
4042      int elts = 0, zeros = 0;
4043      tree elt = CONSTRUCTOR_ELTS (exp);
4044      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4045	{
4046	  /* If there are no ranges of true bits, it is all zero.  */
4047	  return elt == NULL_TREE;
4048	}
4049      for (; elt; elt = TREE_CHAIN (elt))
4050	{
4051	  /* We do not handle the case where the index is a RANGE_EXPR,
4052	     so the statistic will be somewhat inaccurate.
4053	     We do make a more accurate count in store_constructor itself,
4054	     and since this function is only used for nested array elements,
4055	     this should be close enough.  */
4056	  if (mostly_zeros_p (TREE_VALUE (elt)))
4057	    zeros++;
4058	  elts++;
4059	}
4060
4061      return 4 * zeros >= 3 * elts;
4062    }
4063
4064  return is_zeros_p (exp);
4065}
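/* Editorial sketch, not part of the compiler (hence #if 0): the same
   3/4 threshold as mostly_zeros_p above, applied to a flat array of
   ints purely to illustrate the arithmetic; all names here are made
   up.  */
#if 0
static int
mostly_zeros_demo (vals, n)
     const int *vals;
     int n;
{
  int i, zeros = 0;

  for (i = 0; i < n; i++)
    if (vals[i] == 0)
      zeros++;

  /* For { 0, 0, 5, 0, 0, 0, 0, 0 }: ZEROS == 7, N == 8, and
     4*7 == 28 >= 3*8 == 24, so the array counts as mostly zero.  */
  return 4 * zeros >= 3 * n;
}
#endif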
4066
4067/* Helper function for store_constructor.
4068   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4069   TYPE is the type of the CONSTRUCTOR, not the element type.
4070   CLEARED is as for store_constructor.
4071
4072   This provides a recursive shortcut back to store_constructor when it isn't
4073   necessary to go through store_field.  This is so that we can pass through
4074   the cleared field to let store_constructor know that we may not have to
4075   clear a substructure if the outer structure has already been cleared.  */
4076
4077static void
4078store_constructor_field (target, bitsize, bitpos,
4079			 mode, exp, type, cleared)
4080     rtx target;
4081     int bitsize, bitpos;
4082     enum machine_mode mode;
4083     tree exp, type;
4084     int cleared;
4085{
4086  if (TREE_CODE (exp) == CONSTRUCTOR
4087      && bitpos % BITS_PER_UNIT == 0
4088      /* If we have a nonzero bitpos for a register target, then we just
4089	 let store_field do the bitfield handling.  This is unlikely to
4090	 generate unnecessary clear instructions anyway.  */
4091      && (bitpos == 0 || GET_CODE (target) == MEM))
4092    {
4093      if (bitpos != 0)
4094	target = change_address (target, VOIDmode,
4095				 plus_constant (XEXP (target, 0),
4096						bitpos / BITS_PER_UNIT));
4097      store_constructor (exp, target, cleared);
4098    }
4099  else
4100    store_field (target, bitsize, bitpos, mode, exp,
4101		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
4102		 int_size_in_bytes (type), 0);
4103}
4104
4105/* Store the value of constructor EXP into the rtx TARGET.
4106   TARGET is either a REG or a MEM.
4107   CLEARED is true if TARGET is known to have been zero'd.  */
4108
4109static void
4110store_constructor (exp, target, cleared)
4111     tree exp;
4112     rtx target;
4113     int cleared;
4114{
4115  tree type = TREE_TYPE (exp);
4116  rtx exp_size = expr_size (exp);
4117
4118  /* We know our target cannot conflict, since safe_from_p has been called.  */
4119#if 0
4120  /* Don't try copying piece by piece into a hard register
4121     since that is vulnerable to being clobbered by EXP.
4122     Instead, construct in a pseudo register and then copy it all.  */
4123  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4124    {
4125      rtx temp = gen_reg_rtx (GET_MODE (target));
4126      store_constructor (exp, temp, 0);
4127      emit_move_insn (target, temp);
4128      return;
4129    }
4130#endif
4131
4132  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4133      || TREE_CODE (type) == QUAL_UNION_TYPE)
4134    {
4135      register tree elt;
4136
4137      /* Inform later passes that the whole union value is dead.  */
4138      if (TREE_CODE (type) == UNION_TYPE
4139	  || TREE_CODE (type) == QUAL_UNION_TYPE)
4140	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4141
4142      /* If we are building a static constructor into a register,
4143	 set the initial value as zero so we can fold the value into
4144	 a constant.  But if more than one register is involved,
4145	 this probably loses.  */
4146      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4147	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4148	{
4149	  if (! cleared)
4150	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4151
4152	  cleared = 1;
4153	}
4154
4155      /* If the constructor has fewer fields than the structure
4156	 or if we are initializing the structure to mostly zeros,
4157	 clear the whole structure first.  */
4158      else if ((list_length (CONSTRUCTOR_ELTS (exp))
4159		!= list_length (TYPE_FIELDS (type)))
4160	       || mostly_zeros_p (exp))
4161	{
4162	  if (! cleared)
4163	    clear_storage (target, expr_size (exp),
4164			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4165
4166	  cleared = 1;
4167	}
4168      else
4169	/* Inform later passes that the old value is dead.  */
4170	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4171
4172      /* Store each element of the constructor into
4173	 the corresponding field of TARGET.  */
4174
4175      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4176	{
4177	  register tree field = TREE_PURPOSE (elt);
4178	  tree value = TREE_VALUE (elt);
4179	  register enum machine_mode mode;
4180	  int bitsize;
4181	  int bitpos = 0;
4182	  int unsignedp;
4183	  tree pos, constant = 0, offset = 0;
4184	  rtx to_rtx = target;
4185
4186	  /* Just ignore missing fields.
4187	     We cleared the whole structure, above,
4188	     if any fields are missing.  */
4189	  if (field == 0)
4190	    continue;
4191
4192	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
4193	    continue;
4194
4195	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4196	  unsignedp = TREE_UNSIGNED (field);
4197	  mode = DECL_MODE (field);
4198	  if (DECL_BIT_FIELD (field))
4199	    mode = VOIDmode;
4200
4201	  pos = DECL_FIELD_BITPOS (field);
4202	  if (TREE_CODE (pos) == INTEGER_CST)
4203	    constant = pos;
4204	  else if (TREE_CODE (pos) == PLUS_EXPR
4205		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4206	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4207	  else
4208	    offset = pos;
4209
4210	  if (constant)
4211	    bitpos = TREE_INT_CST_LOW (constant);
4212
4213	  if (offset)
4214	    {
4215	      rtx offset_rtx;
4216
4217	      if (contains_placeholder_p (offset))
4218		offset = build (WITH_RECORD_EXPR, sizetype,
4219				offset, make_tree (TREE_TYPE (exp), target));
4220
4221	      offset = size_binop (FLOOR_DIV_EXPR, offset,
4222				   size_int (BITS_PER_UNIT));
4223
4224	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4225	      if (GET_CODE (to_rtx) != MEM)
4226		abort ();
4227
4228              if (GET_MODE (offset_rtx) != ptr_mode)
4229                {
4230#ifdef POINTERS_EXTEND_UNSIGNED
4231                  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4232#else
4233                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4234#endif
4235                }
4236
4237	      to_rtx
4238		= change_address (to_rtx, VOIDmode,
4239				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4240					   force_reg (ptr_mode, offset_rtx)));
4241	    }
4242	  if (TREE_READONLY (field))
4243	    {
4244	      if (GET_CODE (to_rtx) == MEM)
4245		to_rtx = copy_rtx (to_rtx);
4246
4247	      RTX_UNCHANGING_P (to_rtx) = 1;
4248	    }
4249
4250#ifdef WORD_REGISTER_OPERATIONS
4251	  /* If this initializes a field that is smaller than a word, at the
4252	     start of a word, try to widen it to a full word.
4253	     This special case allows us to output C++ member function
4254	     initializations in a form that the optimizers can understand.  */
4255	  if (constant
4256	      && GET_CODE (target) == REG
4257	      && bitsize < BITS_PER_WORD
4258	      && bitpos % BITS_PER_WORD == 0
4259	      && GET_MODE_CLASS (mode) == MODE_INT
4260	      && TREE_CODE (value) == INTEGER_CST
4261	      && GET_CODE (exp_size) == CONST_INT
4262	      && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4263	    {
4264	      tree type = TREE_TYPE (value);
4265	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4266		{
4267		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4268		  value = convert (type, value);
4269		}
4270	      if (BYTES_BIG_ENDIAN)
4271		value
4272		  = fold (build (LSHIFT_EXPR, type, value,
4273				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4274	      bitsize = BITS_PER_WORD;
4275	      mode = word_mode;
4276	    }
4277#endif
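	  /* Editorial example: initializing an 8-bit integer field at
	     BITPOS 0 of a register-held structure widens the store to a
	     full word here; with BYTES_BIG_ENDIAN and a 32-bit word the
	     value is first shifted left by 32 - 8 == 24 bits.  */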
4278	  store_constructor_field (to_rtx, bitsize, bitpos,
4279				   mode, value, type, cleared);
4280	}
4281    }
4282  else if (TREE_CODE (type) == ARRAY_TYPE)
4283    {
4284      register tree elt;
4285      register int i;
4286      int need_to_clear;
4287      tree domain = TYPE_DOMAIN (type);
4288      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4289      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4290      tree elttype = TREE_TYPE (type);
4291
4292      /* If the constructor has fewer elements than the array,
4293         clear the whole array first.  Similarly if this is
4294         a static constructor of a non-BLKmode object.  */
4295      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4296	need_to_clear = 1;
4297      else
4298	{
4299	  HOST_WIDE_INT count = 0, zero_count = 0;
4300	  need_to_clear = 0;
4301	  /* This loop is a more accurate version of the loop in
4302	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4303	     It is also needed to check for missing elements.  */
4304	  for (elt = CONSTRUCTOR_ELTS (exp);
4305	       elt != NULL_TREE;
4306	       elt = TREE_CHAIN (elt))
4307	    {
4308	      tree index = TREE_PURPOSE (elt);
4309	      HOST_WIDE_INT this_node_count;
4310	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4311		{
4312		  tree lo_index = TREE_OPERAND (index, 0);
4313		  tree hi_index = TREE_OPERAND (index, 1);
4314		  if (TREE_CODE (lo_index) != INTEGER_CST
4315		      || TREE_CODE (hi_index) != INTEGER_CST)
4316		    {
4317		      need_to_clear = 1;
4318		      break;
4319		    }
4320		  this_node_count = TREE_INT_CST_LOW (hi_index)
4321		    - TREE_INT_CST_LOW (lo_index) + 1;
4322		}
4323	      else
4324		this_node_count = 1;
4325	      count += this_node_count;
4326	      if (mostly_zeros_p (TREE_VALUE (elt)))
4327		zero_count += this_node_count;
4328	    }
4329	  /* Clear the entire array first if there are any missing elements,
4330	     or if the incidence of zero elements is >= 75%.  */
4331	  if (count < maxelt - minelt + 1
4332	      || 4 * zero_count >= 3 * count)
4333	    need_to_clear = 1;
4334	}
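      /* Worked example (editorial): for a 100-element array whose
	 constructor supplies only 10 elements, COUNT == 10 < 100, so the
	 whole array is cleared first; if all 100 elements are present
	 and 80 of them are zero, 4*80 == 320 >= 3*100 == 300, so it is
	 likewise cleared and only the nonzero elements are stored.  */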
4335      if (need_to_clear)
4336	{
4337	  if (! cleared)
4338	    clear_storage (target, expr_size (exp),
4339			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4340	  cleared = 1;
4341	}
4342      else
4343	/* Inform later passes that the old value is dead.  */
4344	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4345
4346      /* Store each element of the constructor into
4347	 the corresponding element of TARGET, determined
4348	 by counting the elements.  */
4349      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4350	   elt;
4351	   elt = TREE_CHAIN (elt), i++)
4352	{
4353	  register enum machine_mode mode;
4354	  int bitsize;
4355	  int bitpos;
4356	  int unsignedp;
4357	  tree value = TREE_VALUE (elt);
4358	  tree index = TREE_PURPOSE (elt);
4359	  rtx xtarget = target;
4360
4361	  if (cleared && is_zeros_p (value))
4362	    continue;
4363
4364	  mode = TYPE_MODE (elttype);
4365	  bitsize = GET_MODE_BITSIZE (mode);
4366	  unsignedp = TREE_UNSIGNED (elttype);
4367
4368	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4369	    {
4370	      tree lo_index = TREE_OPERAND (index, 0);
4371	      tree hi_index = TREE_OPERAND (index, 1);
4372	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4373	      struct nesting *loop;
4374	      HOST_WIDE_INT lo, hi, count;
4375	      tree position;
4376
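	      /* Editorial example (GNU C range designator):

			int a[100] = { [0 ... 9] = 1 };

		 gives a RANGE_EXPR index with bounds 0 and 9; COUNT is
		 10, and with 32-bit elements 32 * 10 == 320 <= 40 * 8,
		 so the test below unrolls this into ten stores.  */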
4377	      /* If the range is constant and "small", unroll the loop.  */
4378	      if (TREE_CODE (lo_index) == INTEGER_CST
4379		  && TREE_CODE (hi_index) == INTEGER_CST
4380		  && (lo = TREE_INT_CST_LOW (lo_index),
4381		      hi = TREE_INT_CST_LOW (hi_index),
4382		      count = hi - lo + 1,
4383		      (GET_CODE (target) != MEM
4384		       || count <= 2
4385		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4386			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4387			   <= 40 * 8))))
4388		{
4389		  lo -= minelt;  hi -= minelt;
4390		  for (; lo <= hi; lo++)
4391		    {
4392		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4393		      store_constructor_field (target, bitsize, bitpos,
4394					       mode, value, type, cleared);
4395		    }
4396		}
4397	      else
4398		{
4399		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4400		  loop_top = gen_label_rtx ();
4401		  loop_end = gen_label_rtx ();
4402
4403		  unsignedp = TREE_UNSIGNED (domain);
4404
4405		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4406
4407		  DECL_RTL (index) = index_r
4408		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4409						 &unsignedp, 0));
4410
4411		  if (TREE_CODE (value) == SAVE_EXPR
4412		      && SAVE_EXPR_RTL (value) == 0)
4413		    {
4414		      /* Make sure value gets expanded once before the
4415                         loop.  */
4416		      expand_expr (value, const0_rtx, VOIDmode, 0);
4417		      emit_queue ();
4418		    }
4419		  store_expr (lo_index, index_r, 0);
4420		  loop = expand_start_loop (0);
4421
4422		  /* Assign value to element index.  */
4423		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4424					 size_int (BITS_PER_UNIT));
4425		  position = size_binop (MULT_EXPR,
4426					 size_binop (MINUS_EXPR, index,
4427						     TYPE_MIN_VALUE (domain)),
4428					 position);
4429		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4430		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4431		  xtarget = change_address (target, mode, addr);
4432		  if (TREE_CODE (value) == CONSTRUCTOR)
4433		    store_constructor (value, xtarget, cleared);
4434		  else
4435		    store_expr (value, xtarget, 0);
4436
4437		  expand_exit_loop_if_false (loop,
4438					     build (LT_EXPR, integer_type_node,
4439						    index, hi_index));
4440
4441		  expand_increment (build (PREINCREMENT_EXPR,
4442					   TREE_TYPE (index),
4443					   index, integer_one_node), 0, 0);
4444		  expand_end_loop ();
4445		  emit_label (loop_end);
4446
4447		  /* Needed by stupid register allocation, to extend the
4448		     lifetime of pseudo-regs used by target past the end
4449		     of the loop.  */
4450		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
4451		}
4452	    }
4453	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4454	      || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4455	    {
4456	      rtx pos_rtx, addr;
4457	      tree position;
4458
4459	      if (index == 0)
4460		index = size_int (i);
4461
4462	      if (minelt)
4463		index = size_binop (MINUS_EXPR, index,
4464				    TYPE_MIN_VALUE (domain));
4465	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4466				     size_int (BITS_PER_UNIT));
4467	      position = size_binop (MULT_EXPR, index, position);
4468	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4469	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4470	      xtarget = change_address (target, mode, addr);
4471	      store_expr (value, xtarget, 0);
4472	    }
4473	  else
4474	    {
4475	      if (index != 0)
4476		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4477			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4478	      else
4479		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4480	      store_constructor_field (target, bitsize, bitpos,
4481				       mode, value, type, cleared);
4482	    }
4483	}
4484    }
4485  /* Set constructor assignments.  */
4486  else if (TREE_CODE (type) == SET_TYPE)
4487    {
4488      tree elt = CONSTRUCTOR_ELTS (exp);
4489      int nbytes = int_size_in_bytes (type), nbits;
4490      tree domain = TYPE_DOMAIN (type);
4491      tree domain_min, domain_max, bitlength;
4492
4493      /* The default implementation strategy is to extract the constant
4494	 parts of the constructor, use that to initialize the target,
4495	 and then "or" in whatever non-constant ranges we need in addition.
4496
4497	 If a large set is all zero or all ones, it is
4498	 probably better to set it using memset (if available) or bzero.
4499	 Also, if a large set has just a single range, it may be better
4500	 to first clear the whole set (using bzero/memset) and then set
4501	 the bits we want.  */
4502
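      /* Editorial example (Pascal/CHILL-style source, not C): a
	 constructor for a set over 0..7 containing [1..3, 7] sets bits
	 1, 2, 3 and 7, i.e. the single byte 0x8e when !BYTES_BIG_ENDIAN,
	 since each bit I contributes 1 << I below.  */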
4503      /* Check for all zeros.  */
4504      if (elt == NULL_TREE)
4505	{
4506	  if (!cleared)
4507	    clear_storage (target, expr_size (exp),
4508			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4509	  return;
4510	}
4511
4512      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4513      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4514      bitlength = size_binop (PLUS_EXPR,
4515			      size_binop (MINUS_EXPR, domain_max, domain_min),
4516			      size_one_node);
4517
4518      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4519	abort ();
4520      nbits = TREE_INT_CST_LOW (bitlength);
4521
4522      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4523	 are "complicated" (more than one range), initialize (the
4524	 constant parts) by copying from a constant.  */
4525      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4526	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4527	{
4528	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4529	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4530	  char *bit_buffer = (char *) alloca (nbits);
4531	  HOST_WIDE_INT word = 0;
4532	  int bit_pos = 0;
4533	  int ibit = 0;
4534	  int offset = 0;  /* In bytes from beginning of set.  */
4535	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
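	  /* Editorial note: BIT_BUFFER now holds one flag per bit of the
	     set; the loop below packs SET_WORD_SIZE flags at a time into
	     WORD and stores each completed word.  With BYTES_BIG_ENDIAN,
	     bit 0 of the set lands in the most significant position of
	     the first word.  */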
4536	  for (;;)
4537	    {
4538	      if (bit_buffer[ibit])
4539		{
4540		  if (BYTES_BIG_ENDIAN)
4541		    word |= (1 << (set_word_size - 1 - bit_pos));
4542		  else
4543		    word |= 1 << bit_pos;
4544		}
4545	      bit_pos++;  ibit++;
4546	      if (bit_pos >= set_word_size || ibit == nbits)
4547		{
4548		  if (word != 0 || ! cleared)
4549		    {
4550		      rtx datum = GEN_INT (word);
4551		      rtx to_rtx;
4552		      /* The assumption here is that it is safe to use
4553			 XEXP if the set is multi-word, but not if
4554			 it's single-word.  */
4555		      if (GET_CODE (target) == MEM)
4556			{
4557			  to_rtx = plus_constant (XEXP (target, 0), offset);
4558			  to_rtx = change_address (target, mode, to_rtx);
4559			}
4560		      else if (offset == 0)
4561			to_rtx = target;
4562		      else
4563			abort ();
4564		      emit_move_insn (to_rtx, datum);
4565		    }
4566		  if (ibit == nbits)
4567		    break;
4568		  word = 0;
4569		  bit_pos = 0;
4570		  offset += set_word_size / BITS_PER_UNIT;
4571		}
4572	    }
4573	}
4574      else if (!cleared)
4575	{
4576	  /* Don't bother clearing storage if the set is all ones.  */
4577	  if (TREE_CHAIN (elt) != NULL_TREE
4578	      || (TREE_PURPOSE (elt) == NULL_TREE
4579		  ? nbits != 1
4580		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4581		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4582		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4583			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4584			 != nbits))))
4585	    clear_storage (target, expr_size (exp),
4586			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4587	}
4588
4589      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4590	{
4591	  /* Start of range of element, or NULL.  */
4592	  tree startbit = TREE_PURPOSE (elt);
4593	  /* End of range of element, or element value.  */
4594	  tree endbit   = TREE_VALUE (elt);
4595#ifdef TARGET_MEM_FUNCTIONS
4596	  HOST_WIDE_INT startb, endb;
4597#endif
4598	  rtx  bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4599
4600	  bitlength_rtx = expand_expr (bitlength,
4601			    NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4602
4603	  /* Handle a non-range tuple element like [ expr ].  */
4604	  if (startbit == NULL_TREE)
4605	    {
4606	      startbit = save_expr (endbit);
4607	      endbit = startbit;
4608	    }
4609	  startbit = convert (sizetype, startbit);
4610	  endbit = convert (sizetype, endbit);
4611	  if (! integer_zerop (domain_min))
4612	    {
4613	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4614	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4615	    }
4616	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4617				      EXPAND_CONST_ADDRESS);
4618	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4619				    EXPAND_CONST_ADDRESS);
4620
4621	  if (REG_P (target))
4622	    {
4623	      targetx = assign_stack_temp (GET_MODE (target),
4624					   GET_MODE_SIZE (GET_MODE (target)),
4625					   0);
4626	      emit_move_insn (targetx, target);
4627	    }
4628	  else if (GET_CODE (target) == MEM)
4629	    targetx = target;
4630	  else
4631	    abort ();
4632
4633#ifdef TARGET_MEM_FUNCTIONS
4634	  /* Optimization:  If startbit and endbit are
4635	     constants divisible by BITS_PER_UNIT,
4636	     call memset instead.  */
4637	  if (TREE_CODE (startbit) == INTEGER_CST
4638	      && TREE_CODE (endbit) == INTEGER_CST
4639	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4640	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4641	    {
4642	      emit_library_call (memset_libfunc, 0,
4643				 VOIDmode, 3,
4644				 plus_constant (XEXP (targetx, 0),
4645						startb / BITS_PER_UNIT),
4646				 Pmode,
4647				 constm1_rtx, TYPE_MODE (integer_type_node),
4648				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4649				 TYPE_MODE (sizetype));
4650	    }
4651	  else
4652#endif
4653	    {
4654	      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4655				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4656				 bitlength_rtx, TYPE_MODE (sizetype),
4657				 startbit_rtx, TYPE_MODE (sizetype),
4658				 endbit_rtx, TYPE_MODE (sizetype));
4659	    }
4660	  if (REG_P (target))
4661	    emit_move_insn (target, targetx);
4662	}
4663    }
4664
4665  else
4666    abort ();
4667}
4668
4669/* Store the value of EXP (an expression tree)
4670   into a subfield of TARGET which has mode MODE and occupies
4671   BITSIZE bits, starting BITPOS bits from the start of TARGET.
4672   If MODE is VOIDmode, it means that we are storing into a bit-field.
4673
4674   If VALUE_MODE is VOIDmode, return nothing in particular.
4675   UNSIGNEDP is not used in this case.
4676
4677   Otherwise, return an rtx for the value stored.  This rtx
4678   has mode VALUE_MODE if that is convenient to do.
4679   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4680
4681   ALIGN is the alignment that TARGET is known to have, measured in bytes.
4682   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4683
4684   ALIAS_SET is the alias set for the destination.  This value will
4685   (in general) be different from that for TARGET, since TARGET is a
4686   reference to the containing structure.  */
4687
4688static rtx
4689store_field (target, bitsize, bitpos, mode, exp, value_mode,
4690	     unsignedp, align, total_size, alias_set)
4691     rtx target;
4692     int bitsize, bitpos;
4693     enum machine_mode mode;
4694     tree exp;
4695     enum machine_mode value_mode;
4696     int unsignedp;
4697     int align;
4698     int total_size;
4699     int alias_set;
4700{
4701  HOST_WIDE_INT width_mask = 0;
4702
4703  if (TREE_CODE (exp) == ERROR_MARK)
4704    return const0_rtx;
4705
4706  if (bitsize < HOST_BITS_PER_WIDE_INT)
4707    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
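  /* E.g. for a 5-bit field (editorial example),
     WIDTH_MASK == ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f.  */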
4708
4709  /* If we are storing into an unaligned field of an aligned union that is
4710     in a register, we may have the mode of TARGET being an integer mode but
4711     MODE == BLKmode.  In that case, get an aligned object whose size and
4712     alignment are the same as TARGET and store TARGET into it (we can avoid
4713     the store if the field being stored is the entire width of TARGET).  Then
4714     call ourselves recursively to store the field into a BLKmode version of
4715     that object.  Finally, load from the object into TARGET.  This is not
4716     very efficient in general, but should only be slightly more expensive
4717     than the otherwise-required unaligned accesses.  Perhaps this can be
4718     cleaned up later.  */
4719
4720  if (mode == BLKmode
4721      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4722    {
4723      rtx object = assign_stack_temp (GET_MODE (target),
4724				      GET_MODE_SIZE (GET_MODE (target)), 0);
4725      rtx blk_object = copy_rtx (object);
4726
4727      MEM_SET_IN_STRUCT_P (object, 1);
4728      MEM_SET_IN_STRUCT_P (blk_object, 1);
4729      PUT_MODE (blk_object, BLKmode);
4730
4731      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4732	emit_move_insn (object, target);
4733
4734      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4735		   align, total_size, alias_set);
4736
4737      /* Even though we aren't returning target, we need to
4738	 give it the updated value.  */
4739      emit_move_insn (target, object);
4740
4741      return blk_object;
4742    }
4743
4744  /* If the structure is in a register or if the component
4745     is a bit field, we cannot use addressing to access it.
4746     Use bit-field techniques or SUBREG to store in it.  */
4747
4748  if (mode == VOIDmode
4749      || (mode != BLKmode && ! direct_store[(int) mode]
4750	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4751	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4752      || GET_CODE (target) == REG
4753      || GET_CODE (target) == SUBREG
4754      /* If the field isn't aligned enough to store as an ordinary memref,
4755	 store it as a bit field.  */
4756      || (SLOW_UNALIGNED_ACCESS
4757	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4758      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4759    {
4760      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4761
4762      /* If BITSIZE is narrower than the size of the type of EXP
4763	 we will be narrowing TEMP.  Normally, what's wanted are the
4764	 low-order bits.  However, if EXP's type is a record and this is a
4765	 big-endian machine, we want the upper BITSIZE bits.  */
4766      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4767	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4768	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4769	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4770			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4771				       - bitsize),
4772			     temp, 1);
4773
4774      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4775	 MODE.  */
4776      if (mode != VOIDmode && mode != BLKmode
4777	  && mode != TYPE_MODE (TREE_TYPE (exp)))
4778	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4779
4780      /* If the modes of TARGET and TEMP are both BLKmode, both
4781	 must be in memory and BITPOS must be aligned on a byte
4782	 boundary.  If so, we simply do a block copy.  */
4783      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4784	{
4785	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4786	      || bitpos % BITS_PER_UNIT != 0)
4787	    abort ();
4788
4789	  target = change_address (target, VOIDmode,
4790				   plus_constant (XEXP (target, 0),
4791						bitpos / BITS_PER_UNIT));
4792
4793	  emit_block_move (target, temp,
4794			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4795				    / BITS_PER_UNIT),
4796			   1);
4797
4798	  return value_mode == VOIDmode ? const0_rtx : target;
4799	}
4800
4801      /* Store the value in the bitfield.  */
4802      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4803      if (value_mode != VOIDmode)
4804	{
4805	  /* The caller wants an rtx for the value.  */
4806	  /* If possible, avoid refetching from the bitfield itself.  */
4807	  if (width_mask != 0
4808	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4809	    {
4810	      tree count;
4811	      enum machine_mode tmode;
4812
4813	      if (unsignedp)
4814		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4815	      tmode = GET_MODE (temp);
4816	      if (tmode == VOIDmode)
4817		tmode = value_mode;
4818	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4819	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4820	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4821	    }
4822	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
4823				    NULL_RTX, value_mode, 0, align,
4824				    total_size);
4825	}
4826      return const0_rtx;
4827    }
4828  else
4829    {
4830      rtx addr = XEXP (target, 0);
4831      rtx to_rtx;
4832
4833      /* If a value is wanted, it must be the lhs;
4834	 so make the address stable for multiple use.  */
4835
4836      if (value_mode != VOIDmode && GET_CODE (addr) != REG
4837	  && ! CONSTANT_ADDRESS_P (addr)
4838	  /* A frame-pointer reference is already stable.  */
4839	  && ! (GET_CODE (addr) == PLUS
4840		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
4841		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
4842		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4843	addr = copy_to_reg (addr);
4844
4845      /* Now build a reference to just the desired component.  */
4846
4847      to_rtx = copy_rtx (change_address (target, mode,
4848					 plus_constant (addr,
4849							(bitpos
4850							 / BITS_PER_UNIT))));
4851      MEM_SET_IN_STRUCT_P (to_rtx, 1);
4852      MEM_ALIAS_SET (to_rtx) = alias_set;
4853
4854      return store_expr (exp, to_rtx, value_mode != VOIDmode);
4855    }
4856}
4857
4858/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4859   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4860   ARRAY_REFs and find the ultimate containing object, which we return.
4861
4862   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4863   bit position, and *PUNSIGNEDP to the signedness of the field.
4864   If the position of the field is variable, we store a tree
4865   giving the variable offset (in units) in *POFFSET.
4866   This offset is in addition to the bit position.
4867   If the position is not variable, we store 0 in *POFFSET.
4868   We set *PALIGNMENT to the alignment in bytes of the address that will be
4869   computed.  This is the alignment of the thing we return if *POFFSET
4870   is zero, but can be less strictly aligned if *POFFSET is nonzero.
4871
4872   If any of the extraction expressions is volatile,
4873   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
4874
4875   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
4876   is a mode that can be used to access the field.  In that case, *PBITSIZE
4877   is redundant.
4878
4879   If the field describes a variable-sized object, *PMODE is set to
4880   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
4881   this case, but the address of the object can be found.   */
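/* Editorial example (hypothetical C): for EXP representing `s.a[3].b',
   where B is a 6-bit field at a constant position, get_inner_reference
   returns the tree for S, sets *PBITSIZE to 6, folds the positions of
   A, the index 3 and B into *PBITPOS, stores 0 in *POFFSET since
   nothing is variable, and sets *PMODE to VOIDmode because B is a
   bit-field.  */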
4882
4883tree
4884get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4885		     punsignedp, pvolatilep, palignment)
4886     tree exp;
4887     int *pbitsize;
4888     int *pbitpos;
4889     tree *poffset;
4890     enum machine_mode *pmode;
4891     int *punsignedp;
4892     int *pvolatilep;
4893     int *palignment;
4894{
4895  tree orig_exp = exp;
4896  tree size_tree = 0;
4897  enum machine_mode mode = VOIDmode;
4898  tree offset = integer_zero_node;
4899  unsigned int alignment = BIGGEST_ALIGNMENT;
4900
4901  if (TREE_CODE (exp) == COMPONENT_REF)
4902    {
4903      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4904      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4905	mode = DECL_MODE (TREE_OPERAND (exp, 1));
4906      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4907    }
4908  else if (TREE_CODE (exp) == BIT_FIELD_REF)
4909    {
4910      size_tree = TREE_OPERAND (exp, 1);
4911      *punsignedp = TREE_UNSIGNED (exp);
4912    }
4913  else
4914    {
4915      mode = TYPE_MODE (TREE_TYPE (exp));
4916      if (mode == BLKmode)
4917	size_tree = TYPE_SIZE (TREE_TYPE (exp));
4918
4919      *pbitsize = GET_MODE_BITSIZE (mode);
4920      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4921    }
4922
4923  if (size_tree)
4924    {
4925      if (TREE_CODE (size_tree) != INTEGER_CST)
4926	mode = BLKmode, *pbitsize = -1;
4927      else
4928	*pbitsize = TREE_INT_CST_LOW (size_tree);
4929    }
4930
4931  /* Compute cumulative bit-offset for nested component-refs and array-refs,
4932     and find the ultimate containing object.  */
4933
4934  *pbitpos = 0;
4935
4936  while (1)
4937    {
4938      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4939	{
4940	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
4941		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4942		      : TREE_OPERAND (exp, 2));
4943	  tree constant = integer_zero_node, var = pos;
4944
4945	  /* If this field hasn't been filled in yet, don't go
4946	     past it.  This should only happen when folding expressions
4947	     made during type construction.  */
4948	  if (pos == 0)
4949	    break;
4950
4951	  /* Assume here that the offset is a multiple of a unit.
4952	     If not, there should be an explicitly added constant.  */
4953	  if (TREE_CODE (pos) == PLUS_EXPR
4954	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4955	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4956	  else if (TREE_CODE (pos) == INTEGER_CST)
4957	    constant = pos, var = integer_zero_node;
4958
4959	  *pbitpos += TREE_INT_CST_LOW (constant);
4960	  offset = size_binop (PLUS_EXPR, offset,
4961			       size_binop (EXACT_DIV_EXPR, var,
4962					   size_int (BITS_PER_UNIT)));
4963	}
4964
4965      else if (TREE_CODE (exp) == ARRAY_REF)
4966	{
4967	  /* This code is based on the code in case ARRAY_REF in expand_expr
4968	     below.  We assume here that the size of an array element is
4969	     always an integral multiple of BITS_PER_UNIT.  */
4970
4971	  tree index = TREE_OPERAND (exp, 1);
4972	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4973	  tree low_bound
4974	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4975	  tree index_type = TREE_TYPE (index);
4976	  tree xindex;
4977
4978	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4979	    {
4980	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4981			       index);
4982	      index_type = TREE_TYPE (index);
4983	    }
4984
4985	  /* Optimize the special-case of a zero lower bound.
4986
4987	     We convert the low_bound to sizetype to avoid some problems
4988	     with constant folding.  (E.g. suppose the lower bound is 1,
4989	     and its mode is QI.  Without the conversion,  (ARRAY
4990	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4991	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
4992
4993	     But sizetype isn't quite right either (especially if
4994	     the lowbound is negative).  FIXME */
4995
4996	  if (! integer_zerop (low_bound))
4997	    index = fold (build (MINUS_EXPR, index_type, index,
4998				 convert (sizetype, low_bound)));
4999
5000	  if (TREE_CODE (index) == INTEGER_CST)
5001	    {
5002	      index = convert (sbitsizetype, index);
5003	      index_type = TREE_TYPE (index);
5004	    }
5005
5006	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5007			        convert (sbitsizetype,
5008					 TYPE_SIZE (TREE_TYPE (exp)))));
5009
5010	  if (TREE_CODE (xindex) == INTEGER_CST
5011	      && TREE_INT_CST_HIGH (xindex) == 0)
5012	    *pbitpos += TREE_INT_CST_LOW (xindex);
5013	  else
5014	    {
5015	      /* Either the bit offset calculated above is not constant, or
5016		 it overflowed.  In either case, redo the multiplication
5017		 against the size in units.  This is especially important
5018		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5022
5023	      if (contains_placeholder_p (xindex))
5024		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5025
5026	      offset = size_binop (PLUS_EXPR, offset, xindex);
5027	    }
5028	}
5029      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5030	       && ! ((TREE_CODE (exp) == NOP_EXPR
5031		      || TREE_CODE (exp) == CONVERT_EXPR)
5032		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
5033			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5034			       != UNION_TYPE))
5035		     && (TYPE_MODE (TREE_TYPE (exp))
5036			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5037	break;
5038
5039      /* If any reference in the chain is volatile, the effect is volatile.  */
5040      if (TREE_THIS_VOLATILE (exp))
5041	*pvolatilep = 1;
5042
      /* If the offset is already non-constant, we can't assume any
	 alignment stricter than the alignment of the type at this
	 level.  */
5045      if (! integer_zerop (offset))
5046	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5047
5048      exp = TREE_OPERAND (exp, 0);
5049    }
5050
5051  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5052    alignment = MIN (alignment, DECL_ALIGN (exp));
5053  else if (TREE_TYPE (exp) != 0)
5054    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5055
5056  if (integer_zerop (offset))
5057    offset = 0;
5058
5059  if (offset != 0 && contains_placeholder_p (offset))
5060    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5061
5062  *pmode = mode;
5063  *poffset = offset;
5064  *palignment = alignment / BITS_PER_UNIT;
5065  return exp;
5066}
5067
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5069static enum memory_use_mode
5070get_memory_usage_from_modifier (modifier)
5071     enum expand_modifier modifier;
5072{
5073  switch (modifier)
5074    {
5075    case EXPAND_NORMAL:
5076    case EXPAND_SUM:
5077      return MEMORY_USE_RO;
5079    case EXPAND_MEMORY_USE_WO:
5080      return MEMORY_USE_WO;
5082    case EXPAND_MEMORY_USE_RW:
5083      return MEMORY_USE_RW;
5085    case EXPAND_MEMORY_USE_DONT:
5086      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5087	 MEMORY_USE_DONT, because they are modifiers to a call of
5088	 expand_expr in the ADDR_EXPR case of expand_expr.  */
5089    case EXPAND_CONST_ADDRESS:
5090    case EXPAND_INITIALIZER:
5091      return MEMORY_USE_DONT;
5092    case EXPAND_MEMORY_USE_BAD:
5093    default:
5094      abort ();
5095    }
5096}
5097
5098/* Given an rtx VALUE that may contain additions and multiplications,
5099   return an equivalent value that just refers to a register or memory.
5100   This is done by generating instructions to perform the arithmetic
5101   and returning a pseudo-register containing the value.
5102
5103   The returned value may be a REG, SUBREG, MEM or constant.  */
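
/* For example, given VALUE = (plus:SI (reg:SI 100) (const_int 4)), we
   emit an add insn and return a pseudo register holding the sum, while
   a VALUE that is already a REG, a MEM or a constant is returned
   unchanged.  */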
5104
5105rtx
5106force_operand (value, target)
5107     rtx value, target;
5108{
5109  register optab binoptab = 0;
5110  /* Use a temporary to force order of execution of calls to
5111     `force_operand'.  */
5112  rtx tmp;
5113  register rtx op2;
5114  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget
    = (target != 0 && GET_CODE (target) == REG ? target : 0);
5116
5117  /* Check for a PIC address load.  */
5118  if (flag_pic
5119      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5120      && XEXP (value, 0) == pic_offset_table_rtx
5121      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5122	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5123	  || GET_CODE (XEXP (value, 1)) == CONST))
5124    {
5125      if (!subtarget)
5126	subtarget = gen_reg_rtx (GET_MODE (value));
5127      emit_move_insn (subtarget, value);
5128      return subtarget;
5129    }
5130
5131  if (GET_CODE (value) == PLUS)
5132    binoptab = add_optab;
5133  else if (GET_CODE (value) == MINUS)
5134    binoptab = sub_optab;
5135  else if (GET_CODE (value) == MULT)
5136    {
5137      op2 = XEXP (value, 1);
5138      if (!CONSTANT_P (op2)
5139	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5140	subtarget = 0;
5141      tmp = force_operand (XEXP (value, 0), subtarget);
5142      return expand_mult (GET_MODE (value), tmp,
5143			  force_operand (op2, NULL_RTX),
5144			  target, 0);
5145    }
5146
5147  if (binoptab)
5148    {
5149      op2 = XEXP (value, 1);
5150      if (!CONSTANT_P (op2)
5151	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5152	subtarget = 0;
5153      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5154	{
5155	  binoptab = add_optab;
5156	  op2 = negate_rtx (GET_MODE (value), op2);
5157	}
5158
5159      /* Check for an addition with OP2 a constant integer and our first
5160	 operand a PLUS of a virtual register and something else.  In that
5161	 case, we want to emit the sum of the virtual register and the
5162	 constant first and then add the other value.  This allows virtual
5163	 register instantiation to simply modify the constant rather than
5164	 creating another one around this addition.  */
5165      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5166	  && GET_CODE (XEXP (value, 0)) == PLUS
5167	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5168	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5169	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5170	{
5171	  rtx temp = expand_binop (GET_MODE (value), binoptab,
5172				   XEXP (XEXP (value, 0), 0), op2,
5173				   subtarget, 0, OPTAB_LIB_WIDEN);
5174	  return expand_binop (GET_MODE (value), binoptab, temp,
5175			       force_operand (XEXP (XEXP (value, 0), 1), 0),
5176			       target, 0, OPTAB_LIB_WIDEN);
5177	}
5178
      tmp = force_operand (XEXP (value, 0), subtarget);
      /* We give UNSIGNEDP = 0 to expand_binop because the only
	 operations we are expanding here are signed ones.  */
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
    }
5186  return value;
5187}
5188
5189/* Subroutine of expand_expr:
5190   save the non-copied parts (LIST) of an expr (LHS), and return a list
5191   which can restore these values to their previous values,
5192   should something modify their storage.  */
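
/* In the list returned, each TREE_PURPOSE is a COMPONENT_REF of LHS
   naming one such part, and each TREE_VALUE is an RTL_EXPR whose RTL is
   a temporary holding that part's current value.  */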
5193
5194static tree
5195save_noncopied_parts (lhs, list)
5196     tree lhs;
5197     tree list;
5198{
5199  tree tail;
5200  tree parts = 0;
5201
5202  for (tail = list; tail; tail = TREE_CHAIN (tail))
5203    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5204      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5205    else
5206      {
5207	tree part = TREE_VALUE (tail);
5208	tree part_type = TREE_TYPE (part);
5209	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5210	rtx target = assign_temp (part_type, 0, 1, 1);
5211	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5212	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5213	parts = tree_cons (to_be_saved,
5214			   build (RTL_EXPR, part_type, NULL_TREE,
5215				  (tree) target),
5216			   parts);
5217	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5218      }
5219  return parts;
5220}
5221
5222/* Subroutine of expand_expr:
5223   record the non-copied parts (LIST) of an expr (LHS), and return a list
5224   which specifies the initial values of these parts.  */
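
/* In the list returned, each TREE_PURPOSE is the expression for a
   part's initial value and each TREE_VALUE is a COMPONENT_REF of LHS
   naming the part to be initialized.  */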
5225
5226static tree
5227init_noncopied_parts (lhs, list)
5228     tree lhs;
5229     tree list;
5230{
5231  tree tail;
5232  tree parts = 0;
5233
5234  for (tail = list; tail; tail = TREE_CHAIN (tail))
5235    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5236      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5237    else if (TREE_PURPOSE (tail))
5238      {
5239	tree part = TREE_VALUE (tail);
5240	tree part_type = TREE_TYPE (part);
5241	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5242	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5243      }
5244  return parts;
5245}
5246
5247/* Subroutine of expand_expr: return nonzero iff there is no way that
5248   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5249   call is going to be used to determine whether we need a temporary
5250   for EXP, as opposed to a recursive call to this function.
5251
5252   It is always safe for this routine to return zero since it merely
5253   searches for optimization opportunities.  */
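
/* For instance, if EXP contains a CALL_EXPR whose result rtx is not yet
   known, we return 0 whenever X is a hard register or a MEM, since the
   call must be assumed to clobber all hard registers and all of
   memory.  */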
5254
5255static int
5256safe_from_p (x, exp, top_p)
5257     rtx x;
5258     tree exp;
5259     int top_p;
5260{
5261  rtx exp_rtl = 0;
5262  int i, nops;
5263  static int save_expr_count;
5264  static int save_expr_size = 0;
5265  static tree *save_expr_rewritten;
5266  static tree save_expr_trees[256];
5267
5268  if (x == 0
5269      /* If EXP has varying size, we MUST use a target since we currently
5270	 have no way of allocating temporaries of variable size
5271	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5272	 So we assume here that something at a higher level has prevented a
5273	 clash.  This is somewhat bogus, but the best we can do.  Only
5274	 do this when X is BLKmode and when we are at the top level.  */
5275      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5276	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5277	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5278	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5279	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5280	      != INTEGER_CST)
5281	  && GET_MODE (x) == BLKmode))
5282    return 1;
5283
5284  if (top_p && save_expr_size == 0)
5285    {
5286      int rtn;
5287
5288      save_expr_count = 0;
5289      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5290      save_expr_rewritten = &save_expr_trees[0];
5291
5292      rtn = safe_from_p (x, exp, 1);
5293
5294      for (i = 0; i < save_expr_count; ++i)
5295	{
5296	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5297	    abort ();
5298	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5299	}
5300
5301      save_expr_size = 0;
5302
5303      return rtn;
5304    }
5305
5306  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5307     find the underlying pseudo.  */
5308  if (GET_CODE (x) == SUBREG)
5309    {
5310      x = SUBREG_REG (x);
5311      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5312	return 0;
5313    }
5314
5315  /* If X is a location in the outgoing argument area, it is always safe.  */
5316  if (GET_CODE (x) == MEM
5317      && (XEXP (x, 0) == virtual_outgoing_args_rtx
5318	  || (GET_CODE (XEXP (x, 0)) == PLUS
5319	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5320    return 1;
5321
5322  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5323    {
5324    case 'd':
5325      exp_rtl = DECL_RTL (exp);
5326      break;
5327
5328    case 'c':
5329      return 1;
5330
5331    case 'x':
5332      if (TREE_CODE (exp) == TREE_LIST)
5333	return ((TREE_VALUE (exp) == 0
5334		 || safe_from_p (x, TREE_VALUE (exp), 0))
5335		&& (TREE_CHAIN (exp) == 0
5336		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5337      else if (TREE_CODE (exp) == ERROR_MARK)
5338	return 1;	/* An already-visited SAVE_EXPR? */
5339      else
5340	return 0;
5341
5342    case '1':
5343      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5344
5345    case '2':
5346    case '<':
5347      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5348	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5349
5350    case 'e':
5351    case 'r':
5352      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5353	 the expression.  If it is set, we conflict iff we are that rtx or
5354	 both are in memory.  Otherwise, we check all operands of the
5355	 expression recursively.  */
5356
5357      switch (TREE_CODE (exp))
5358	{
5359	case ADDR_EXPR:
5360	  return (staticp (TREE_OPERAND (exp, 0))
5361		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5362		  || TREE_STATIC (exp));
5363
5364	case INDIRECT_REF:
5365	  if (GET_CODE (x) == MEM)
5366	    return 0;
5367	  break;
5368
5369	case CALL_EXPR:
5370	  exp_rtl = CALL_EXPR_RTL (exp);
5371	  if (exp_rtl == 0)
5372	    {
5373	      /* Assume that the call will clobber all hard registers and
5374		 all of memory.  */
5375	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5376		  || GET_CODE (x) == MEM)
5377		return 0;
5378	    }
5379
5380	  break;
5381
5382	case RTL_EXPR:
5383	  /* If a sequence exists, we would have to scan every instruction
5384	     in the sequence to see if it was safe.  This is probably not
5385	     worthwhile.  */
5386	  if (RTL_EXPR_SEQUENCE (exp))
5387	    return 0;
5388
5389	  exp_rtl = RTL_EXPR_RTL (exp);
5390	  break;
5391
5392	case WITH_CLEANUP_EXPR:
5393	  exp_rtl = RTL_EXPR_RTL (exp);
5394	  break;
5395
5396	case CLEANUP_POINT_EXPR:
5397	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5398
5399	case SAVE_EXPR:
5400	  exp_rtl = SAVE_EXPR_RTL (exp);
5401	  if (exp_rtl)
5402	    break;
5403
5404	  /* This SAVE_EXPR might appear many times in the top-level
5405	     safe_from_p() expression, and if it has a complex
5406	     subexpression, examining it multiple times could result
5407	     in a combinatorial explosion.  E.g. on an Alpha
5408	     running at least 200MHz, a Fortran test case compiled with
5409	     optimization took about 28 minutes to compile -- even though
5410	     it was only a few lines long, and the complicated line causing
5411	     so much time to be spent in the earlier version of safe_from_p()
5412	     had only 293 or so unique nodes.
5413
5414	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5415	     where it is so we can turn it back in the top-level safe_from_p()
5416	     when we're done.  */
5417
	  /* For now, don't bother re-sizing the array.  */
5419	  if (save_expr_count >= save_expr_size)
5420	    return 0;
5421	  save_expr_rewritten[save_expr_count++] = exp;
5422
5423	  nops = tree_code_length[(int) SAVE_EXPR];
5424	  for (i = 0; i < nops; i++)
5425	    {
5426	      tree operand = TREE_OPERAND (exp, i);
5427	      if (operand == NULL_TREE)
5428		continue;
5429	      TREE_SET_CODE (exp, ERROR_MARK);
5430	      if (!safe_from_p (x, operand, 0))
5431		return 0;
5432	      TREE_SET_CODE (exp, SAVE_EXPR);
5433	    }
5434	  TREE_SET_CODE (exp, ERROR_MARK);
5435	  return 1;
5436
5437	case BIND_EXPR:
5438	  /* The only operand we look at is operand 1.  The rest aren't
5439	     part of the expression.  */
5440	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5441
5442	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
5444	  abort ();
5445
5446	default:
5447	  break;
5448	}
5449
5450      /* If we have an rtx, we do not need to scan our operands.  */
5451      if (exp_rtl)
5452	break;
5453
5454      nops = tree_code_length[(int) TREE_CODE (exp)];
5455      for (i = 0; i < nops; i++)
5456	if (TREE_OPERAND (exp, i) != 0
5457	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5458	  return 0;
5459    }
5460
5461  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5462     with it.  */
5463  if (exp_rtl)
5464    {
5465      if (GET_CODE (exp_rtl) == SUBREG)
5466	{
5467	  exp_rtl = SUBREG_REG (exp_rtl);
5468	  if (GET_CODE (exp_rtl) == REG
5469	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5470	    return 0;
5471	}
5472
      /* If the rtl is X, then it is not safe.  Otherwise, it is safe
	 unless both are in memory and EXP is not readonly.  */
5475      return ! (rtx_equal_p (x, exp_rtl)
5476		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5477		    && ! TREE_READONLY (exp)));
5478    }
5479
5480  /* If we reach here, it is safe.  */
5481  return 1;
5482}
5483
5484/* Subroutine of expand_expr: return nonzero iff EXP is an
5485   expression whose type is statically determinable.  */
5486
5487static int
5488fixed_type_p (exp)
5489     tree exp;
5490{
5491  if (TREE_CODE (exp) == PARM_DECL
5492      || TREE_CODE (exp) == VAR_DECL
5493      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5494      || TREE_CODE (exp) == COMPONENT_REF
5495      || TREE_CODE (exp) == ARRAY_REF)
5496    return 1;
5497  return 0;
5498}
5499
5500/* Subroutine of expand_expr: return rtx if EXP is a
5501   variable or parameter; else return 0.  */
5502
5503static rtx
5504var_rtx (exp)
5505     tree exp;
5506{
5507  STRIP_NOPS (exp);
5508  switch (TREE_CODE (exp))
5509    {
5510    case PARM_DECL:
5511    case VAR_DECL:
5512      return DECL_RTL (exp);
5513    default:
5514      return 0;
5515    }
5516}
5517
5518#ifdef MAX_INTEGER_COMPUTATION_MODE
5519void
5520check_max_integer_computation_mode (exp)
     tree exp;
5522{
5523  enum tree_code code = TREE_CODE (exp);
5524  enum machine_mode mode;
5525
5526  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5527  if (code == NOP_EXPR
5528      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5529    return;
5530
5531  /* First check the type of the overall operation.   We need only look at
5532     unary, binary and relational operations.  */
5533  if (TREE_CODE_CLASS (code) == '1'
5534      || TREE_CODE_CLASS (code) == '2'
5535      || TREE_CODE_CLASS (code) == '<')
5536    {
5537      mode = TYPE_MODE (TREE_TYPE (exp));
5538      if (GET_MODE_CLASS (mode) == MODE_INT
5539	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5540	fatal ("unsupported wide integer operation");
5541    }
5542
5543  /* Check operand of a unary op.  */
5544  if (TREE_CODE_CLASS (code) == '1')
5545    {
5546      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5547      if (GET_MODE_CLASS (mode) == MODE_INT
5548	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5549	fatal ("unsupported wide integer operation");
5550    }
5551
5552  /* Check operands of a binary/comparison op.  */
5553  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5554    {
5555      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5556      if (GET_MODE_CLASS (mode) == MODE_INT
5557	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5558	fatal ("unsupported wide integer operation");
5559
5560      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5561      if (GET_MODE_CLASS (mode) == MODE_INT
5562	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5563	fatal ("unsupported wide integer operation");
5564    }
5565}
5566#endif
5567
5568
5569/* expand_expr: generate code for computing expression EXP.
5570   An rtx for the computed value is returned.  The value is never null.
5571   In the case of a void EXP, const0_rtx is returned.
5572
5573   The value may be stored in TARGET if TARGET is nonzero.
5574   TARGET is just a suggestion; callers must assume that
5575   the rtx returned may not be the same as TARGET.
5576
5577   If TARGET is CONST0_RTX, it means that the value will be ignored.
5578
5579   If TMODE is not VOIDmode, it suggests generating the
5580   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
5582   TMODE is just a suggestion; callers must assume that
5583   the rtx returned may not have mode TMODE.
5584
5585   Note that TARGET may have neither TMODE nor MODE.  In that case, it
5586   probably will not be used.
5587
5588   If MODIFIER is EXPAND_SUM then when EXP is an addition
5589   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5590   or a nest of (PLUS ...) and (MINUS ...) where the terms are
5591   products as above, or REG or MEM, or constant.
5592   Ordinarily in such cases we would output mul or add instructions
5593   and then return a pseudo reg containing the sum.
5594
5595   EXPAND_INITIALIZER is much like EXPAND_SUM except that
5596   it also marks a label as absolutely required (it can't be dead).
5597   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5598   This is used for outputting expressions used in initializers.
5599
5600   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5601   with a constant address even if that address is not normally legitimate.
5602   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
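
/* A typical call (for illustration) is

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which lets expand_expr choose both the target and the mode.  Passing
   const0_rtx as TARGET instead requests only the side effects of EXP.  */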
5603
5604rtx
5605expand_expr (exp, target, tmode, modifier)
5606     register tree exp;
5607     rtx target;
5608     enum machine_mode tmode;
5609     enum expand_modifier modifier;
5610{
5611  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5612     This is static so it will be accessible to our recursive callees.  */
5613  static tree placeholder_list = 0;
5614  register rtx op0, op1, temp;
5615  tree type = TREE_TYPE (exp);
5616  int unsignedp = TREE_UNSIGNED (type);
5617  register enum machine_mode mode;
5618  register enum tree_code code = TREE_CODE (exp);
5619  optab this_optab;
5620  rtx subtarget, original_target;
5621  int ignore;
5622  tree context;
5623  /* Used by check-memory-usage to make modifier read only.  */
5624  enum expand_modifier ro_modifier;
5625
5626  /* Handle ERROR_MARK before anybody tries to access its type. */
5627  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5628    {
5629      op0 = CONST0_RTX (tmode);
5630      if (op0 != 0)
5631	return op0;
5632      return const0_rtx;
5633    }
5634
5635  mode = TYPE_MODE (type);
5636  /* Use subtarget as the target for operand 0 of a binary operation.  */
5637  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5638  original_target = target;
5639  ignore = (target == const0_rtx
5640	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5641		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5642		 || code == COND_EXPR)
5643		&& TREE_CODE (type) == VOID_TYPE));
5644
5645  /* Make a read-only version of the modifier.  */
5646  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5647      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5648    ro_modifier = modifier;
5649  else
5650    ro_modifier = EXPAND_NORMAL;
5651
5652  /* Don't use hard regs as subtargets, because the combiner
5653     can only handle pseudo regs.  */
5654  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5655    subtarget = 0;
5656  /* Avoid subtargets inside loops,
5657     since they hide some invariant expressions.  */
5658  if (preserve_subexpressions_p ())
5659    subtarget = 0;
5660
5661  /* If we are going to ignore this result, we need only do something
5662     if there is a side-effect somewhere in the expression.  If there
5663     is, short-circuit the most common cases here.  Note that we must
5664     not call expand_expr with anything but const0_rtx in case this
5665     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
5666
5667  if (ignore)
5668    {
5669      if (! TREE_SIDE_EFFECTS (exp))
5670	return const0_rtx;
5671
      /* Ensure we reference a volatile object even if the value is ignored.  */
5673      if (TREE_THIS_VOLATILE (exp)
5674	  && TREE_CODE (exp) != FUNCTION_DECL
5675	  && mode != VOIDmode && mode != BLKmode)
5676	{
5677	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5678	  if (GET_CODE (temp) == MEM)
5679	    temp = copy_to_reg (temp);
5680	  return const0_rtx;
5681	}
5682
5683      if (TREE_CODE_CLASS (code) == '1')
5684	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5685			    VOIDmode, ro_modifier);
5686      else if (TREE_CODE_CLASS (code) == '2'
5687	       || TREE_CODE_CLASS (code) == '<')
5688	{
5689	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5690	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5691	  return const0_rtx;
5692	}
5693      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5694	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5695	/* If the second operand has no side effects, just evaluate
5696	   the first.  */
5697	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5698			    VOIDmode, ro_modifier);
5699
5700      target = 0;
5701    }
5702
5703#ifdef MAX_INTEGER_COMPUTATION_MODE
5704  if (target
5705      && TREE_CODE (exp) != INTEGER_CST
5706      && TREE_CODE (exp) != PARM_DECL
5707      && TREE_CODE (exp) != ARRAY_REF
5708      && TREE_CODE (exp) != COMPONENT_REF
5709      && TREE_CODE (exp) != BIT_FIELD_REF
5710      && TREE_CODE (exp) != INDIRECT_REF
5711      && TREE_CODE (exp) != CALL_EXPR
5712      && TREE_CODE (exp) != VAR_DECL)
5713    {
5714      enum machine_mode mode = GET_MODE (target);
5715
5716      if (GET_MODE_CLASS (mode) == MODE_INT
5717	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5718	fatal ("unsupported wide integer operation");
5719    }
5720
5721  if (TREE_CODE (exp) != INTEGER_CST
5722      && TREE_CODE (exp) != PARM_DECL
5723      && TREE_CODE (exp) != ARRAY_REF
5724      && TREE_CODE (exp) != COMPONENT_REF
5725      && TREE_CODE (exp) != BIT_FIELD_REF
5726      && TREE_CODE (exp) != INDIRECT_REF
5727      && TREE_CODE (exp) != VAR_DECL
5728      && TREE_CODE (exp) != CALL_EXPR
5729      && GET_MODE_CLASS (tmode) == MODE_INT
5730      && tmode > MAX_INTEGER_COMPUTATION_MODE)
5731    fatal ("unsupported wide integer operation");
5732
5733  check_max_integer_computation_mode (exp);
5734#endif
5735
5736  /* If will do cse, generate all results into pseudo registers
5737     since 1) that allows cse to find more things
5738     and 2) otherwise cse could produce an insn the machine
5739     cannot support.  */
5740
5741  if (! cse_not_expected && mode != BLKmode && target
5742      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5743    target = subtarget;
5744
5745  switch (code)
5746    {
5747    case LABEL_DECL:
5748      {
5749	tree function = decl_function_context (exp);
5750	/* Handle using a label in a containing function.  */
5751	if (function != current_function_decl
5752	    && function != inline_function_decl && function != 0)
5753	  {
5754	    struct function *p = find_function_data (function);
5755	    /* Allocate in the memory associated with the function
5756	       that the label is in.  */
5757	    push_obstacks (p->function_obstack,
5758			   p->function_maybepermanent_obstack);
5759
5760	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5761						  label_rtx (exp),
5762						  p->forced_labels);
5763	    pop_obstacks ();
5764	  }
5765	else
5766	  {
5767	    if (modifier == EXPAND_INITIALIZER)
5768	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5769						 label_rtx (exp),
5770						 forced_labels);
5771	  }
5772	temp = gen_rtx_MEM (FUNCTION_MODE,
5773			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5774	if (function != current_function_decl
5775	    && function != inline_function_decl && function != 0)
5776	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5777	return temp;
5778      }
5779
5780    case PARM_DECL:
5781      if (DECL_RTL (exp) == 0)
5782	{
5783	  error_with_decl (exp, "prior parameter's size depends on `%s'");
5784	  return CONST0_RTX (mode);
5785	}
5786
5787      /* ... fall through ...  */
5788
5789    case VAR_DECL:
5790      /* If a static var's type was incomplete when the decl was written,
5791	 but the type is complete now, lay out the decl now.  */
5792      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5793	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5794	{
5795	  push_obstacks_nochange ();
5796	  end_temporary_allocation ();
5797	  layout_decl (exp, 0);
5798	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5799	  pop_obstacks ();
5800	}
5801
5802      /* Although static-storage variables start off initialized, according to
5803	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
5804	 we check them too.  This also lets us check for read-only variables
5805	 accessed via a non-const declaration, in case it won't be detected
5806	 any other way (e.g., in an embedded system or OS kernel without
5807	 memory protection).
5808
5809	 Aggregates are not checked here; they're handled elsewhere.  */
5810      if (current_function_check_memory_usage && code == VAR_DECL
5811	  && GET_CODE (DECL_RTL (exp)) == MEM
5812	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5813	{
5814	  enum memory_use_mode memory_usage;
5815	  memory_usage = get_memory_usage_from_modifier (modifier);
5816
5817	  if (memory_usage != MEMORY_USE_DONT)
5818	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5819			       XEXP (DECL_RTL (exp), 0), Pmode,
5820			       GEN_INT (int_size_in_bytes (type)),
5821			       TYPE_MODE (sizetype),
5822			       GEN_INT (memory_usage),
5823			       TYPE_MODE (integer_type_node));
5824	}
5825
5826      /* ... fall through ...  */
5827
5828    case FUNCTION_DECL:
5829    case RESULT_DECL:
5830      if (DECL_RTL (exp) == 0)
5831	abort ();
5832
      /* Ensure the variable is marked as used even if it doesn't go
	 through a parser.  If it hasn't been used yet, write out an
	 external definition.  */
5836      if (! TREE_USED (exp))
5837	{
5838	  assemble_external (exp);
5839	  TREE_USED (exp) = 1;
5840	}
5841
5842      /* Show we haven't gotten RTL for this yet.  */
5843      temp = 0;
5844
5845      /* Handle variables inherited from containing functions.  */
5846      context = decl_function_context (exp);
5847
5848      /* We treat inline_function_decl as an alias for the current function
5849	 because that is the inline function whose vars, types, etc.
5850	 are being merged into the current function.
5851	 See expand_inline_function.  */
5852
5853      if (context != 0 && context != current_function_decl
5854	  && context != inline_function_decl
5855	  /* If var is static, we don't need a static chain to access it.  */
5856	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
5857		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5858	{
5859	  rtx addr;
5860
5861	  /* Mark as non-local and addressable.  */
5862	  DECL_NONLOCAL (exp) = 1;
5863	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
5864	    abort ();
5865	  mark_addressable (exp);
5866	  if (GET_CODE (DECL_RTL (exp)) != MEM)
5867	    abort ();
5868	  addr = XEXP (DECL_RTL (exp), 0);
5869	  if (GET_CODE (addr) == MEM)
5870	    addr = gen_rtx_MEM (Pmode,
5871				fix_lexical_addr (XEXP (addr, 0), exp));
5872	  else
5873	    addr = fix_lexical_addr (addr, exp);
5874	  temp = change_address (DECL_RTL (exp), mode, addr);
5875	}
5876
5877      /* This is the case of an array whose size is to be determined
5878	 from its initializer, while the initializer is still being parsed.
5879	 See expand_decl.  */
5880
5881      else if (GET_CODE (DECL_RTL (exp)) == MEM
5882	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5883	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5884			       XEXP (DECL_RTL (exp), 0));
5885
      /* If DECL_RTL is memory, we are in the normal case.  If either
	 the address is not valid, or it is not a register and
	 -fforce-addr is specified, get the address into a register.  */
5889
5890      else if (GET_CODE (DECL_RTL (exp)) == MEM
5891	       && modifier != EXPAND_CONST_ADDRESS
5892	       && modifier != EXPAND_SUM
5893	       && modifier != EXPAND_INITIALIZER
5894	       && (! memory_address_p (DECL_MODE (exp),
5895				       XEXP (DECL_RTL (exp), 0))
5896		   || (flag_force_addr
5897		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5898	temp = change_address (DECL_RTL (exp), VOIDmode,
5899			       copy_rtx (XEXP (DECL_RTL (exp), 0)));
5900
      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
5903      if (temp != 0)
5904	{
5905	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5906	    mark_reg_pointer (XEXP (temp, 0),
5907			      DECL_ALIGN (exp) / BITS_PER_UNIT);
5908
5909	  return temp;
5910	}
5911
5912      /* If the mode of DECL_RTL does not match that of the decl, it
5913	 must be a promoted value.  We return a SUBREG of the wanted mode,
5914	 but mark it so that we know that it was already extended.  */
5915
5916      if (GET_CODE (DECL_RTL (exp)) == REG
5917	  && GET_MODE (DECL_RTL (exp)) != mode)
5918	{
5919	  /* Get the signedness used for this variable.  Ensure we get the
5920	     same mode we got when the variable was declared.  */
5921	  if (GET_MODE (DECL_RTL (exp))
5922	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5923	    abort ();
5924
5925	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5926	  SUBREG_PROMOTED_VAR_P (temp) = 1;
5927	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5928	  return temp;
5929	}
5930
5931      return DECL_RTL (exp);
5932
5933    case INTEGER_CST:
5934      return immed_double_const (TREE_INT_CST_LOW (exp),
5935				 TREE_INT_CST_HIGH (exp),
5936				 mode);
5937
5938    case CONST_DECL:
5939      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5940      			  EXPAND_MEMORY_USE_BAD);
5941
5942    case REAL_CST:
5943      /* If optimized, generate immediate CONST_DOUBLE
5944	 which will be turned into memory by reload if necessary.
5945
5946	 We used to force a register so that loop.c could see it.  But
5947	 this does not allow gen_* patterns to perform optimizations with
5948	 the constants.  It also produces two insns in cases like "x = 1.0;".
5949	 On most machines, floating-point constants are not permitted in
5950	 many insns, so we'd end up copying it to a register in any case.
5951
5952	 Now, we do the copying in expand_binop, if appropriate.  */
5953      return immed_real_const (exp);
5954
5955    case COMPLEX_CST:
5956    case STRING_CST:
5957      if (! TREE_CST_RTL (exp))
5958	output_constant_def (exp);
5959
5960      /* TREE_CST_RTL probably contains a constant address.
5961	 On RISC machines where a constant address isn't valid,
5962	 make some insns to get that address into a register.  */
5963      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5964	  && modifier != EXPAND_CONST_ADDRESS
5965	  && modifier != EXPAND_INITIALIZER
5966	  && modifier != EXPAND_SUM
5967	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5968	      || (flag_force_addr
5969		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5970	return change_address (TREE_CST_RTL (exp), VOIDmode,
5971			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5972      return TREE_CST_RTL (exp);
5973
5974    case EXPR_WITH_FILE_LOCATION:
5975      {
5976	rtx to_return;
5977	char *saved_input_filename = input_filename;
5978	int saved_lineno = lineno;
5979	input_filename = EXPR_WFL_FILENAME (exp);
5980	lineno = EXPR_WFL_LINENO (exp);
5981	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5982	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
5984	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5985	input_filename = saved_input_filename;
5986	lineno = saved_lineno;
5987	return to_return;
5988      }
5989
5990    case SAVE_EXPR:
5991      context = decl_function_context (exp);
5992
5993      /* If this SAVE_EXPR was at global context, assume we are an
5994	 initialization function and move it into our context.  */
5995      if (context == 0)
5996	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5997
5998      /* We treat inline_function_decl as an alias for the current function
5999	 because that is the inline function whose vars, types, etc.
6000	 are being merged into the current function.
6001	 See expand_inline_function.  */
6002      if (context == current_function_decl || context == inline_function_decl)
6003	context = 0;
6004
6005      /* If this is non-local, handle it.  */
6006      if (context)
6007	{
	  /* The following call just exists to abort if the context is
	     not that of a containing function.  */
6010	  find_function_data (context);
6011
6012	  temp = SAVE_EXPR_RTL (exp);
6013	  if (temp && GET_CODE (temp) == REG)
6014	    {
6015	      put_var_into_stack (exp);
6016	      temp = SAVE_EXPR_RTL (exp);
6017	    }
6018	  if (temp == 0 || GET_CODE (temp) != MEM)
6019	    abort ();
6020	  return change_address (temp, mode,
6021				 fix_lexical_addr (XEXP (temp, 0), exp));
6022	}
6023      if (SAVE_EXPR_RTL (exp) == 0)
6024	{
6025	  if (mode == VOIDmode)
6026	    temp = const0_rtx;
6027	  else
6028	    temp = assign_temp (type, 3, 0, 0);
6029
6030	  SAVE_EXPR_RTL (exp) = temp;
6031	  if (!optimize && GET_CODE (temp) == REG)
6032	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6033						save_expr_regs);
6034
6035	  /* If the mode of TEMP does not match that of the expression, it
6036	     must be a promoted value.  We pass store_expr a SUBREG of the
6037	     wanted mode but mark it so that we know that it was already
6038	     extended.  Note that `unsignedp' was modified above in
6039	     this case.  */
6040
6041	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6042	    {
6043	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6044	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6045	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6046	    }
6047
6048	  if (temp == const0_rtx)
6049	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6050			 EXPAND_MEMORY_USE_BAD);
6051	  else
6052	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6053
6054	  TREE_USED (exp) = 1;
6055	}
6056
6057      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6058	 must be a promoted value.  We return a SUBREG of the wanted mode,
6059	 but mark it so that we know that it was already extended.  */
6060
6061      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6062	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6063	{
6064	  /* Compute the signedness and make the proper SUBREG.  */
6065	  promote_mode (type, mode, &unsignedp, 0);
6066	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6067	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6068	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6069	  return temp;
6070	}
6071
6072      return SAVE_EXPR_RTL (exp);
6073
6074    case UNSAVE_EXPR:
6075      {
6076	rtx temp;
6077	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6078	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6079	return temp;
6080      }
6081
6082    case PLACEHOLDER_EXPR:
6083      {
6084	tree placeholder_expr;
6085
	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or is a pointer to
	   it.  For further information, see tree.def.  */
6089	for (placeholder_expr = placeholder_list;
6090	     placeholder_expr != 0;
6091	     placeholder_expr = TREE_CHAIN (placeholder_expr))
6092	  {
6093	    tree need_type = TYPE_MAIN_VARIANT (type);
6094	    tree object = 0;
6095	    tree old_list = placeholder_list;
6096	    tree elt;
6097
6098	    /* Find the outermost reference that is of the type we want.
6099	       If none, see if any object has a type that is a pointer to
6100	       the type we want.  */
6101	    for (elt = TREE_PURPOSE (placeholder_expr);
6102		 elt != 0 && object == 0;
6103		 elt
6104		 = ((TREE_CODE (elt) == COMPOUND_EXPR
6105		     || TREE_CODE (elt) == COND_EXPR)
6106		    ? TREE_OPERAND (elt, 1)
6107		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6108		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6109		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6110		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6111		    ? TREE_OPERAND (elt, 0) : 0))
6112	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6113		object = elt;
6114
6115	    for (elt = TREE_PURPOSE (placeholder_expr);
6116		 elt != 0 && object == 0;
6117		 elt
6118		 = ((TREE_CODE (elt) == COMPOUND_EXPR
6119		     || TREE_CODE (elt) == COND_EXPR)
6120		    ? TREE_OPERAND (elt, 1)
6121		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6122		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6123		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6124		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6125		    ? TREE_OPERAND (elt, 0) : 0))
6126	      if (POINTER_TYPE_P (TREE_TYPE (elt))
6127		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6128		      == need_type))
6129		object = build1 (INDIRECT_REF, need_type, elt);
6130
6131	    if (object != 0)
6132	      {
		/* Expand this object, skipping the list entries before
		   the one where it was found, in case the object is
		   itself a PLACEHOLDER_EXPR; if so, we want to translate
		   it using subsequent entries.  */
6137		placeholder_list = TREE_CHAIN (placeholder_expr);
6138		temp = expand_expr (object, original_target, tmode,
6139				    ro_modifier);
6140		placeholder_list = old_list;
6141		return temp;
6142	      }
6143	  }
6144      }
6145
6146      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6147      abort ();
6148
6149    case WITH_RECORD_EXPR:
6150      /* Put the object on the placeholder list, expand our first operand,
6151	 and pop the list.  */
6152      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6153				    placeholder_list);
6154      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6155			    tmode, ro_modifier);
6156      placeholder_list = TREE_CHAIN (placeholder_list);
6157      return target;
6158
6159    case GOTO_EXPR:
6160      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6161	expand_goto (TREE_OPERAND (exp, 0));
6162      else
6163	expand_computed_goto (TREE_OPERAND (exp, 0));
6164      return const0_rtx;
6165
6166    case EXIT_EXPR:
6167      expand_exit_loop_if_false (NULL_PTR,
6168				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6169      return const0_rtx;
6170
6171    case LABELED_BLOCK_EXPR:
6172      if (LABELED_BLOCK_BODY (exp))
6173	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6174      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6175      return const0_rtx;
6176
6177    case EXIT_BLOCK_EXPR:
6178      if (EXIT_BLOCK_RETURN (exp))
6179	sorry ("returned value in block_exit_expr");
6180      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6181      return const0_rtx;
6182
6183    case LOOP_EXPR:
6184      push_temp_slots ();
6185      expand_start_loop (1);
6186      expand_expr_stmt (TREE_OPERAND (exp, 0));
6187      expand_end_loop ();
6188      pop_temp_slots ();
6189
6190      return const0_rtx;
6191
6192    case BIND_EXPR:
6193      {
6194	tree vars = TREE_OPERAND (exp, 0);
6195	int vars_need_expansion = 0;
6196
6197	/* Need to open a binding contour here because
6198	   if there are any cleanups they must be contained here.  */
6199	expand_start_bindings (0);
6200
6201	/* Mark the corresponding BLOCK for output in its proper place.  */
6202	if (TREE_OPERAND (exp, 2) != 0
6203	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6204	  insert_block (TREE_OPERAND (exp, 2));
6205
6206	/* If VARS have not yet been expanded, expand them now.  */
6207	while (vars)
6208	  {
6209	    if (DECL_RTL (vars) == 0)
6210	      {
6211		vars_need_expansion = 1;
6212		expand_decl (vars);
6213	      }
6214	    expand_decl_init (vars);
6215	    vars = TREE_CHAIN (vars);
6216	  }
6217
6218	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6219
6220	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6221
6222	return temp;
6223      }
6224
6225    case RTL_EXPR:
6226      if (RTL_EXPR_SEQUENCE (exp))
6227	{
6228	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6229	    abort ();
6230	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6231	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6232	}
6233      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6234      free_temps_for_rtl_expr (exp);
6235      return RTL_EXPR_RTL (exp);
6236
6237    case CONSTRUCTOR:
6238      /* If we don't need the result, just ensure we evaluate any
6239	 subexpressions.  */
6240      if (ignore)
6241	{
6242	  tree elt;
6243	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6244	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6245	    		 EXPAND_MEMORY_USE_BAD);
6246	  return const0_rtx;
6247	}
6248
6249      /* All elts simple constants => refer to a constant in memory.  But
6250	 if this is a non-BLKmode mode, let it store a field at a time
6251	 since that should make a CONST_INT or CONST_DOUBLE when we
6252	 fold.  Likewise, if we have a target we can use, it is best to
6253	 store directly into the target unless the type is large enough
6254	 that memcpy will be used.  If we are making an initializer and
6255	 all operands are constant, put it in memory as well.  */
6256      else if ((TREE_STATIC (exp)
6257		&& ((mode == BLKmode
6258		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6259		    || TREE_ADDRESSABLE (exp)
6260		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (! MOVE_BY_PIECES_P
			     (TREE_INT_CST_LOW (TYPE_SIZE (type))
			      / BITS_PER_UNIT,
			      TYPE_ALIGN (type) / BITS_PER_UNIT))
6264			&& ! mostly_zeros_p (exp))))
6265	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6266	{
6267	  rtx constructor = output_constant_def (exp);
6268	  if (modifier != EXPAND_CONST_ADDRESS
6269	      && modifier != EXPAND_INITIALIZER
6270	      && modifier != EXPAND_SUM
6271	      && (! memory_address_p (GET_MODE (constructor),
6272				      XEXP (constructor, 0))
6273		  || (flag_force_addr
6274		      && GET_CODE (XEXP (constructor, 0)) != REG)))
6275	    constructor = change_address (constructor, VOIDmode,
6276					  XEXP (constructor, 0));
6277	  return constructor;
6278	}
6279
6280      else
6281	{
6282	  /* Handle calls that pass values in multiple non-contiguous
6283	     locations.  The Irix 6 ABI has examples of this.  */
6284	  if (target == 0 || ! safe_from_p (target, exp, 1)
6285	      || GET_CODE (target) == PARALLEL)
6286	    {
6287	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6288		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6289	      else
6290		target = assign_temp (type, 0, 1, 1);
6291	    }
6292
6293	  if (TREE_READONLY (exp))
6294	    {
6295	      if (GET_CODE (target) == MEM)
6296		target = copy_rtx (target);
6297
6298	      RTX_UNCHANGING_P (target) = 1;
6299	    }
6300
6301	  store_constructor (exp, target, 0);
6302	  return target;
6303	}
6304
6305    case INDIRECT_REF:
6306      {
6307	tree exp1 = TREE_OPERAND (exp, 0);
6308	tree exp2;
6309	tree index;
6310 	tree string = string_constant (exp1, &index);
6311 	int i;
6312
6313	/* Try to optimize reads from const strings.  */
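	/* E.g. a read such as *("abc" + 1) can be folded directly to the
	   constant 'b'.  */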
6314 	if (string
6315 	    && TREE_CODE (string) == STRING_CST
6316 	    && TREE_CODE (index) == INTEGER_CST
6317 	    && !TREE_INT_CST_HIGH (index)
6318 	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6319 	    && GET_MODE_CLASS (mode) == MODE_INT
6320 	    && GET_MODE_SIZE (mode) == 1
6321	    && modifier != EXPAND_MEMORY_USE_WO)
6322 	  return GEN_INT (TREE_STRING_POINTER (string)[i]);
6323
6324	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6325	op0 = memory_address (mode, op0);
6326
6327	if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6328	  {
6329	    enum memory_use_mode memory_usage;
6330	    memory_usage = get_memory_usage_from_modifier (modifier);
6331
6332            if (memory_usage != MEMORY_USE_DONT)
6333	      {
6334		in_check_memory_usage = 1;
6335		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6336				   op0, Pmode,
6337				   GEN_INT (int_size_in_bytes (type)),
6338				   TYPE_MODE (sizetype),
6339				   GEN_INT (memory_usage),
6340				   TYPE_MODE (integer_type_node));
6341		in_check_memory_usage = 0;
6342	      }
6343	  }
6344
6345	temp = gen_rtx_MEM (mode, op0);
6346
6347	if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6348	    || (TREE_CODE (exp1) == ADDR_EXPR
6349		&& (exp2 = TREE_OPERAND (exp1, 0))
6350		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6351	  MEM_SET_IN_STRUCT_P (temp, 1);
6352
6353	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6354	MEM_ALIAS_SET (temp) = get_alias_set (exp);
6355
6356	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6357	   here, because, in C and C++, the fact that a location is accessed
6358	   through a pointer to const does not mean that the value there can
6359	   never change.  Languages where it can never change should
6360	   also set TREE_STATIC.  */
6361	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6362	return temp;
6363      }
6364
6365    case ARRAY_REF:
6366      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6367	abort ();
6368
6369      {
6370	tree array = TREE_OPERAND (exp, 0);
6371	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6372	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6373	tree index = TREE_OPERAND (exp, 1);
6374	tree index_type = TREE_TYPE (index);
6375	HOST_WIDE_INT i;
6376
6377	/* Optimize the special-case of a zero lower bound.
6378
6379	   We convert the low_bound to sizetype to avoid some problems
6380	   with constant folding.  (E.g. suppose the lower bound is 1,
6381	   and its mode is QI.  Without the conversion,  (ARRAY
6382	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6383	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
6384
6385	   But sizetype isn't quite right either (especially if
6386	   the lowbound is negative).  FIXME */
6387
6388	if (! integer_zerop (low_bound))
6389	  index = fold (build (MINUS_EXPR, index_type, index,
6390			       convert (sizetype, low_bound)));
6391
6392	/* Fold an expression like: "foo"[2].
6393	   This is not done in fold so it won't happen inside &.
6394	   Don't fold if this is for wide characters since it's too
6395	   difficult to do correctly and this is a very rare case.  */
6396
6397	if (TREE_CODE (array) == STRING_CST
6398	    && TREE_CODE (index) == INTEGER_CST
6399	    && !TREE_INT_CST_HIGH (index)
6400	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6401	    && GET_MODE_CLASS (mode) == MODE_INT
6402	    && GET_MODE_SIZE (mode) == 1)
6403	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
6404
6405	/* If this is a constant index into a constant array,
6406	   just get the value from the array.  Handle both the cases when
6407	   we have an explicit constructor and when our operand is a variable
6408	   that was declared const.  */
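	/* E.g. given  static const int tbl[3] = { 10, 20, 30 };  a read
	   of tbl[1] can (when optimizing) be expanded directly to the
	   constant 20.  */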
6409
6410	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6411	  {
6412	    if (TREE_CODE (index) == INTEGER_CST
6413		&& TREE_INT_CST_HIGH (index) == 0)
6414	      {
6415		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6416
6417		i = TREE_INT_CST_LOW (index);
6418		while (elem && i--)
6419		  elem = TREE_CHAIN (elem);
6420		if (elem)
6421		  return expand_expr (fold (TREE_VALUE (elem)), target,
6422				      tmode, ro_modifier);
6423	      }
6424	  }
6425
6426	else if (optimize >= 1
6427		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6428		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6429		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6430	  {
6431	    if (TREE_CODE (index) == INTEGER_CST)
6432	      {
6433		tree init = DECL_INITIAL (array);
6434
6435		i = TREE_INT_CST_LOW (index);
6436		if (TREE_CODE (init) == CONSTRUCTOR)
6437		  {
6438		    tree elem = CONSTRUCTOR_ELTS (init);
6439
6440		    while (elem
6441			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6442		      elem = TREE_CHAIN (elem);
6443		    if (elem)
6444		      return expand_expr (fold (TREE_VALUE (elem)), target,
6445					  tmode, ro_modifier);
6446		  }
6447		else if (TREE_CODE (init) == STRING_CST
6448			 && TREE_INT_CST_HIGH (index) == 0
6449			 && (TREE_INT_CST_LOW (index)
6450			     < TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return (GEN_INT
			      (TREE_STRING_POINTER
			       (init)[TREE_INT_CST_LOW (index)]));
		  }
6461	      }
6462	  }
6463      }
6464
6465      /* ... fall through ... */
6466
6467    case COMPONENT_REF:
6468    case BIT_FIELD_REF:
6469      /* If the operand is a CONSTRUCTOR, we can just extract the
6470	 appropriate field if it is present.  Don't do this if we have
6471	 already written the data since we want to refer to that copy
6472	 and varasm.c assumes that's what we'll do.  */
6473      if (code != ARRAY_REF
6474	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6475	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6476	{
6477	  tree elt;
6478
6479	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6480	       elt = TREE_CHAIN (elt))
6481	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6482		/* We can normally use the value of the field in the
6483		   CONSTRUCTOR.  However, if this is a bitfield in
6484		   an integral mode that we can fit in a HOST_WIDE_INT,
6485		   we must mask only the number of bits in the bitfield,
6486		   since this is done implicitly by the constructor.  If
6487		   the bitfield does not meet either of those conditions,
6488		   we can't do this optimization.  */
6489		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6490		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6491			 == MODE_INT)
6492			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6493			    <= HOST_BITS_PER_WIDE_INT))))
6494	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6496		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6497		  {
6498		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6499
6500		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6501		      {
6502			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6503			op0 = expand_and (op0, op1, target);
6504		      }
6505		    else
6506		      {
6507			enum machine_mode imode
6508			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6509			tree count
6510			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6511					 0);
6512
6513			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6514					    target, 0);
6515			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6516					    target, 0);
6517		      }
6518		  }
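		/* E.g., assuming an 8-bit IMODE and BITSIZE == 3: an
		   unsigned field holding 5 is masked with (1 << 3) - 1 == 7
		   and stays 5, while a signed field holding the bits 101
		   is shifted left by 5 and arithmetically right by 5,
		   replicating the sign bit to yield -3.  (The values here
		   are only illustrative.)  */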
6519
6520		return op0;
6521	      }
6522	}
6523
6524      {
6525	enum machine_mode mode1;
6526	int bitsize;
6527	int bitpos;
6528	tree offset;
6529	int volatilep = 0;
6530	int alignment;
6531	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6532					&mode1, &unsignedp, &volatilep,
6533					&alignment);
6534
6535	/* If we got back the original object, something is wrong.  Perhaps
6536	   we are evaluating an expression too early.  In any event, don't
6537	   infinitely recurse.  */
6538	if (tem == exp)
6539	  abort ();
6540
	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be usable as one.  This occurs in unchecked conversion in Ada.  */
6544
6545	op0 = expand_expr (tem,
6546			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6547			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6548				!= INTEGER_CST)
6549			    ? target : NULL_RTX),
6550			   VOIDmode,
6551			   modifier == EXPAND_INITIALIZER
6552			   ? modifier : EXPAND_NORMAL);
6553
6554	/* If this is a constant, put it into a register if it is a
6555	   legitimate constant and memory if it isn't.  */
6556	if (CONSTANT_P (op0))
6557	  {
6558	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6559	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6560	      op0 = force_reg (mode, op0);
6561	    else
6562	      op0 = validize_mem (force_const_mem (mode, op0));
6563	  }
6564
6565	if (offset != 0)
6566	  {
6567	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6568
6569	    if (GET_CODE (op0) != MEM)
6570	      abort ();
6571
6572	    if (GET_MODE (offset_rtx) != ptr_mode)
6573	      {
6574#ifdef POINTERS_EXTEND_UNSIGNED
6575		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6576#else
6577		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6578#endif
6579	      }
6580
	    /* A constant address in OP0 can have VOIDmode; we must not
	       try to call force_reg in that case.  */
6583	    if (GET_CODE (op0) == MEM
6584		&& GET_MODE (op0) == BLKmode
6585		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6586		&& bitsize
6587		&& (bitpos % bitsize) == 0
6588		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6589		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6590	      {
6591		rtx temp = change_address (op0, mode1,
6592					   plus_constant (XEXP (op0, 0),
6593							  (bitpos /
6594							   BITS_PER_UNIT)));
6595		if (GET_CODE (XEXP (temp, 0)) == REG)
6596		  op0 = temp;
6597		else
6598		  op0 = change_address (op0, mode1,
6599					force_reg (GET_MODE (XEXP (temp, 0)),
6600						   XEXP (temp, 0)));
6601		bitpos = 0;
6602	      }

	    op0 = change_address (op0, VOIDmode,
6606				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6607						force_reg (ptr_mode, offset_rtx)));
6608	  }
6609
6610	/* Don't forget about volatility even if this is a bitfield.  */
6611	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6612	  {
6613	    op0 = copy_rtx (op0);
6614	    MEM_VOLATILE_P (op0) = 1;
6615	  }
6616
6617	/* Check the access.  */
6618	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6619          {
6620	    enum memory_use_mode memory_usage;
6621	    memory_usage = get_memory_usage_from_modifier (modifier);
6622
6623	    if (memory_usage != MEMORY_USE_DONT)
6624	      {
6625		rtx to;
6626		int size;
6627
6628		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6629		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6630
		/* Check the pointer's access rights.  */
6632		if (size > BITS_PER_UNIT)
6633		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6634				     to, Pmode,
6635				     GEN_INT (size / BITS_PER_UNIT),
6636				     TYPE_MODE (sizetype),
6637				     GEN_INT (memory_usage),
6638				     TYPE_MODE (integer_type_node));
6639	      }
6640	  }
6641
6642	/* In cases where an aligned union has an unaligned object
6643	   as a field, we might be extracting a BLKmode value from
6644	   an integer-mode (e.g., SImode) object.  Handle this case
6645	   by doing the extract into an object as wide as the field
6646	   (which we know to be the width of a basic mode), then
6647	   storing into memory, and changing the mode to BLKmode.
6648	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
6649	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
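	/* For example, a 3-byte field of a 4-byte-aligned union may be
	   BLKmode while the union itself is held in an SImode object;
	   the field is then fetched into an integer-mode temporary,
	   spilled to a stack slot, and the slot's mode changed to
	   BLKmode.  (The sizes here are only illustrative.)  */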
6650	if (mode1 == VOIDmode
6651	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6652	    || (modifier != EXPAND_CONST_ADDRESS
6653		&& modifier != EXPAND_INITIALIZER
6654		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
6655		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6656		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6657		    /* If the field isn't aligned enough to fetch as a memref,
6658		       fetch it as a bit field.  */
6659		    || (SLOW_UNALIGNED_ACCESS
6660			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6661			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6662	  {
6663	    enum machine_mode ext_mode = mode;
6664
6665	    if (ext_mode == BLKmode)
6666	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6667
6668	    if (ext_mode == BLKmode)
6669	      {
6670		/* In this case, BITPOS must start at a byte boundary and
6671		   TARGET, if specified, must be a MEM.  */
6672		if (GET_CODE (op0) != MEM
6673		    || (target != 0 && GET_CODE (target) != MEM)
6674		    || bitpos % BITS_PER_UNIT != 0)
6675		  abort ();
6676
6677		op0 = change_address (op0, VOIDmode,
6678				      plus_constant (XEXP (op0, 0),
6679						     bitpos / BITS_PER_UNIT));
6680		if (target == 0)
6681		  target = assign_temp (type, 0, 1, 1);
6682
6683		emit_block_move (target, op0,
6684				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6685					  / BITS_PER_UNIT),
6686				 1);
6687
6688		return target;
6689	      }
6690
6691	    op0 = validize_mem (op0);
6692
6693	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6694	      mark_reg_pointer (XEXP (op0, 0), alignment);
6695
6696	    op0 = extract_bit_field (op0, bitsize, bitpos,
6697				     unsignedp, target, ext_mode, ext_mode,
6698				     alignment,
6699				     int_size_in_bytes (TREE_TYPE (tem)));
6700
	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big-endian
	       machine, we must put the field into the high-order bits.  */
6704	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6705		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6706		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6707	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6708				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6709					    - bitsize),
6710				  op0, 1);
6711
6712	    if (mode == BLKmode)
6713	      {
6714		rtx new = assign_stack_temp (ext_mode,
6715					     bitsize / BITS_PER_UNIT, 0);
6716
6717		emit_move_insn (new, op0);
6718		op0 = copy_rtx (new);
6719		PUT_MODE (op0, BLKmode);
6720		MEM_SET_IN_STRUCT_P (op0, 1);
6721	      }
6722
6723	    return op0;
6724	  }
6725
6726	/* If the result is BLKmode, use that to access the object
6727	   now as well.  */
6728	if (mode == BLKmode)
6729	  mode1 = BLKmode;
6730
6731	/* Get a reference to just this component.  */
6732	if (modifier == EXPAND_CONST_ADDRESS
6733	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6734	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6735						   (bitpos / BITS_PER_UNIT)));
6736	else
6737	  op0 = change_address (op0, mode1,
6738				plus_constant (XEXP (op0, 0),
6739					       (bitpos / BITS_PER_UNIT)));
6740
6741	if (GET_CODE (op0) == MEM)
6742	  MEM_ALIAS_SET (op0) = get_alias_set (exp);
6743
6744	if (GET_CODE (XEXP (op0, 0)) == REG)
6745	  mark_reg_pointer (XEXP (op0, 0), alignment);
6746
6747	MEM_SET_IN_STRUCT_P (op0, 1);
6748	MEM_VOLATILE_P (op0) |= volatilep;
6749	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6750	    || modifier == EXPAND_CONST_ADDRESS
6751	    || modifier == EXPAND_INITIALIZER)
6752	  return op0;
6753	else if (target == 0)
6754	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6755
6756	convert_move (target, op0, unsignedp);
6757	return target;
6758      }
6759
6760      /* Intended for a reference to a buffer of a file-object in Pascal.
6761	 But it's not certain that a special tree code will really be
6762	 necessary for these.  INDIRECT_REF might work for them.  */
6763    case BUFFER_REF:
6764      abort ();
6765
6766    case IN_EXPR:
6767      {
6768	/* Pascal set IN expression.
6769
6770	   Algorithm:
6771	       rlo       = set_low - (set_low%bits_per_word);
6772	       the_word  = set [ (index - rlo)/bits_per_word ];
6773	       bit_index = index % bits_per_word;
6774	       bitmask   = 1 << bit_index;
6775	       return !!(the_word & bitmask);  */
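	/* As a concrete (hypothetical) instance, assuming
	   BITS_PER_UNIT == 8 and noting that the code below indexes
	   the set per byte rather than per word: with set_low == 0 and
	   index == 10, the_word is set[10/8] == set[1], bit_index is
	   10 % 8 == 2, and bitmask is 1 << 2 == 4.  */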
6776
6777	tree set = TREE_OPERAND (exp, 0);
6778	tree index = TREE_OPERAND (exp, 1);
6779	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6780	tree set_type = TREE_TYPE (set);
6781	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6782	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6783	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6784	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6785	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6786	rtx setval = expand_expr (set, 0, VOIDmode, 0);
6787	rtx setaddr = XEXP (setval, 0);
6788	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6789	rtx rlow;
6790	rtx diff, quo, rem, addr, bit, result;
6791
6792	preexpand_calls (exp);
6793
6794	/* If domain is empty, answer is no.  Likewise if index is constant
6795	   and out of bounds.  */
6796	if (((TREE_CODE (set_high_bound) == INTEGER_CST
6797	     && TREE_CODE (set_low_bound) == INTEGER_CST
6798	     && tree_int_cst_lt (set_high_bound, set_low_bound))
6799	     || (TREE_CODE (index) == INTEGER_CST
6800		 && TREE_CODE (set_low_bound) == INTEGER_CST
6801		 && tree_int_cst_lt (index, set_low_bound))
6802	     || (TREE_CODE (set_high_bound) == INTEGER_CST
6803		 && TREE_CODE (index) == INTEGER_CST
6804		 && tree_int_cst_lt (set_high_bound, index))))
6805	  return const0_rtx;
6806
6807	if (target == 0)
6808	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6809
6810	/* If we get here, we have to generate the code for both cases
6811	   (in range and out of range).  */
6812
6813	op0 = gen_label_rtx ();
6814	op1 = gen_label_rtx ();
6815
6816	if (! (GET_CODE (index_val) == CONST_INT
6817	       && GET_CODE (lo_r) == CONST_INT))
6818	  {
6819	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6820				     GET_MODE (index_val), iunsignedp, 0, op1);
6821	  }
6822
6823	if (! (GET_CODE (index_val) == CONST_INT
6824	       && GET_CODE (hi_r) == CONST_INT))
6825	  {
6826	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6827				     GET_MODE (index_val), iunsignedp, 0, op1);
6828	  }
6829
6830	/* Calculate the element number of bit zero in the first word
6831	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6839
6840	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6841			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6842
6843	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6844			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6845	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6846			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6847
	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, quo,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));
6852
	/* Extract the bit we want to examine.  */
6854	bit = expand_shift (RSHIFT_EXPR, byte_mode,
6855			    gen_rtx_MEM (byte_mode, addr),
6856			    make_tree (TREE_TYPE (index), rem),
6857			    NULL_RTX, 1);
6858	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6859			       GET_MODE (target) == byte_mode ? target : 0,
6860			       1, OPTAB_LIB_WIDEN);
6861
6862	if (result != target)
6863	  convert_move (target, result, 1);
6864
6865	/* Output the code to handle the out-of-range case.  */
6866	emit_jump (op0);
6867	emit_label (op1);
6868	emit_move_insn (target, const0_rtx);
6869	emit_label (op0);
6870	return target;
6871      }
6872
6873    case WITH_CLEANUP_EXPR:
6874      if (RTL_EXPR_RTL (exp) == 0)
6875	{
6876	  RTL_EXPR_RTL (exp)
6877	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6878	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6879
6880	  /* That's it for this cleanup.  */
6881	  TREE_OPERAND (exp, 2) = 0;
6882	}
6883      return RTL_EXPR_RTL (exp);
6884
6885    case CLEANUP_POINT_EXPR:
6886      {
6887	/* Start a new binding layer that will keep track of all cleanup
6888	   actions to be performed.  */
6889	expand_start_bindings (0);
6890
6891	target_temp_slot_level = temp_slot_level;
6892
6893	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6894	/* If we're going to use this value, load it up now.  */
6895	if (! ignore)
6896	  op0 = force_not_mem (op0);
6897	preserve_temp_slots (op0);
6898	expand_end_bindings (NULL_TREE, 0, 0);
6899      }
6900      return op0;
6901
6902    case CALL_EXPR:
6903      /* Check for a built-in function.  */
6904      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6905	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6906	      == FUNCTION_DECL)
6907	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6908	return expand_builtin (exp, target, subtarget, tmode, ignore);
6909
6910      /* If this call was expanded already by preexpand_calls,
6911	 just return the result we got.  */
6912      if (CALL_EXPR_RTL (exp) != 0)
6913	return CALL_EXPR_RTL (exp);
6914
6915      return expand_call (exp, target, ignore);
6916
6917    case NON_LVALUE_EXPR:
6918    case NOP_EXPR:
6919    case CONVERT_EXPR:
6920    case REFERENCE_EXPR:
6921      if (TREE_CODE (type) == UNION_TYPE)
6922	{
6923	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6924	  if (target == 0)
6925	    {
6926	      if (mode != BLKmode)
6927		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6928	      else
6929		target = assign_temp (type, 0, 1, 1);
6930	    }
6931
6932	  if (GET_CODE (target) == MEM)
6933	    /* Store data into beginning of memory target.  */
6934	    store_expr (TREE_OPERAND (exp, 0),
6935			change_address (target, TYPE_MODE (valtype), 0), 0);
6936
6937	  else if (GET_CODE (target) == REG)
6938	    /* Store this field into a union of the proper type.  */
6939	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6940			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6941			 VOIDmode, 0, 1,
6942			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6943			 0);
6944	  else
6945	    abort ();
6946
6947	  /* Return the entire union.  */
6948	  return target;
6949	}
6950
6951      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6952	{
6953	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6954			     ro_modifier);
6955
6956	  /* If the signedness of the conversion differs and OP0 is
6957	     a promoted SUBREG, clear that indication since we now
6958	     have to do the proper extension.  */
6959	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6960	      && GET_CODE (op0) == SUBREG)
6961	    SUBREG_PROMOTED_VAR_P (op0) = 0;
6962
6963	  return op0;
6964	}
6965
6966      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6967      if (GET_MODE (op0) == mode)
6968	return op0;
6969
6970      /* If OP0 is a constant, just convert it into the proper mode.  */
6971      if (CONSTANT_P (op0))
6972	return
6973	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6974			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6975
6976      if (modifier == EXPAND_INITIALIZER)
6977	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6978
6979      if (target == 0)
6980	return
6981	  convert_to_mode (mode, op0,
6982			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6983      else
6984	convert_move (target, op0,
6985		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6986      return target;
6987
6988    case PLUS_EXPR:
6989      /* We come here from MINUS_EXPR when the second operand is a
6990         constant.  */
6991    plus_expr:
6992      this_optab = add_optab;
6993
6994      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6995	 something else, make sure we add the register to the constant and
6996	 then to the other thing.  This case can occur during strength
6997	 reduction and doing it this way will produce better code if the
6998	 frame pointer or argument pointer is eliminated.
6999
7000	 fold-const.c will ensure that the constant is always in the inner
7001	 PLUS_EXPR, so the only case we need to do anything about is if
7002	 sp, ap, or fp is our second argument, in which case we must swap
7003	 the innermost first argument and our second argument.  */
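      /* For example, a tree of the form (x + 4) + fp is rearranged
	 here into (fp + 4) + x, so that eliminating fp later folds
	 the register into the constant term.  (The operands shown are
	 hypothetical.)  */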
7004
7005      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7006	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7007	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7008	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7009	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7010	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7011	{
7012	  tree t = TREE_OPERAND (exp, 1);
7013
7014	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7015	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7016	}
7017
7018      /* If the result is to be ptr_mode and we are adding an integer to
7019	 something, we might be forming a constant.  So try to use
7020	 plus_constant.  If it produces a sum and we can't accept it,
7021	 use force_operand.  This allows P = &ARR[const] to generate
7022	 efficient code on machines where a SYMBOL_REF is not a valid
7023	 address.
7024
7025	 If this is an EXPAND_SUM call, always return the sum.  */
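      /* E.g., for P = &ARR[5] with 4-byte elements, plus_constant can
	 fold the whole address into the single RTL constant
	 (const (plus (symbol_ref ARR) (const_int 20))); force_operand
	 then loads it only if that form is not a valid address.
	 (ARR and the offsets are hypothetical.)  */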
7026      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7027	  || mode == ptr_mode)
7028	{
7029	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7030	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7031	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7032	    {
7033	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7034				 EXPAND_SUM);
7035	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
7036	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7037		op1 = force_operand (op1, target);
7038	      return op1;
7039	    }
7040
7041	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7043		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7044	    {
7045	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7046				 EXPAND_SUM);
7047	      if (! CONSTANT_P (op0))
7048		{
7049		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7050				     VOIDmode, modifier);
7051		  /* Don't go to both_summands if modifier
7052		     says it's not right to return a PLUS.  */
7053		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7054		    goto binop2;
7055		  goto both_summands;
7056		}
7057	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
7058	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7059		op0 = force_operand (op0, target);
7060	      return op0;
7061	    }
7062	}
7063
7064      /* No sense saving up arithmetic to be done
7065	 if it's all in the wrong mode to form part of an address.
7066	 And force_operand won't know whether to sign-extend or
7067	 zero-extend.  */
7068      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7069	  || mode != ptr_mode)
7070	goto binop;
7071
7072      preexpand_calls (exp);
7073      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7074	subtarget = 0;
7075
7076      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7077      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7078
7079    both_summands:
7080      /* Make sure any term that's a sum with a constant comes last.  */
7081      if (GET_CODE (op0) == PLUS
7082	  && CONSTANT_P (XEXP (op0, 1)))
7083	{
7084	  temp = op0;
7085	  op0 = op1;
7086	  op1 = temp;
7087	}
7088      /* If adding to a sum including a constant,
7089	 associate it to put the constant outside.  */
7090      if (GET_CODE (op1) == PLUS
7091	  && CONSTANT_P (XEXP (op1, 1)))
7092	{
7093	  rtx constant_term = const0_rtx;
7094
7095	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7096	  if (temp != 0)
7097	    op0 = temp;
7098	  /* Ensure that MULT comes first if there is one.  */
7099	  else if (GET_CODE (op0) == MULT)
7100	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7101	  else
7102	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7103
7104	  /* Let's also eliminate constants from op0 if possible.  */
7105	  op0 = eliminate_constant_term (op0, &constant_term);
7106
7107	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7108	     their sum should be a constant.  Form it into OP1, since the
7109	     result we want will then be OP0 + OP1.  */
7110
7111	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7112					    XEXP (op1, 1));
7113	  if (temp != 0)
7114	    op1 = temp;
7115	  else
7116	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7117	}
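      /* E.g., if OP0 is (plus A 4) and OP1 is (plus B 10), the code
	 above combines the non-constant parts and pulls the constants
	 out, so the result is formed as (A + B) + 14.  (Hypothetical
	 operands.)  */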
7118
7119      /* Put a constant term last and put a multiplication first.  */
7120      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7121	temp = op1, op1 = op0, op0 = temp;
7122
7123      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7124      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7125
7126    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants; handle the case where both operands
	 are constant here.  */
7132      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7133	  && really_constant_p (TREE_OPERAND (exp, 0))
7134	  && really_constant_p (TREE_OPERAND (exp, 1)))
7135	{
7136	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7137				 VOIDmode, ro_modifier);
7138	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7139				 VOIDmode, ro_modifier);
7140
7141	  /* If the last operand is a CONST_INT, use plus_constant of
7142	     the negated constant.  Else make the MINUS.  */
7143	  if (GET_CODE (op1) == CONST_INT)
7144	    return plus_constant (op0, - INTVAL (op1));
7145	  else
7146	    return gen_rtx_MINUS (mode, op0, op1);
7147	}
7148      /* Convert A - const to A + (-const).  */
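      /* E.g., for unsigned A, A - 1 cannot negate the 1 in TYPE (the
	 negation would overflow), so the code below redoes the
	 negation in the corresponding signed type and expands
	 (signed) A + (-1) instead.  (The constant is illustrative.)  */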
7149      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7150	{
7151	  tree negated = fold (build1 (NEGATE_EXPR, type,
7152				       TREE_OPERAND (exp, 1)));
7153
7154	  /* Deal with the case where we can't negate the constant
7155	     in TYPE.  */
7156	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7157	    {
7158	      tree newtype = signed_type (type);
7159	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7160	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7161	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7162
7163	      if (! TREE_OVERFLOW (newneg))
7164		return expand_expr (convert (type,
7165					     build (PLUS_EXPR, newtype,
7166						    newop0, newneg)),
7167				    target, tmode, ro_modifier);
7168	    }
7169	  else
7170	    {
7171	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7172	      goto plus_expr;
7173	    }
7174	}
7175      this_optab = sub_optab;
7176      goto binop;
7177
7178    case MULT_EXPR:
7179      preexpand_calls (exp);
7180      /* If first operand is constant, swap them.
7181	 Thus the following special case checks need only
7182	 check the second operand.  */
7183      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7184	{
7185	  register tree t1 = TREE_OPERAND (exp, 0);
7186	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7187	  TREE_OPERAND (exp, 1) = t1;
7188	}
7189
7190      /* Attempt to return something suitable for generating an
7191	 indexed address, for machines that support that.  */
7192
7193      if (modifier == EXPAND_SUM && mode == ptr_mode
7194	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7195	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7196	{
7197	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7198			     EXPAND_SUM);
7199
7200	  /* Apply distributive law if OP0 is x+c.  */
7201	  if (GET_CODE (op0) == PLUS
7202	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7203	    return gen_rtx_PLUS (mode,
7204				 gen_rtx_MULT (mode, XEXP (op0, 0),
7205					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7206			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7207				     * INTVAL (XEXP (op0, 1))));
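	  /* E.g., with OP0 == (plus x 6) and a multiplier of 5, the
	     return above yields (plus (mult x 5) 30) rather than a
	     multiply of the whole sum.  (The values are
	     hypothetical.)  */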
7208
7209	  if (GET_CODE (op0) != REG)
7210	    op0 = force_operand (op0, NULL_RTX);
7211	  if (GET_CODE (op0) != REG)
7212	    op0 = copy_to_mode_reg (mode, op0);
7213
7214	  return gen_rtx_MULT (mode, op0,
7215			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7216	}
7217
7218      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7219	subtarget = 0;
7220
7221      /* Check for multiplying things that have been extended
7222	 from a narrower type.  If this machine supports multiplying
7223	 in that narrower type with a result in the desired type,
7224	 do it that way, and avoid the explicit type-conversion.  */
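      /* E.g., on a target providing a widening multiply of two HImode
	 values with an SImode result (a mulhisi3 pattern), the product
	 (int) ((short) a * (short) b) can use that one instruction
	 instead of widening each operand first.  (Whether the pattern
	 exists depends on the target.)  */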
7225      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7226	  && TREE_CODE (type) == INTEGER_TYPE
7227	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7228	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7229	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7230	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7231				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7232	       /* Don't use a widening multiply if a shift will do.  */
7233	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7234		    > HOST_BITS_PER_WIDE_INT)
7235		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7236	      ||
7237	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7238	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7239		   ==
7240		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7241	       /* If both operands are extended, they must either both
7242		  be zero-extended or both be sign-extended.  */
7243	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7244		   ==
7245		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7246	{
7247	  enum machine_mode innermode
7248	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7249	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7250			? smul_widen_optab : umul_widen_optab);
7251	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7252			? umul_widen_optab : smul_widen_optab);
7253	  if (mode == GET_MODE_WIDER_MODE (innermode))
7254	    {
7255	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7256		{
7257		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7258				     NULL_RTX, VOIDmode, 0);
7259		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7260		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7261				       VOIDmode, 0);
7262		  else
7263		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7264				       NULL_RTX, VOIDmode, 0);
7265		  goto binop2;
7266		}
7267	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7268		       && innermode == word_mode)
7269		{
7270		  rtx htem;
7271		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7272				     NULL_RTX, VOIDmode, 0);
7273		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7274		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7275				       VOIDmode, 0);
7276		  else
7277		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7278				       NULL_RTX, VOIDmode, 0);
7279		  temp = expand_binop (mode, other_optab, op0, op1, target,
7280				       unsignedp, OPTAB_LIB_WIDEN);
7281		  htem = expand_mult_highpart_adjust (innermode,
7282						      gen_highpart (innermode, temp),
7283						      op0, op1,
7284						      gen_highpart (innermode, temp),
7285						      unsignedp);
7286		  emit_move_insn (gen_highpart (innermode, temp), htem);
7287		  return temp;
7288		}
7289	    }
7290	}
7291      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7292      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7293      return expand_mult (mode, op0, op1, target, unsignedp);
7294
7295    case TRUNC_DIV_EXPR:
7296    case FLOOR_DIV_EXPR:
7297    case CEIL_DIV_EXPR:
7298    case ROUND_DIV_EXPR:
7299    case EXACT_DIV_EXPR:
7300      preexpand_calls (exp);
7301      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7302	subtarget = 0;
7303      /* Possible optimization: compute the dividend with EXPAND_SUM
7304	 then if the divisor is constant can optimize the case
7305	 where some terms of the dividend have coeffs divisible by it.  */
7306      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7307      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7308      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7309
7310    case RDIV_EXPR:
7311      this_optab = flodiv_optab;
7312      goto binop;
7313
7314    case TRUNC_MOD_EXPR:
7315    case FLOOR_MOD_EXPR:
7316    case CEIL_MOD_EXPR:
7317    case ROUND_MOD_EXPR:
7318      preexpand_calls (exp);
7319      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7320	subtarget = 0;
7321      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7322      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7323      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7324
7325    case FIX_ROUND_EXPR:
7326    case FIX_FLOOR_EXPR:
7327    case FIX_CEIL_EXPR:
7328      abort ();			/* Not used for C.  */
7329
7330    case FIX_TRUNC_EXPR:
7331      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7332      if (target == 0)
7333	target = gen_reg_rtx (mode);
7334      expand_fix (target, op0, unsignedp);
7335      return target;
7336
7337    case FLOAT_EXPR:
7338      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7339      if (target == 0)
7340	target = gen_reg_rtx (mode);
7341      /* expand_float can't figure out what to do if FROM has VOIDmode.
7342	 So give it the correct mode.  With -O, cse will optimize this.  */
7343      if (GET_MODE (op0) == VOIDmode)
7344	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7345				op0);
7346      expand_float (target, op0,
7347		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7348      return target;
7349
7350    case NEGATE_EXPR:
7351      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7352      temp = expand_unop (mode, neg_optab, op0, target, 0);
7353      if (temp == 0)
7354	abort ();
7355      return temp;
7356
7357    case ABS_EXPR:
7358      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7359
7360      /* Handle complex values specially.  */
7361      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7362	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7363	return expand_complex_abs (mode, op0, target, unsignedp);
7364
7365      /* Unsigned abs is simply the operand.  Testing here means we don't
7366	 risk generating incorrect code below.  */
7367      if (TREE_UNSIGNED (type))
7368	return op0;
7369
7370      return expand_abs (mode, op0, target,
7371			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7372
7373    case MAX_EXPR:
7374    case MIN_EXPR:
7375      target = original_target;
7376      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7377	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7378	  || GET_MODE (target) != mode
7379	  || (GET_CODE (target) == REG
7380	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7381	target = gen_reg_rtx (mode);
7382      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7383      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7384
7385      /* First try to do it with a special MIN or MAX instruction.
7386	 If that does not win, use a conditional jump to select the proper
7387	 value.  */
7388      this_optab = (TREE_UNSIGNED (type)
7389		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7390		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7391
7392      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7393			   OPTAB_WIDEN);
7394      if (temp != 0)
7395	return temp;
7396
7397      /* At this point, a MEM target is no longer useful; we will get better
7398	 code without it.  */
7399
7400      if (GET_CODE (target) == MEM)
7401	target = gen_reg_rtx (mode);
7402
7403      if (target != op0)
7404	emit_move_insn (target, op0);
7405
7406      op0 = gen_label_rtx ();
7407
7408      /* If this mode is an integer too wide to compare properly,
7409	 compare word by word.  Rely on cse to optimize constant cases.  */
7410      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7411	{
7412	  if (code == MAX_EXPR)
7413	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7414					  target, op1, NULL_RTX, op0);
7415	  else
7416	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7417					  op1, target, NULL_RTX, op0);
7418	  emit_move_insn (target, op1);
7419	}
7420      else
7421	{
7422	  if (code == MAX_EXPR)
7423	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7424		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7425		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7426	  else
7427	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7428		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7429		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7430	  if (temp == const0_rtx)
7431	    emit_move_insn (target, op1);
7432	  else if (temp != const_true_rtx)
7433	    {
7434	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7435		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7436	      else
7437		abort ();
7438	      emit_move_insn (target, op1);
7439	    }
7440	}
7441      emit_label (op0);
7442      return target;
7443
7444    case BIT_NOT_EXPR:
7445      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7446      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7447      if (temp == 0)
7448	abort ();
7449      return temp;
7450
7451    case FFS_EXPR:
7452      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7453      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7454      if (temp == 0)
7455	abort ();
7456      return temp;
7457
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc.)
	 but that is probably not worthwhile.  */
7462
7463      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7464	 boolean values when we want in all cases to compute both of them.  In
7465	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7466	 as actual zero-or-1 values and then bitwise anding.  In cases where
7467	 there cannot be any side effects, better code would be made by
7468	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7469	 how to recognize those cases.  */
7470
7471    case TRUTH_AND_EXPR:
7472    case BIT_AND_EXPR:
7473      this_optab = and_optab;
7474      goto binop;
7475
7476    case TRUTH_OR_EXPR:
7477    case BIT_IOR_EXPR:
7478      this_optab = ior_optab;
7479      goto binop;
7480
7481    case TRUTH_XOR_EXPR:
7482    case BIT_XOR_EXPR:
7483      this_optab = xor_optab;
7484      goto binop;
7485
7486    case LSHIFT_EXPR:
7487    case RSHIFT_EXPR:
7488    case LROTATE_EXPR:
7489    case RROTATE_EXPR:
7490      preexpand_calls (exp);
7491      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7492	subtarget = 0;
7493      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7494      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7495			   unsignedp);
7496
7497      /* Could determine the answer when only additive constants differ.  Also,
7498	 the addition of one can be handled by changing the condition.  */
7499    case LT_EXPR:
7500    case LE_EXPR:
7501    case GT_EXPR:
7502    case GE_EXPR:
7503    case EQ_EXPR:
7504    case NE_EXPR:
7505      preexpand_calls (exp);
7506      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7507      if (temp != 0)
7508	return temp;
7509
7510      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
7511      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7512	  && original_target
7513	  && GET_CODE (original_target) == REG
7514	  && (GET_MODE (original_target)
7515	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7516	{
7517	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7518			      VOIDmode, 0);
7519
7520	  if (temp != original_target)
7521	    temp = copy_to_reg (temp);
7522
7523	  op1 = gen_label_rtx ();
7524	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7525				   GET_MODE (temp), unsignedp, 0, op1);
7526	  emit_move_insn (temp, const1_rtx);
7527	  emit_label (op1);
7528	  return temp;
7529	}
7530
7531      /* If no set-flag instruction, must generate a conditional
7532	 store into a temporary variable.  Drop through
7533	 and handle this like && and ||.  */
7534
7535    case TRUTH_ANDIF_EXPR:
7536    case TRUTH_ORIF_EXPR:
7537      if (! ignore
7538	  && (target == 0 || ! safe_from_p (target, exp, 1)
7539	      /* Make sure we don't have a hard reg (such as function's return
7540		 value) live across basic blocks, if not optimizing.  */
7541	      || (!optimize && GET_CODE (target) == REG
7542		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7543	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7544
7545      if (target)
7546	emit_clr_insn (target);
7547
7548      op1 = gen_label_rtx ();
7549      jumpifnot (exp, op1);
7550
7551      if (target)
7552	emit_0_to_1_insn (target);
7553
7554      emit_label (op1);
7555      return ignore ? const0_rtx : target;
7556
7557    case TRUTH_NOT_EXPR:
7558      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7559      /* The parser is careful to generate TRUTH_NOT_EXPR
7560	 only with operands that are always zero or one.  */
7561      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7562			   target, 1, OPTAB_LIB_WIDEN);
7563      if (temp == 0)
7564	abort ();
7565      return temp;
7566
7567    case COMPOUND_EXPR:
7568      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7569      emit_queue ();
7570      return expand_expr (TREE_OPERAND (exp, 1),
7571			  (ignore ? const0_rtx : target),
7572			  VOIDmode, 0);
7573
7574    case COND_EXPR:
7575      /* If we would have a "singleton" (see below) were it not for a
7576	 conversion in each arm, bring that conversion back out.  */
7577      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7578	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7579	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7580	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7581	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
7597				target, tmode, modifier);
7598	}
7599
7600      {
7601	/* Note that COND_EXPRs whose type is a structure or union
7602	   are required to be constructed to contain assignments of
7603	   a temporary variable, so that we can evaluate them here
7604	   for side effect only.  If type is void, we must do likewise.  */
7605
7606	/* If an arm of the branch requires a cleanup,
7607	   only that cleanup is performed.  */
7608
7609	tree singleton = 0;
7610	tree binary_op = 0, unary_op = 0;
7611
7612	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7613	   convert it to our mode, if necessary.  */
7614	if (integer_onep (TREE_OPERAND (exp, 1))
7615	    && integer_zerop (TREE_OPERAND (exp, 2))
7616	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7617	  {
7618	    if (ignore)
7619	      {
7620		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7621			     ro_modifier);
7622		return const0_rtx;
7623	      }
7624
7625	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7626	    if (GET_MODE (op0) == mode)
7627	      return op0;
7628
7629	    if (target == 0)
7630	      target = gen_reg_rtx (mode);
7631	    convert_move (target, op0, unsignedp);
7632	    return target;
7633	  }
7634
7635	/* Check for X ? A + B : A.  If we have this, we can copy A to the
7636	   output and conditionally add B.  Similarly for unary operations.
7637	   Don't do this if X has side-effects because those side effects
7638	   might affect A or B and the "?" operation is a sequence point in
7639	   ANSI.  (operand_equal_p tests for side effects.)  */
7640
7641	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7642	    && operand_equal_p (TREE_OPERAND (exp, 2),
7643				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7644	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7645	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7646		 && operand_equal_p (TREE_OPERAND (exp, 1),
7647				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7648	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7649	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7650		 && operand_equal_p (TREE_OPERAND (exp, 2),
7651				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7652	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7653	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7654		 && operand_equal_p (TREE_OPERAND (exp, 1),
7655				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7656	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7657
7658	/* If we are not to produce a result, we have no target.  Otherwise,
7659	   if a target was specified use it; it will not be used as an
7660	   intermediate target unless it is safe.  If no target, use a
7661	   temporary.  */
7662
7663	if (ignore)
7664	  temp = 0;
7665	else if (original_target
7666		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7667		     || (singleton && GET_CODE (original_target) == REG
7668			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7669			 && original_target == var_rtx (singleton)))
7670		 && GET_MODE (original_target) == mode
7671#ifdef HAVE_conditional_move
7672		 && (! can_conditionally_move_p (mode)
7673		     || GET_CODE (original_target) == REG
7674		     || TREE_ADDRESSABLE (type))
7675#endif
7676		 && ! (GET_CODE (original_target) == MEM
7677		       && MEM_VOLATILE_P (original_target)))
7678	  temp = original_target;
7679	else if (TREE_ADDRESSABLE (type))
7680	  abort ();
7681	else
7682	  temp = assign_temp (type, 0, 0, 1);
7683
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  When BRANCH_COST is low, do this only for C == 1.  */
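	/* E.g., X ? A + 4 : A becomes A + ((X != 0) << 2), replacing
	   the branch with a store-flag operation and a shift.  (The
	   constant 4 is illustrative.)  */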
7688	if (temp && singleton && binary_op
7689	    && (TREE_CODE (binary_op) == PLUS_EXPR
7690		|| TREE_CODE (binary_op) == MINUS_EXPR
7691		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
7692		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
7693	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7694		: integer_onep (TREE_OPERAND (binary_op, 1)))
7695	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7696	  {
7697	    rtx result;
7698	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7699			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7700			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7701			    : xor_optab);
7702
7703	    /* If we had X ? A : A + 1, do this as A + (X == 0).
7704
7705	       We have to invert the truth value here and then put it
7706	       back later if do_store_flag fails.  We cannot simply copy
7707	       TREE_OPERAND (exp, 0) to another variable and modify that
7708	       because invert_truthvalue can modify the tree pointed to
7709	       by its argument.  */
7710	    if (singleton == TREE_OPERAND (exp, 1))
7711	      TREE_OPERAND (exp, 0)
7712		= invert_truthvalue (TREE_OPERAND (exp, 0));
7713
7714	    result = do_store_flag (TREE_OPERAND (exp, 0),
7715				    (safe_from_p (temp, singleton, 1)
7716				     ? temp : NULL_RTX),
7717				    mode, BRANCH_COST <= 1);
7718
7719	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7720	      result = expand_shift (LSHIFT_EXPR, mode, result,
7721				     build_int_2 (tree_log2
7722						  (TREE_OPERAND
7723						   (binary_op, 1)),
7724						  0),
7725				     (safe_from_p (temp, singleton, 1)
7726				      ? temp : NULL_RTX), 0);
7727
7728	    if (result)
7729	      {
7730		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7731		return expand_binop (mode, boptab, op1, result, temp,
7732				     unsignedp, OPTAB_LIB_WIDEN);
7733	      }
7734	    else if (singleton == TREE_OPERAND (exp, 1))
7735	      TREE_OPERAND (exp, 0)
7736		= invert_truthvalue (TREE_OPERAND (exp, 0));
7737	  }
7738
7739	do_pending_stack_adjust ();
7740	NO_DEFER_POP;
7741	op0 = gen_label_rtx ();
7742
7743	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7744	  {
7745	    if (temp != 0)
7746	      {
7747		/* If the target conflicts with the other operand of the
7748		   binary op, we can't use it.  Also, we can't use the target
7749		   if it is a hard register, because evaluating the condition
7750		   might clobber it.  */
7751		if ((binary_op
7752		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7753		    || (GET_CODE (temp) == REG
7754			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
7755		  temp = gen_reg_rtx (mode);
7756		store_expr (singleton, temp, 0);
7757	      }
7758	    else
7759	      expand_expr (singleton,
7760			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7761	    if (singleton == TREE_OPERAND (exp, 1))
7762	      jumpif (TREE_OPERAND (exp, 0), op0);
7763	    else
7764	      jumpifnot (TREE_OPERAND (exp, 0), op0);
7765
7766	    start_cleanup_deferral ();
7767	    if (binary_op && temp == 0)
7768	      /* Just touch the other operand.  */
7769	      expand_expr (TREE_OPERAND (binary_op, 1),
7770			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7771	    else if (binary_op)
7772	      store_expr (build (TREE_CODE (binary_op), type,
7773				 make_tree (type, temp),
7774				 TREE_OPERAND (binary_op, 1)),
7775			  temp, 0);
7776	    else
7777	      store_expr (build1 (TREE_CODE (unary_op), type,
7778				  make_tree (type, temp)),
7779			  temp, 0);
7780	    op1 = op0;
7781	  }
7782	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7783	   comparison operator.  If we have one of these cases, set the
7784	   output to A, branch on A (cse will merge these two references),
7785	   then set the output to FOO.  */
7786	else if (temp
7787		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7788		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7789		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7790				     TREE_OPERAND (exp, 1), 0)
7791		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7792		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7793		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7794	  {
7795	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7796	      temp = gen_reg_rtx (mode);
7797	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7798	    jumpif (TREE_OPERAND (exp, 0), op0);
7799
7800	    start_cleanup_deferral ();
7801	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7802	    op1 = op0;
7803	  }
7804	else if (temp
7805		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7806		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7807		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7808				     TREE_OPERAND (exp, 2), 0)
7809		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7810		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7811		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7812	  {
7813	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7814	      temp = gen_reg_rtx (mode);
7815	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7816	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7817
7818	    start_cleanup_deferral ();
7819	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7820	    op1 = op0;
7821	  }
7822	else
7823	  {
7824	    op1 = gen_label_rtx ();
7825	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7826
7827	    start_cleanup_deferral ();
7828	    if (temp != 0)
7829	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
7830	    else
7831	      expand_expr (TREE_OPERAND (exp, 1),
7832			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7833	    end_cleanup_deferral ();
7834	    emit_queue ();
7835	    emit_jump_insn (gen_jump (op1));
7836	    emit_barrier ();
7837	    emit_label (op0);
7838	    start_cleanup_deferral ();
7839	    if (temp != 0)
7840	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
7841	    else
7842	      expand_expr (TREE_OPERAND (exp, 2),
7843			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7844	  }
7845
7846	end_cleanup_deferral ();
7847
7848	emit_queue ();
7849	emit_label (op1);
7850	OK_DEFER_POP;
7851
7852	return temp;
7853      }
7854
7855    case TARGET_EXPR:
7856      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function that is laid out on the stack, or a temporary
	   variable that must be passed by reference.
7862
7863	   We guarantee that the expression will either be constructed
7864	   or copied into our original target.  */
7865
7866	tree slot = TREE_OPERAND (exp, 0);
7867	tree cleanups = NULL_TREE;
7868	tree exp1;
7869
7870	if (TREE_CODE (slot) != VAR_DECL)
7871	  abort ();
7872
7873	if (! ignore)
7874	  target = original_target;
7875
7876	if (target == 0)
7877	  {
7878	    if (DECL_RTL (slot) != 0)
7879	      {
7880		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
7883		if (TREE_OPERAND (exp, 1) == NULL_TREE)
7884		  return target;
7885	      }
7886	    else
7887	      {
7888		target = assign_temp (type, 2, 0, 1);
7889		/* All temp slots at this level must not conflict.  */
7890		preserve_temp_slots (target);
7891		DECL_RTL (slot) = target;
7892		if (TREE_ADDRESSABLE (slot))
7893		  {
7894		    TREE_ADDRESSABLE (slot) = 0;
7895		    mark_addressable (slot);
7896		  }
7897
		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a
		   reference to pass as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */
7904
7905		if (TREE_OPERAND (exp, 2) == 0)
7906		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7907		cleanups = TREE_OPERAND (exp, 2);
7908	      }
7909	  }
7910	else
7911	  {
	    /* This case does occur when expanding a parameter that
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
7920	    if (DECL_RTL (slot) != 0)
7921              {
7922                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		DECL_RTL (slot) = target;
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	TREE_USED (slot) = 1;
	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = 0;

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL
	    && ! (TREE_CODE (lhs) == INDIRECT_REF
		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size one.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
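	/* A sketch of the transformation: for

	     s.x |= t.y;		(X and Y both one-bit fields)

	   we emit, in effect,

	     if (! t.y) goto lab;  s.x = 1;  lab:;

	   and the symmetric test-and-clear for &=.  */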
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
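      /* E.g., with the GNU nested-function extension, in

	   int outer (int x)
	   { int inner (void) { return x; }  ... &inner ...  }

	 the address taken must be that of a trampoline which loads the
	 static chain, so that INNER can reach X.  */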
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid running the cleanups twice.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and return a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant,
	     we can have it be statically allocated into memory.  For
	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
	     memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }

	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	    {
	      /* If this object is in a register, it must not
		 be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
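	/* A sketch: for a GNU C expression such as

	     __complex__ double z = x + y * 1.0i;

	   the real part (op0) and imaginary part (op1) are computed
	   separately and moved into the two halves of TARGET below.  */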
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);
	rtx finally_label = gen_label_rtx ();
	rtx done_label = gen_label_rtx ();
	rtx return_link = gen_reg_rtx (Pmode);
	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
			      (tree) finally_label, (tree) return_link);
	TREE_SIDE_EFFECTS (cleanup) = 1;
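
	/* The code emitted below is, in outline,

	     <try-block>
	     <call FINALLY as a subroutine>
	     goto done;
	   finally:
	     <finally-block>
	     jump *return_link;
	   done:

	   where the "subroutine call" is the GOTO_SUBROUTINE_EXPR
	   cleanup, which stores its resume address in RETURN_LINK; the
	   cleanup machinery arranges for exceptional exits from the
	   binding contour to take the same path through FINALLY.  */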

	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (0);

	target_temp_slot_level = temp_slot_level;

	expand_decl_cleanup (NULL_TREE, cleanup);
	op0 = expand_expr (try_block, target, tmode, modifier);

	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
	emit_jump (done_label);
	emit_label (finally_label);
	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	emit_indirect_jump (return_link);
	emit_label (done_label);
	return op0;
      }

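    /* A GOTO_SUBROUTINE_EXPR jumps to the label in operand 0, after
       storing the address of a fresh return label in the register in
       operand 1; the "subroutine" comes back via an indirect jump
       through that register (see TRY_FINALLY_EXPR above).  */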
    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();
	emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case POPDCC_EXPR:
      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
	return const0_rtx;
      }

    case POPDHC_EXPR:
      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
	return const0_rtx;
      }

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}


/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */
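
/* For example (a sketch), if EXP is `(char *) &d + 2' where D is a
   double with 64-bit alignment, the PLUS_EXPR case below caps the
   result at 16 bits, since the pointer is only known to be 2 mod 8.  */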

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */
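
/* E.g. (sketch): for `"hello" + n' this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to the tree for N; for a plain
   ADDR_EXPR of a STRING_CST the offset is zero.  */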

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
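
/* For instance (sketch), c_strlen of `"foobar" + 2' yields the
   constant 4, while `"foo\0bar" + i' with non-constant I yields 0
   (punt), since the answer depends on which side of the embedded
   null the offset falls.  */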

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
         arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}

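/* Compute the value of __builtin_return_address or
   __builtin_frame_address (as given by FNDECL_CODE) for the frame
   COUNT levels up, starting the walk from TEM, which the caller
   normally passes as the hard frame pointer.  */
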
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is used directly by sjlj exception
   handling code.  */

void
expand_builtin_setjmp_setup (buf_addr, receiver_label)
     rtx buf_addr;
     rtx receiver_label;
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  emit_queue ();

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
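
  /* A sketch of the layout used below, in Pmode-sized words:

       word 0	frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
       word 1	address of RECEIVER_LABEL
       word 2+	stack save area, in the machine's SA_MODE  */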

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.
   This is used directly by sjlj exception handling code.  */

void
expand_builtin_setjmp_receiver (receiver_label)
     rtx receiver_label ATTRIBUTE_UNUSED;
{
  /* The FP is clobbered when we get here, so we have to make sure
     it's marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If space hasn't been allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	; /* Nothing */
      }

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}


/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */
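
/* A usage sketch (BUF is the five-word buffer mentioned above, and
   the second argument of longjmp must be 1):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ... code that may call __builtin_longjmp (buf, 1) ...
     else
       ... resumed here, with setjmp returning 1 ...  */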

static rtx
expand_builtin_setjmp (arglist, target)
     tree arglist;
     rtx target;
{
  rtx buf_addr, next_lab, cont_lab;

  if (arglist == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
    return NULL_RTX;

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

  next_lab = gen_label_rtx ();
  cont_lab = gen_label_rtx ();

  expand_builtin_setjmp_setup (buf_addr, next_lab);

  /* Set TARGET to zero and branch to the continue label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (cont_lab));
  emit_barrier ();
  emit_label (next_lab);

  expand_builtin_setjmp_receiver (next_lab);

  /* Set TARGET to one.  */
  emit_move_insn (target, const1_rtx);
  emit_label (cont_lab);

  /* Tell flow about the strange goings on.  Putting `next_lab' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels
    = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);

  return target;
}

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}

static rtx
get_memory_rtx (exp)
     tree exp;
{
  rtx mem;
  int is_aggregate;

  mem = gen_rtx_MEM (BLKmode,
		     memory_address (BLKmode,
				     expand_expr (exp, NULL_RTX,
						  ptr_mode, EXPAND_SUM)));

  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);

  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
     if the value is the address of a structure or if the expression is
     cast to a pointer to structure type.  */
  is_aggregate = 0;

  while (TREE_CODE (exp) == NOP_EXPR)
    {
      tree cast_type = TREE_TYPE (exp);
      if (TREE_CODE (cast_type) == POINTER_TYPE
	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
	{
	  is_aggregate = 1;
	  break;
	}
      exp = TREE_OPERAND (exp, 0);
    }

  if (is_aggregate == 0)
    {
      tree type;

      if (TREE_CODE (exp) == ADDR_EXPR)
	/* If this is the address of an object, check whether the
	   object is an array.  */
	type = TREE_TYPE (TREE_OPERAND (exp, 0));
      else
	type = TREE_TYPE (TREE_TYPE (exp));
      is_aggregate = AGGREGATE_TYPE_P (type);
    }

  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
  return mem;
}


/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (flag_errno_math && ! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
				   0, 0, lab1);

#ifdef TARGET_EDOM
	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }
#else
	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
#endif

	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;

    case BUILT_IN_FMOD:
      break;

      /* __builtin_apply_args returns a block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
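      /* A forwarding sketch (SIZE is a caller-supplied worst case, as
	 discussed above):

	   void *args = __builtin_apply_args ();
	   void *result = __builtin_apply ((void (*) ()) fn, args, SIZE);
	   __builtin_return (result);  */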
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is the address of the block
	 of memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_args_info (N) returns word N of the arg space info
	 for the current function.  The number and meanings of the words
	 are controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int *word_ptr = (int *) &current_function_args_info;
#if 0
	/* These are used by the code below that is if 0'ed away.  */
	int i;
	tree type, elts, result;
#endif

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
      }

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_REFs for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);

    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);

    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);
	  rtx tmp;

	  /* We return 1 for a numeric type that's known to be a constant
	     value at compile-time or for an aggregate type that's a
	     literal constant.  */
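	  /* E.g. (sketch): __builtin_constant_p ("abc") and
	     __builtin_constant_p (3 * 4) fold to 1 here (the multiply
	     has already been folded); for a non-constant scalar ARG the
	     CONSTANT_P_RTX emitted below lets CSE still prove it.  */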
	  STRIP_NOPS (arg);

	  /* If we know this is a constant, emit the constant of one.  */
	  if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
	      || (TREE_CODE (arg) == CONSTRUCTOR
		  && TREE_CONSTANT (arg))
	      || (TREE_CODE (arg) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
	    return const1_rtx;

	  /* If we aren't going to be running CSE or this expression
	     has side effects, show we don't know it to be a constant.
	     Likewise if it's a pointer or aggregate type since in those
	     cases we only want literals, since those are only optimized
	     when generating RTL, not later.  */
	  if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
	      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (TREE_TYPE (arg)))
	    return const0_rtx;

	  /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
	     chance to see if it can deduce whether ARG is constant.  */

	  tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
	  tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
	  return tmp;
	}

    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    error ("invalid arg to `__builtin_frame_address'");
	  else
	    error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
						hard_frame_pointer_rtx);

	  /* Some ports cannot access arbitrary stack frames.  */
	  if (tem == NULL)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
		warning ("unsupported arg to `__builtin_frame_address'");
	      else
		warning ("unsupported arg to `__builtin_return_address'");
	      return const0_rtx;
	    }

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  if (GET_CODE (tem) != REG
	      && ! CONSTANT_P (tem))
	    tem = copy_to_mode_reg (Pmode, tem);
	  return tem;
	}

    /* Returns the address of the area where the structure is returned.
       0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);

    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;

    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len != 0)
	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int) icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);
	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));

	  if (! (*insn_operand_predicate[(int) icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  /* Check the string is readable and has an end.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			       src_rtx, Pmode,
			       GEN_INT (MEMORY_USE_RO),
			       TYPE_MODE (integer_type_node));

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int) icode][2];
	  if (! (*insn_operand_predicate[(int) icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx_MEM (BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}

    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}

      /* Falls through to the memcpy code below.  */
9504    case BUILT_IN_MEMCPY:
9505      /* If not optimizing, call the library function.  */
9506      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9507	break;
9508
9509      if (arglist == 0
9510	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9511	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9512	  || TREE_CHAIN (arglist) == 0
9513	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9514	      != POINTER_TYPE)
9515	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9516	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
9517				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
9518	      != INTEGER_TYPE))
9519	break;
9520      else
9521	{
9522	  tree dest = TREE_VALUE (arglist);
9523	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
9524	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9525
9526	  int src_align
9527	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9528	  int dest_align
9529	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9530	  rtx dest_mem, src_mem, dest_addr, len_rtx;
9531
9532	  /* If either SRC or DEST is not a pointer type, don't do
9533	     this operation in-line.  */
9534	  if (src_align == 0 || dest_align == 0)
9535	    {
9536	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9537		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9538	      break;
9539	    }
9540
9541	  dest_mem = get_memory_rtx (dest);
9542	  src_mem = get_memory_rtx (src);
9543	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9544
9545	  /* Just copy the rights of SRC to the rights of DEST.  */
9546	  if (current_function_check_memory_usage)
9547	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9548			       XEXP (dest_mem, 0), Pmode,
9549			       XEXP (src_mem, 0), Pmode,
9550			       len_rtx, TYPE_MODE (sizetype));
9551
9552	  /* Copy word part most expediently.  */
9553	  dest_addr
9554	    = emit_block_move (dest_mem, src_mem, len_rtx,
9555			       MIN (src_align, dest_align));
9556
9557	  if (dest_addr == 0)
9558	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9559
9560	  return dest_addr;
9561	}
9562
9563    case BUILT_IN_MEMSET:
9564      /* If not optimizing, call the library function.  */
9565      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9566	break;
9567
9568      if (arglist == 0
9569	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9570	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9571	  || TREE_CHAIN (arglist) == 0
9572	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9573	      != INTEGER_TYPE)
9574	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9575	  || (INTEGER_TYPE
9576	      != (TREE_CODE (TREE_TYPE
9577			     (TREE_VALUE
9578			      (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9579	break;
9580      else
9581	{
9582	  tree dest = TREE_VALUE (arglist);
9583	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
9584	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9585
9586	  int dest_align
9587	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9588	  rtx dest_mem, dest_addr, len_rtx;
9589
9590	  /* If DEST is not a pointer type, don't do this
9591	     operation in-line.  */
9592	  if (dest_align == 0)
9593	    break;
9594
9595	  /* If the arguments have side-effects, then we can only evaluate
9596	     them at most once.  The following code evaluates them twice if
9597	     they are not constants because we break out to expand_call
9598	     in that case.  They can't be constants if they have side-effects
9599	     so we can check for that first.  Alternatively, we could call
9600	     save_expr to make multiple evaluation safe.  */
9601	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9602	    break;
9603
	  /* If VAL is not 0, don't do this operation in-line.  */
9605	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9606	    break;
9607
9608	  /* If LEN does not expand to a constant, don't do this
9609	     operation in-line.  */
9610	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9611	  if (GET_CODE (len_rtx) != CONST_INT)
9612	    break;
9613
9614	  dest_mem = get_memory_rtx (dest);
9615
9616	  /* Just check DST is writable and mark it as readable.  */
9617	  if (current_function_check_memory_usage)
9618	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9619			       XEXP (dest_mem, 0), Pmode,
9620			       len_rtx, TYPE_MODE (sizetype),
9621			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));

9625	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9626
9627	  if (dest_addr == 0)
9628	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9629
9630	  return dest_addr;
9631	}
9632
9633/* These comparison functions need an instruction that returns an actual
9634   index.  An ordinary compare that just sets the condition codes
9635   is not enough.  */
9636#ifdef HAVE_cmpstrsi
9637    case BUILT_IN_STRCMP:
9638      /* If not optimizing, call the library function.  */
9639      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9640	break;
9641
9642      /* If we need to check memory accesses, call the library function.  */
9643      if (current_function_check_memory_usage)
9644	break;
9645
9646      if (arglist == 0
9647	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9648	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9649	  || TREE_CHAIN (arglist) == 0
9650	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9651	break;
9652      else if (!HAVE_cmpstrsi)
9653	break;
9654      {
9655	tree arg1 = TREE_VALUE (arglist);
9656	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9657	tree len, len2;
9658
9659	len = c_strlen (arg1);
9660	if (len)
9661	  len = size_binop (PLUS_EXPR, integer_one_node, len);
9662	len2 = c_strlen (arg2);
9663	if (len2)
9664	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9665
9666	/* If we don't have a constant length for the first, use the length
9667	   of the second, if we know it.  We don't require a constant for
9668	   this case; some cost analysis could be done if both are available
9669	   but neither is constant.  For now, assume they're equally cheap.
9670
9671	   If both strings have constant lengths, use the smaller.  This
9672	   could arise if optimization results in strcpy being called with
9673	   two fixed strings, or if the code was machine-generated.  We should
9674	   add some code to the `memcmp' handler below to deal with such
9675	   situations, someday.  */
9676	if (!len || TREE_CODE (len) != INTEGER_CST)
9677	  {
9678	    if (len2)
9679	      len = len2;
9680	    else if (len == 0)
9681	      break;
9682	  }
9683	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9684	  {
9685	    if (tree_int_cst_lt (len2, len))
9686	      len = len2;
9687	  }
9688
9689	chainon (arglist, build_tree_list (NULL_TREE, len));
9690      }
9691
9692      /* Drops in.  */
9693    case BUILT_IN_MEMCMP:
9694      /* If not optimizing, call the library function.  */
9695      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9696	break;
9697
9698      /* If we need to check memory accesses, call the library function.  */
9699      if (current_function_check_memory_usage)
9700	break;
9701
9702      if (arglist == 0
9703	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9704	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9705	  || TREE_CHAIN (arglist) == 0
9706	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9707	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9708	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9709	break;
9710      else if (!HAVE_cmpstrsi)
9711	break;
9712      {
9713	tree arg1 = TREE_VALUE (arglist);
9714	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9715	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9716	rtx result;
9717
9718	int arg1_align
9719	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9720	int arg2_align
9721	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9722	enum machine_mode insn_mode
9723	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9724
9725	/* If we don't have POINTER_TYPE, call the function.  */
9726	if (arg1_align == 0 || arg2_align == 0)
9727	  {
9728	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9729	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9730	    break;
9731	  }
9732
9733	/* Make a place to write the result of the instruction.  */
9734	result = target;
9735	if (! (result != 0
9736	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9737	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9738	  result = gen_reg_rtx (insn_mode);
9739
9740	emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9741				 get_memory_rtx (arg2),
9742				 expand_expr (len, NULL_RTX, VOIDmode, 0),
9743				 GEN_INT (MIN (arg1_align, arg2_align))));
9744
9745	/* Return the value in the proper mode for this function.  */
9746	mode = TYPE_MODE (TREE_TYPE (exp));
9747	if (GET_MODE (result) == mode)
9748	  return result;
9749	else if (target != 0)
9750	  {
9751	    convert_move (target, result, 0);
9752	    return target;
9753	  }
9754	else
9755	  return convert_to_mode (mode, result, 0);
9756      }
9757#else
9758    case BUILT_IN_STRCMP:
9759    case BUILT_IN_MEMCMP:
9760      break;
9761#endif
9762
9763    case BUILT_IN_SETJMP:
9764      target = expand_builtin_setjmp (arglist, target);
      if (target)
	return target;
      break;
9768
9769      /* __builtin_longjmp is passed a pointer to an array of five words.
9770	 It's similar to the C library longjmp function but works with
9771	 __builtin_setjmp above.  */
9772    case BUILT_IN_LONGJMP:
9773      if (arglist == 0 || TREE_CHAIN (arglist) == 0
9774	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9775	break;
9776      else
9777	{
9778	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9779				      VOIDmode, 0);
9780	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9781				   NULL_RTX, VOIDmode, 0);
9782
9783	  if (value != const1_rtx)
9784	    {
9785	      error ("__builtin_longjmp second argument must be 1");
9786	      return const0_rtx;
9787	    }
9788
9789	  expand_builtin_longjmp (buf_addr, value);
9790	  return const0_rtx;
9791	}
9792
9793    case BUILT_IN_TRAP:
9794#ifdef HAVE_trap
9795      if (HAVE_trap)
9796	emit_insn (gen_trap ());
9797      else
9798#endif
9799	error ("__builtin_trap not supported by this target");
9800      emit_barrier ();
9801      return const0_rtx;
9802
9803      /* Various hooks for the DWARF 2 __throw routine.  */
9804    case BUILT_IN_UNWIND_INIT:
9805      expand_builtin_unwind_init ();
9806      return const0_rtx;
9807    case BUILT_IN_DWARF_CFA:
9808      return virtual_cfa_rtx;
9809#ifdef DWARF2_UNWIND_INFO
9810    case BUILT_IN_DWARF_FP_REGNUM:
9811      return expand_builtin_dwarf_fp_regnum ();
9812    case BUILT_IN_DWARF_REG_SIZE:
9813      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9814#endif
9815    case BUILT_IN_FROB_RETURN_ADDR:
9816      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9817    case BUILT_IN_EXTRACT_RETURN_ADDR:
9818      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9819    case BUILT_IN_EH_RETURN:
9820      expand_builtin_eh_return (TREE_VALUE (arglist),
9821				TREE_VALUE (TREE_CHAIN (arglist)),
9822				TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9823      return const0_rtx;
9824
9825    default:			/* just do library call, if unknown builtin */
9826      error ("built-in function `%s' not currently supported",
9827	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9828    }
9829
9830  /* The switch statement above can drop through to cause the function
9831     to be called normally.  */
9832
9833  return expand_call (exp, target, ignore);
9834}
9835
9836/* Built-in functions to perform an untyped call and return.  */
9837
9838/* For each register that may be used for calling a function, this
9839   gives a mode used to copy the register's value.  VOIDmode indicates
9840   the register is not used for calling a function.  If the machine
9841   has register windows, this gives only the outbound registers.
9842   INCOMING_REGNO gives the corresponding inbound register.  */
9843static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9844
9845/* For each register that may be used for returning values, this gives
9846   a mode used to copy the register's value.  VOIDmode indicates the
9847   register is not used for returning values.  If the machine has
9848   register windows, this gives only the outbound registers.
9849   INCOMING_REGNO gives the corresponding inbound register.  */
9850static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9851
9852/* For each register that may be used for calling a function, this
9853   gives the offset of that register into the block returned by
9854   __builtin_apply_args.  0 indicates that the register is not
9855   used for calling a function.  */
9856static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9857
9858/* Return the offset of register REGNO into the block returned by
9859   __builtin_apply_args.  This is not declared static, since it is
9860   needed in objc-act.c.  */
9861
9862int
9863apply_args_register_offset (regno)
9864     int regno;
9865{
9866  apply_args_size ();
9867
  /* Arguments are always put in outgoing registers (in the argument
     block) when that makes sense.  */
9870#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
9872#endif
9873  return apply_args_reg_offset[regno];
9874}
9875
9876/* Return the size required for the block returned by __builtin_apply_args,
9877   and initialize apply_args_mode.  */
9878
9879static int
9880apply_args_size ()
9881{
9882  static int size = -1;
9883  int align, regno;
9884  enum machine_mode mode;
9885
9886  /* The values computed by this function never change.  */
9887  if (size < 0)
9888    {
9889      /* The first value is the incoming arg-pointer.  */
9890      size = GET_MODE_SIZE (Pmode);
9891
9892      /* The second value is the structure value address unless this is
9893	 passed as an "invisible" first argument.  */
9894      if (struct_value_rtx)
9895	size += GET_MODE_SIZE (Pmode);
9896
9897      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9898	if (FUNCTION_ARG_REGNO_P (regno))
9899	  {
9900	    /* Search for the proper mode for copying this register's
9901	       value.  I'm not sure this is right, but it works so far.  */
9902	    enum machine_mode best_mode = VOIDmode;
9903
9904	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9905		 mode != VOIDmode;
9906		 mode = GET_MODE_WIDER_MODE (mode))
9907	      if (HARD_REGNO_MODE_OK (regno, mode)
9908		  && HARD_REGNO_NREGS (regno, mode) == 1)
9909		best_mode = mode;
9910
9911	    if (best_mode == VOIDmode)
9912	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9913		   mode != VOIDmode;
9914		   mode = GET_MODE_WIDER_MODE (mode))
9915		if (HARD_REGNO_MODE_OK (regno, mode)
9916		    && (mov_optab->handlers[(int) mode].insn_code
9917			!= CODE_FOR_nothing))
9918		  best_mode = mode;
9919
9920	    mode = best_mode;
9921	    if (mode == VOIDmode)
9922	      abort ();
9923
9924	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9925	    if (size % align != 0)
9926	      size = CEIL (size, align) * align;
9927	    apply_args_reg_offset[regno] = size;
9928	    size += GET_MODE_SIZE (mode);
9929	    apply_args_mode[regno] = mode;
9930	  }
9931	else
9932	  {
9933	    apply_args_mode[regno] = VOIDmode;
9934	    apply_args_reg_offset[regno] = 0;
9935	  }
9936    }
9937  return size;
9938}
9939
9940/* Return the size required for the block returned by __builtin_apply,
9941   and initialize apply_result_mode.  */
9942
9943static int
9944apply_result_size ()
9945{
9946  static int size = -1;
9947  int align, regno;
9948  enum machine_mode mode;
9949
9950  /* The values computed by this function never change.  */
9951  if (size < 0)
9952    {
9953      size = 0;
9954
9955      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9956	if (FUNCTION_VALUE_REGNO_P (regno))
9957	  {
9958	    /* Search for the proper mode for copying this register's
9959	       value.  I'm not sure this is right, but it works so far.  */
9960	    enum machine_mode best_mode = VOIDmode;
9961
9962	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9963		 mode != TImode;
9964		 mode = GET_MODE_WIDER_MODE (mode))
9965	      if (HARD_REGNO_MODE_OK (regno, mode))
9966		best_mode = mode;
9967
9968	    if (best_mode == VOIDmode)
9969	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9970		   mode != VOIDmode;
9971		   mode = GET_MODE_WIDER_MODE (mode))
9972		if (HARD_REGNO_MODE_OK (regno, mode)
9973		    && (mov_optab->handlers[(int) mode].insn_code
9974			!= CODE_FOR_nothing))
9975		  best_mode = mode;
9976
9977	    mode = best_mode;
9978	    if (mode == VOIDmode)
9979	      abort ();
9980
9981	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9982	    if (size % align != 0)
9983	      size = CEIL (size, align) * align;
9984	    size += GET_MODE_SIZE (mode);
9985	    apply_result_mode[regno] = mode;
9986	  }
9987	else
9988	  apply_result_mode[regno] = VOIDmode;
9989
9990      /* Allow targets that use untyped_call and untyped_return to override
9991	 the size so that machine-specific information can be stored here.  */
9992#ifdef APPLY_RESULT_SIZE
9993      size = APPLY_RESULT_SIZE;
9994#endif
9995    }
9996  return size;
9997}
9998
9999#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
10000/* Create a vector describing the result block RESULT.  If SAVEP is true,
10001   the result block is used to save the values; otherwise it is used to
10002   restore the values.  */
10003
10004static rtx
10005result_vector (savep, result)
10006     int savep;
10007     rtx result;
10008{
10009  int regno, size, align, nelts;
10010  enum machine_mode mode;
10011  rtx reg, mem;
10012  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
10013
10014  size = nelts = 0;
10015  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10016    if ((mode = apply_result_mode[regno]) != VOIDmode)
10017      {
10018	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10019	if (size % align != 0)
10020	  size = CEIL (size, align) * align;
10021	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
10022	mem = change_address (result, mode,
10023			      plus_constant (XEXP (result, 0), size));
10024	savevec[nelts++] = (savep
10025			    ? gen_rtx_SET (VOIDmode, mem, reg)
10026			    : gen_rtx_SET (VOIDmode, reg, mem));
10027	size += GET_MODE_SIZE (mode);
10028      }
10029  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
10030}
10031#endif /* HAVE_untyped_call or HAVE_untyped_return */
10032
10033/* Save the state required to perform an untyped call with the same
10034   arguments as were passed to the current function.  */
10035
10036static rtx
10037expand_builtin_apply_args ()
10038{
10039  rtx registers;
10040  int size, align, regno;
10041  enum machine_mode mode;
10042
10043  /* Create a block where the arg-pointer, structure value address,
10044     and argument registers can be saved.  */
10045  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
10046
10047  /* Walk past the arg-pointer and structure value address.  */
10048  size = GET_MODE_SIZE (Pmode);
10049  if (struct_value_rtx)
10050    size += GET_MODE_SIZE (Pmode);
10051
10052  /* Save each register used in calling a function to the block.  */
10053  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10054    if ((mode = apply_args_mode[regno]) != VOIDmode)
10055      {
10056	rtx tem;
10057
10058	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10059	if (size % align != 0)
10060	  size = CEIL (size, align) * align;
10061
10062	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10063
10064#ifdef STACK_REGS
        /* For reg-stack.c's stack register housekeeping.
	   Compare with a similar piece of code in function.c.  */
10067
10068        emit_insn (gen_rtx_USE (mode, tem));
10069#endif
10070
10071	emit_move_insn (change_address (registers, mode,
10072					plus_constant (XEXP (registers, 0),
10073						       size)),
10074			tem);
10075	size += GET_MODE_SIZE (mode);
10076      }
10077
10078  /* Save the arg pointer to the block.  */
10079  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
10080		  copy_to_reg (virtual_incoming_args_rtx));
10081  size = GET_MODE_SIZE (Pmode);
10082
10083  /* Save the structure value address unless this is passed as an
10084     "invisible" first argument.  */
10085  if (struct_value_incoming_rtx)
10086    {
10087      emit_move_insn (change_address (registers, Pmode,
10088				      plus_constant (XEXP (registers, 0),
10089						     size)),
10090		      copy_to_reg (struct_value_incoming_rtx));
10091      size += GET_MODE_SIZE (Pmode);
10092    }
10093
10094  /* Return the address of the block.  */
10095  return copy_addr_to_reg (XEXP (registers, 0));
10096}
10097
10098/* Perform an untyped call and save the state required to perform an
10099   untyped return of whatever value was returned by the given function.  */
10100
10101static rtx
10102expand_builtin_apply (function, arguments, argsize)
10103     rtx function, arguments, argsize;
10104{
10105  int size, align, regno;
10106  enum machine_mode mode;
10107  rtx incoming_args, result, reg, dest, call_insn;
10108  rtx old_stack_level = 0;
10109  rtx call_fusage = 0;
10110
10111  /* Create a block where the return registers can be saved.  */
10112  result = assign_stack_local (BLKmode, apply_result_size (), -1);
10113
10114  /* ??? The argsize value should be adjusted here.  */
10115
10116  /* Fetch the arg pointer from the ARGUMENTS block.  */
10117  incoming_args = gen_reg_rtx (Pmode);
10118  emit_move_insn (incoming_args,
10119		  gen_rtx_MEM (Pmode, arguments));
10120#ifndef STACK_GROWS_DOWNWARD
10121  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10122				incoming_args, 0, OPTAB_LIB_WIDEN);
10123#endif
10124
10125  /* Perform postincrements before actually calling the function.  */
10126  emit_queue ();
10127
10128  /* Push a new argument block and copy the arguments.  */
10129  do_pending_stack_adjust ();
10130
  /* Save the stack with nonlocal if available.  */
10132#ifdef HAVE_save_stack_nonlocal
10133  if (HAVE_save_stack_nonlocal)
10134    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10135  else
10136#endif
10137    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
10138
10139  /* Push a block of memory onto the stack to store the memory arguments.
10140     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
10142     but it's likely that the source and/or destination addresses in
10143     the block copy will need updating in machine specific ways.  */
10144  dest = allocate_dynamic_stack_space (argsize, 0, 0);
10145  emit_block_move (gen_rtx_MEM (BLKmode, dest),
10146		   gen_rtx_MEM (BLKmode, incoming_args),
10147		   argsize,
10148		   PARM_BOUNDARY / BITS_PER_UNIT);
10149
10150  /* Refer to the argument block.  */
10151  apply_args_size ();
10152  arguments = gen_rtx_MEM (BLKmode, arguments);
10153
10154  /* Walk past the arg-pointer and structure value address.  */
10155  size = GET_MODE_SIZE (Pmode);
10156  if (struct_value_rtx)
10157    size += GET_MODE_SIZE (Pmode);
10158
10159  /* Restore each of the registers previously saved.  Make USE insns
10160     for each of these registers for use in making the call.  */
10161  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10162    if ((mode = apply_args_mode[regno]) != VOIDmode)
10163      {
10164	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10165	if (size % align != 0)
10166	  size = CEIL (size, align) * align;
10167	reg = gen_rtx_REG (mode, regno);
10168	emit_move_insn (reg,
10169			change_address (arguments, mode,
10170					plus_constant (XEXP (arguments, 0),
10171						       size)));
10172
10173	use_reg (&call_fusage, reg);
10174	size += GET_MODE_SIZE (mode);
10175      }
10176
10177  /* Restore the structure value address unless this is passed as an
10178     "invisible" first argument.  */
10179  size = GET_MODE_SIZE (Pmode);
10180  if (struct_value_rtx)
10181    {
10182      rtx value = gen_reg_rtx (Pmode);
10183      emit_move_insn (value,
10184		      change_address (arguments, Pmode,
10185				      plus_constant (XEXP (arguments, 0),
10186						     size)));
10187      emit_move_insn (struct_value_rtx, value);
10188      if (GET_CODE (struct_value_rtx) == REG)
10189	  use_reg (&call_fusage, struct_value_rtx);
10190      size += GET_MODE_SIZE (Pmode);
10191    }
10192
10193  /* All arguments and registers used for the call are set up by now!  */
10194  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10195
10196  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
10197     and we don't want to load it into a register as an optimization,
10198     because prepare_call_address already did it if it should be done.  */
10199  if (GET_CODE (function) != SYMBOL_REF)
10200    function = memory_address (FUNCTION_MODE, function);
10201
10202  /* Generate the actual call instruction and save the return value.  */
10203#ifdef HAVE_untyped_call
10204  if (HAVE_untyped_call)
10205    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10206				      result, result_vector (1, result)));
10207  else
10208#endif
10209#ifdef HAVE_call_value
10210  if (HAVE_call_value)
10211    {
10212      rtx valreg = 0;
10213
10214      /* Locate the unique return register.  It is not possible to
10215	 express a call that sets more than one return register using
10216	 call_value; use untyped_call for that.  In fact, untyped_call
10217	 only needs to save the return registers in the given block.  */
10218      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10219	if ((mode = apply_result_mode[regno]) != VOIDmode)
10220	  {
10221	    if (valreg)
10222	      abort (); /* HAVE_untyped_call required.  */
10223	    valreg = gen_rtx_REG (mode, regno);
10224	  }
10225
10226      emit_call_insn (gen_call_value (valreg,
10227				      gen_rtx_MEM (FUNCTION_MODE, function),
10228				      const0_rtx, NULL_RTX, const0_rtx));
10229
10230      emit_move_insn (change_address (result, GET_MODE (valreg),
10231				      XEXP (result, 0)),
10232		      valreg);
10233    }
10234  else
10235#endif
10236    abort ();
10237
10238  /* Find the CALL insn we just emitted.  */
10239  for (call_insn = get_last_insn ();
10240       call_insn && GET_CODE (call_insn) != CALL_INSN;
10241       call_insn = PREV_INSN (call_insn))
10242    ;
10243
10244  if (! call_insn)
10245    abort ();
10246
10247  /* Put the register usage information on the CALL.  If there is already
10248     some usage information, put ours at the end.  */
10249  if (CALL_INSN_FUNCTION_USAGE (call_insn))
10250    {
10251      rtx link;
10252
10253      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10254	   link = XEXP (link, 1))
10255	;
10256
10257      XEXP (link, 1) = call_fusage;
10258    }
10259  else
10260    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10261
10262  /* Restore the stack.  */
10263#ifdef HAVE_save_stack_nonlocal
10264  if (HAVE_save_stack_nonlocal)
10265    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10266  else
10267#endif
10268    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10269
10270  /* Return the address of the result block.  */
10271  return copy_addr_to_reg (XEXP (result, 0));
10272}
10273
10274/* Perform an untyped return.  */
10275
10276static void
10277expand_builtin_return (result)
10278     rtx result;
10279{
10280  int size, align, regno;
10281  enum machine_mode mode;
10282  rtx reg;
10283  rtx call_fusage = 0;
10284
10285  apply_result_size ();
10286  result = gen_rtx_MEM (BLKmode, result);
10287
10288#ifdef HAVE_untyped_return
10289  if (HAVE_untyped_return)
10290    {
10291      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10292      emit_barrier ();
10293      return;
10294    }
10295#endif
10296
10297  /* Restore the return value and note that each value is used.  */
10298  size = 0;
10299  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10300    if ((mode = apply_result_mode[regno]) != VOIDmode)
10301      {
10302	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10303	if (size % align != 0)
10304	  size = CEIL (size, align) * align;
10305	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10306	emit_move_insn (reg,
10307			change_address (result, mode,
10308					plus_constant (XEXP (result, 0),
10309						       size)));
10310
10311	push_to_sequence (call_fusage);
10312	emit_insn (gen_rtx_USE (VOIDmode, reg));
10313	call_fusage = get_insns ();
10314	end_sequence ();
10315	size += GET_MODE_SIZE (mode);
10316      }
10317
10318  /* Put the USE insns before the return.  */
10319  emit_insns (call_fusage);
10320
  /* Return whatever value was restored by jumping directly to the end
10322     of the function.  */
10323  expand_null_return ();
10324}
10325
10326/* Expand code for a post- or pre- increment or decrement
10327   and return the RTX for the result.
10328   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
10329
10330static rtx
10331expand_increment (exp, post, ignore)
10332     register tree exp;
10333     int post, ignore;
10334{
10335  register rtx op0, op1;
10336  register rtx temp, value;
10337  register tree incremented = TREE_OPERAND (exp, 0);
10338  optab this_optab = add_optab;
10339  int icode;
10340  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10341  int op0_is_copy = 0;
10342  int single_insn = 0;
10343  /* 1 means we can't store into OP0 directly,
10344     because it is a subreg narrower than a word,
10345     and we don't dare clobber the rest of the word.  */
10346  int bad_subreg = 0;
10347
10348  /* Stabilize any component ref that might need to be
10349     evaluated more than once below.  */
10350  if (!post
10351      || TREE_CODE (incremented) == BIT_FIELD_REF
10352      || (TREE_CODE (incremented) == COMPONENT_REF
10353	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10354	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10355    incremented = stabilize_reference (incremented);
10356  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
10357     ones into save exprs so that they don't accidentally get evaluated
10358     more than once by the code below.  */
10359  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10360      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10361    incremented = save_expr (incremented);
10362
10363  /* Compute the operands as RTX.
10364     Note whether OP0 is the actual lvalue or a copy of it:
10365     I believe it is a copy iff it is a register or subreg
10366     and insns were generated in computing it.   */
10367
10368  temp = get_last_insn ();
10369  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10370
10371  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10372     in place but instead must do sign- or zero-extension during assignment,
10373     so we copy it into a new register and let the code below use it as
10374     a copy.
10375
     Note that we can safely modify this SUBREG since it is known not to be
10377     shared (it was made by the expand_expr call above).  */
10378
10379  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10380    {
10381      if (post)
10382	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10383      else
10384	bad_subreg = 1;
10385    }
10386  else if (GET_CODE (op0) == SUBREG
10387	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10388    {
10389      /* We cannot increment this SUBREG in place.  If we are
10390	 post-incrementing, get a copy of the old value.  Otherwise,
10391	 just mark that we cannot increment in place.  */
10392      if (post)
10393	op0 = copy_to_reg (op0);
10394      else
10395	bad_subreg = 1;
10396    }
10397
10398  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10399		 && temp != get_last_insn ());
10400  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10401		     EXPAND_MEMORY_USE_BAD);
10402
10403  /* Decide whether incrementing or decrementing.  */
10404  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10405      || TREE_CODE (exp) == PREDECREMENT_EXPR)
10406    this_optab = sub_optab;
10407
10408  /* Convert decrement by a constant into a negative increment.  */
10409  if (this_optab == sub_optab
10410      && GET_CODE (op1) == CONST_INT)
10411    {
10412      op1 = GEN_INT (- INTVAL (op1));
10413      this_optab = add_optab;
10414    }
10415
10416  /* For a preincrement, see if we can do this with a single instruction.  */
10417  if (!post)
10418    {
10419      icode = (int) this_optab->handlers[(int) mode].insn_code;
10420      if (icode != (int) CODE_FOR_nothing
10421	  /* Make sure that OP0 is valid for operands 0 and 1
10422	     of the insn we want to queue.  */
10423	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10424	  && (*insn_operand_predicate[icode][1]) (op0, mode)
10425	  && (*insn_operand_predicate[icode][2]) (op1, mode))
10426	single_insn = 1;
10427    }
10428
10429  /* If OP0 is not the actual lvalue, but rather a copy in a register,
10430     then we cannot just increment OP0.  We must therefore contrive to
10431     increment the original value.  Then, for postincrement, we can return
10432     OP0 since it is a copy of the old value.  For preincrement, expand here
10433     unless we can do it with a single insn.
10434
10435     Likewise if storing directly into OP0 would clobber high bits
10436     we need to preserve (bad_subreg).  */
10437  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10438    {
10439      /* This is the easiest way to increment the value wherever it is.
10440	 Problems with multiple evaluation of INCREMENTED are prevented
10441	 because either (1) it is a component_ref or preincrement,
10442	 in which case it was stabilized above, or (2) it is an array_ref
10443	 with constant index in an array in a register, which is
10444	 safe to reevaluate.  */
10445      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10446			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
10447			    ? MINUS_EXPR : PLUS_EXPR),
10448			   TREE_TYPE (exp),
10449			   incremented,
10450			   TREE_OPERAND (exp, 1));
10451
10452      while (TREE_CODE (incremented) == NOP_EXPR
10453	     || TREE_CODE (incremented) == CONVERT_EXPR)
10454	{
10455	  newexp = convert (TREE_TYPE (incremented), newexp);
10456	  incremented = TREE_OPERAND (incremented, 0);
10457	}
10458
      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10460      return post ? op0 : temp;
10461    }
10462
10463  if (post)
10464    {
10465      /* We have a true reference to the value in OP0.
10466	 If there is an insn to add or subtract in this mode, queue it.
10467	 Queueing the increment insn avoids the register shuffling
10468	 that often results if we must increment now and first save
10469	 the old value for subsequent use.  */
10470
10471#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
10472      op0 = stabilize (op0);
10473#endif
10474
10475      icode = (int) this_optab->handlers[(int) mode].insn_code;
10476      if (icode != (int) CODE_FOR_nothing
10477	  /* Make sure that OP0 is valid for operands 0 and 1
10478	     of the insn we want to queue.  */
10479	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10480	  && (*insn_operand_predicate[icode][1]) (op0, mode))
10481	{
10482	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10483	    op1 = force_reg (mode, op1);
10484
10485	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10486	}
10487      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10488	{
10489	  rtx addr = (general_operand (XEXP (op0, 0), mode)
10490		      ? force_reg (Pmode, XEXP (op0, 0))
10491		      : copy_to_reg (XEXP (op0, 0)));
10492	  rtx temp, result;
10493
10494	  op0 = change_address (op0, VOIDmode, addr);
10495	  temp = force_reg (GET_MODE (op0), op0);
10496	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10497	    op1 = force_reg (mode, op1);
10498
10499	  /* The increment queue is LIFO, thus we have to `queue'
10500	     the instructions in reverse order.  */
10501	  enqueue_insn (op0, gen_move_insn (op0, temp));
10502	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10503	  return result;
10504	}
10505    }
10506
10507  /* Preincrement, or we can't increment with one simple insn.  */
10508  if (post)
10509    /* Save a copy of the value before inc or dec, to return it later.  */
10510    temp = value = copy_to_reg (op0);
10511  else
10512    /* Arrange to return the incremented value.  */
10513    /* Copy the rtx because expand_binop will protect from the queue,
10514       and the results of that would be invalid for us to return
10515       if our caller does emit_queue before using our result.  */
10516    temp = copy_rtx (value = op0);
10517
10518  /* Increment however we can.  */
10519  op1 = expand_binop (mode, this_optab, value, op1,
10520  		      current_function_check_memory_usage ? NULL_RTX : op0,
10521		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10522  /* Make sure the value is stored into OP0.  */
10523  if (op1 != op0)
10524    emit_move_insn (op0, op1);
10525
10526  return temp;
10527}
10528
10529/* Expand all function calls contained within EXP, innermost ones first.
10530   But don't look within expressions that have sequence points.
10531   For each CALL_EXPR, record the rtx for its value
10532   in the CALL_EXPR_RTL field.  */
10533
10534static void
10535preexpand_calls (exp)
10536     tree exp;
10537{
10538  register int nops, i;
10539  int type = TREE_CODE_CLASS (TREE_CODE (exp));
10540
10541  if (! do_preexpand_calls)
10542    return;
10543
10544  /* Only expressions and references can contain calls.  */
10545
10546  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10547    return;
10548
10549  switch (TREE_CODE (exp))
10550    {
10551    case CALL_EXPR:
10552      /* Do nothing if already expanded.  */
10553      if (CALL_EXPR_RTL (exp) != 0
10554	  /* Do nothing if the call returns a variable-sized object.  */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10556	  /* Do nothing to built-in functions.  */
10557	  || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10558	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10559		  == FUNCTION_DECL)
10560	      && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10561	return;
10562
10563      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10564      return;
10565
10566    case COMPOUND_EXPR:
10567    case COND_EXPR:
10568    case TRUTH_ANDIF_EXPR:
10569    case TRUTH_ORIF_EXPR:
10570      /* If we find one of these, then we can be sure
10571	 the adjust will be done for it (since it makes jumps).
10572	 Do it now, so that if this is inside an argument
10573	 of a function, we don't get the stack adjustment
10574	 after some other args have already been pushed.  */
10575      do_pending_stack_adjust ();
10576      return;
10577
10578    case BLOCK:
10579    case RTL_EXPR:
10580    case WITH_CLEANUP_EXPR:
10581    case CLEANUP_POINT_EXPR:
10582    case TRY_CATCH_EXPR:
10583      return;
10584
10585    case SAVE_EXPR:
10586      if (SAVE_EXPR_RTL (exp) != 0)
10587	return;
10588
10589    default:
10590      break;
10591    }
10592
10593  nops = tree_code_length[(int) TREE_CODE (exp)];
10594  for (i = 0; i < nops; i++)
10595    if (TREE_OPERAND (exp, i) != 0)
10596      {
10597	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10598	if (type == 'e' || type == '<' || type == '1' || type == '2'
10599	    || type == 'r')
10600	  preexpand_calls (TREE_OPERAND (exp, i));
10601      }
10602}
10603
10604/* At the start of a function, record that we have no previously-pushed
10605   arguments waiting to be popped.  */
10606
10607void
10608init_pending_stack_adjust ()
10609{
10610  pending_stack_adjust = 0;
10611}
10612
10613/* When exiting from function, if safe, clear out any pending stack adjust
10614   so the adjustment won't get done.
10615
10616   Note, if the current function calls alloca, then it must have a
10617   frame pointer regardless of the value of flag_omit_frame_pointer.  */
10618
10619void
10620clear_pending_stack_adjust ()
10621{
10622#ifdef EXIT_IGNORE_STACK
10623  if (optimize > 0
10624      && (! flag_omit_frame_pointer || current_function_calls_alloca)
10625      && EXIT_IGNORE_STACK
10626      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10627      && ! flag_inline_functions)
10628    pending_stack_adjust = 0;
10629#endif
10630}
10631
10632/* Pop any previously-pushed arguments that have not been popped yet.  */
10633
10634void
10635do_pending_stack_adjust ()
10636{
10637  if (inhibit_defer_pop == 0)
10638    {
10639      if (pending_stack_adjust != 0)
10640	adjust_stack (GEN_INT (pending_stack_adjust));
10641      pending_stack_adjust = 0;
10642    }
10643}
10644
10645/* Expand conditional expressions.  */
10646
10647/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10648   LABEL is an rtx of code CODE_LABEL, in this function and all the
10649   functions here.  */
10650
10651void
10652jumpifnot (exp, label)
10653     tree exp;
10654     rtx label;
10655{
10656  do_jump (exp, label, NULL_RTX);
10657}
10658
10659/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
10660
10661void
10662jumpif (exp, label)
10663     tree exp;
10664     rtx label;
10665{
10666  do_jump (exp, NULL_RTX, label);
10667}
10668
10669/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10670   the result is zero, or IF_TRUE_LABEL if the result is one.
10671   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10672   meaning fall through in that case.
10673
10674   do_jump always does any pending stack adjust except when it does not
10675   actually perform a jump.  An example where there is no jump
10676   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10677
10678   This function is responsible for optimizing cases such as
10679   &&, || and comparison operators in EXP.  */
10680
10681void
10682do_jump (exp, if_false_label, if_true_label)
10683     tree exp;
10684     rtx if_false_label, if_true_label;
10685{
10686  register enum tree_code code = TREE_CODE (exp);
10687  /* Some cases need to create a label to jump to
10688     in order to properly fall through.
10689     These cases set DROP_THROUGH_LABEL nonzero.  */
10690  rtx drop_through_label = 0;
10691  rtx temp;
10692  rtx comparison = 0;
10693  int i;
10694  tree type;
10695  enum machine_mode mode;
10696
10697#ifdef MAX_INTEGER_COMPUTATION_MODE
10698  check_max_integer_computation_mode (exp);
10699#endif
10700
10701  emit_queue ();
10702
10703  switch (code)
10704    {
10705    case ERROR_MARK:
10706      break;
10707
10708    case INTEGER_CST:
10709      temp = integer_zerop (exp) ? if_false_label : if_true_label;
10710      if (temp)
10711	emit_jump (temp);
10712      break;
10713
10714#if 0
10715      /* This is not true with #pragma weak  */
10716    case ADDR_EXPR:
10717      /* The address of something can never be zero.  */
10718      if (if_true_label)
10719	emit_jump (if_true_label);
10720      break;
10721#endif
10722
10723    case NOP_EXPR:
10724      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10725	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10726	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10727	goto normal;
10728    case CONVERT_EXPR:
10729      /* If we are narrowing the operand, we have to do the compare in the
10730	 narrower mode.  */
10731      if ((TYPE_PRECISION (TREE_TYPE (exp))
10732	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10733	goto normal;
10734    case NON_LVALUE_EXPR:
10735    case REFERENCE_EXPR:
10736    case ABS_EXPR:
10737    case NEGATE_EXPR:
10738    case LROTATE_EXPR:
10739    case RROTATE_EXPR:
10740      /* These cannot change zero->non-zero or vice versa.  */
10741      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10742      break;
10743
10744#if 0
10745      /* This is never less insns than evaluating the PLUS_EXPR followed by
10746	 a test and can be longer if the test is eliminated.  */
10747    case PLUS_EXPR:
10748      /* Reduce to minus.  */
10749      exp = build (MINUS_EXPR, TREE_TYPE (exp),
10750		   TREE_OPERAND (exp, 0),
10751		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10752				 TREE_OPERAND (exp, 1))));
10753      /* Process as MINUS.  */
10754#endif
10755
10756    case MINUS_EXPR:
10757      /* Non-zero iff operands of minus differ.  */
10758      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10759				   TREE_OPERAND (exp, 0),
10760				   TREE_OPERAND (exp, 1)),
10761			    NE, NE);
10762      break;
10763
10764    case BIT_AND_EXPR:
10765      /* If we are AND'ing with a small constant, do this comparison in the
10766	 smallest type that fits.  If the machine doesn't have comparisons
10767	 that small, it will be converted back to the wider comparison.
10768	 This helps if we are testing the sign bit of a narrower object.
10769	 combine can't do this for us because it can't know whether a
10770	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
10771
10772      if (! SLOW_BYTE_ACCESS
10773	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10774	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10775	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10776	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10777	  && (type = type_for_mode (mode, 1)) != 0
10778	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10779	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10780	      != CODE_FOR_nothing))
10781	{
10782	  do_jump (convert (type, exp), if_false_label, if_true_label);
10783	  break;
10784	}
10785      goto normal;
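
      /* For example (a hedged illustration): for `if (c & 0x80)' the
	 constant needs floor_log2 (0x80) + 1 = 8 bits, so the test is
	 done as a QImode comparison when the machine has one, catching
	 the common sign-bit test of a byte-sized value.  */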
10786
10787    case TRUTH_NOT_EXPR:
10788      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10789      break;
10790
10791    case TRUTH_ANDIF_EXPR:
10792      if (if_false_label == 0)
10793	if_false_label = drop_through_label = gen_label_rtx ();
10794      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10795      start_cleanup_deferral ();
10796      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10797      end_cleanup_deferral ();
10798      break;
10799
10800    case TRUTH_ORIF_EXPR:
10801      if (if_true_label == 0)
10802	if_true_label = drop_through_label = gen_label_rtx ();
10803      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10804      start_cleanup_deferral ();
10805      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10806      end_cleanup_deferral ();
10807      break;
10808
10809    case COMPOUND_EXPR:
10810      push_temp_slots ();
10811      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10812      preserve_temp_slots (NULL_RTX);
10813      free_temp_slots ();
10814      pop_temp_slots ();
10815      emit_queue ();
10816      do_pending_stack_adjust ();
10817      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10818      break;
10819
10820    case COMPONENT_REF:
10821    case BIT_FIELD_REF:
10822    case ARRAY_REF:
10823      {
10824	int bitsize, bitpos, unsignedp;
10825	enum machine_mode mode;
10826	tree type;
10827	tree offset;
10828	int volatilep = 0;
10829	int alignment;
10830
10831	/* Get description of this reference.  We don't actually care
10832	   about the underlying object here.  */
10833	get_inner_reference (exp, &bitsize, &bitpos, &offset,
10834			     &mode, &unsignedp, &volatilep,
10835			     &alignment);
10836
10837	type = type_for_size (bitsize, unsignedp);
10838	if (! SLOW_BYTE_ACCESS
10839	    && type != 0 && bitsize >= 0
10840	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10841	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10842		!= CODE_FOR_nothing))
10843	  {
10844	    do_jump (convert (type, exp), if_false_label, if_true_label);
10845	    break;
10846	  }
10847	goto normal;
10848      }
10849
10850    case COND_EXPR:
10851      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
10852      if (integer_onep (TREE_OPERAND (exp, 1))
10853	  && integer_zerop (TREE_OPERAND (exp, 2)))
10854	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10855
10856      else if (integer_zerop (TREE_OPERAND (exp, 1))
10857	       && integer_onep (TREE_OPERAND (exp, 2)))
10858	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10859
10860      else
10861	{
10862	  register rtx label1 = gen_label_rtx ();
10863	  drop_through_label = gen_label_rtx ();
10864
10865	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10866
10867	  start_cleanup_deferral ();
10868	  /* Now the THEN-expression.  */
10869	  do_jump (TREE_OPERAND (exp, 1),
10870		   if_false_label ? if_false_label : drop_through_label,
10871		   if_true_label ? if_true_label : drop_through_label);
10872	  /* In case the do_jump just above never jumps.  */
10873	  do_pending_stack_adjust ();
10874	  emit_label (label1);
10875
10876	  /* Now the ELSE-expression.  */
10877	  do_jump (TREE_OPERAND (exp, 2),
10878		   if_false_label ? if_false_label : drop_through_label,
10879		   if_true_label ? if_true_label : drop_through_label);
10880	  end_cleanup_deferral ();
10881	}
10882      break;
10883
10884    case EQ_EXPR:
10885      {
10886	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10887
10888	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10889	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10890	  {
10891	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10892	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10893	    do_jump
10894	      (fold
10895	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10896		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10897				    fold (build1 (REALPART_EXPR,
10898						  TREE_TYPE (inner_type),
10899						  exp0)),
10900				    fold (build1 (REALPART_EXPR,
10901						  TREE_TYPE (inner_type),
10902						  exp1)))),
10903		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10904				    fold (build1 (IMAGPART_EXPR,
10905						  TREE_TYPE (inner_type),
10906						  exp0)),
10907				    fold (build1 (IMAGPART_EXPR,
10908						  TREE_TYPE (inner_type),
10909						  exp1)))))),
10910	       if_false_label, if_true_label);
10911	  }
10912
10913	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10914	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10915
10916	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10917		 && !can_compare_p (TYPE_MODE (inner_type)))
10918	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10919	else
10920	  comparison = compare (exp, EQ, EQ);
10921	break;
10922      }
10923
10924    case NE_EXPR:
10925      {
10926	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10927
10928	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10929	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10930	  {
10931	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10932	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10933	    do_jump
10934	      (fold
10935	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10936		       fold (build (NE_EXPR, TREE_TYPE (exp),
10937				    fold (build1 (REALPART_EXPR,
10938						  TREE_TYPE (inner_type),
10939						  exp0)),
10940				    fold (build1 (REALPART_EXPR,
10941						  TREE_TYPE (inner_type),
10942						  exp1)))),
10943		       fold (build (NE_EXPR, TREE_TYPE (exp),
10944				    fold (build1 (IMAGPART_EXPR,
10945						  TREE_TYPE (inner_type),
10946						  exp0)),
10947				    fold (build1 (IMAGPART_EXPR,
10948						  TREE_TYPE (inner_type),
10949						  exp1)))))),
10950	       if_false_label, if_true_label);
10951	  }
10952
10953	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10954	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10955
10956	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10957		 && !can_compare_p (TYPE_MODE (inner_type)))
10958	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10959	else
10960	  comparison = compare (exp, NE, NE);
10961	break;
10962      }
10963
10964    case LT_EXPR:
10965      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10966	   == MODE_INT)
10967	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10968	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10969      else
10970	comparison = compare (exp, LT, LTU);
10971      break;
10972
10973    case LE_EXPR:
10974      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10975	   == MODE_INT)
10976	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10977	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10978      else
10979	comparison = compare (exp, LE, LEU);
10980      break;
10981
10982    case GT_EXPR:
10983      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10984	   == MODE_INT)
10985	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10986	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10987      else
10988	comparison = compare (exp, GT, GTU);
10989      break;
10990
10991    case GE_EXPR:
10992      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10993	   == MODE_INT)
10994	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10995	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10996      else
10997	comparison = compare (exp, GE, GEU);
10998      break;
10999
11000    default:
11001    normal:
11002      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
11003#if 0
11004      /* This is not needed any more and causes poor code since it causes
11005	 comparisons and tests from non-SI objects to have different code
11006	 sequences.  */
11007      /* Copy to register to avoid generating bad insns by cse
11008	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
11009      if (!cse_not_expected && GET_CODE (temp) == MEM)
11010	temp = copy_to_reg (temp);
11011#endif
11012      do_pending_stack_adjust ();
11013      if (GET_CODE (temp) == CONST_INT)
11014	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
11015      else if (GET_CODE (temp) == LABEL_REF)
11016	comparison = const_true_rtx;
11017      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
11018	       && !can_compare_p (GET_MODE (temp)))
11019	/* Note swapping the labels gives us not-equal.  */
11020	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
11021      else if (GET_MODE (temp) != VOIDmode)
11022	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
11023				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
11024				       GET_MODE (temp), NULL_RTX, 0);
11025      else
11026	abort ();
11027    }
11028
11029  /* Do any postincrements in the expression that was tested.  */
11030  emit_queue ();
11031
11032  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
11033     straight into a conditional jump instruction as the jump condition.
11034     Otherwise, all the work has been done already.  */
11035
11036  if (comparison == const_true_rtx)
11037    {
11038      if (if_true_label)
11039	emit_jump (if_true_label);
11040    }
11041  else if (comparison == const0_rtx)
11042    {
11043      if (if_false_label)
11044	emit_jump (if_false_label);
11045    }
11046  else if (comparison)
11047    do_jump_for_compare (comparison, if_false_label, if_true_label);
11048
11049  if (drop_through_label)
11050    {
11051      /* If do_jump produces code that might be jumped around,
11052	 do any stack adjusts from that code, before the place
11053	 where control merges in.  */
11054      do_pending_stack_adjust ();
11055      emit_label (drop_through_label);
11056    }
11057}
11058
11059/* Given a comparison expression EXP for values too wide to be compared
11060   with one insn, test the comparison and jump to the appropriate label.
11061   The code of EXP is ignored; we always test GT if SWAP is 0,
11062   and LT if SWAP is 1.  */
11063
11064static void
11065do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
11066     tree exp;
11067     int swap;
11068     rtx if_false_label, if_true_label;
11069{
11070  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
11071  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
11072  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11073  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11074  rtx drop_through_label = 0;
11075  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
11076  int i;
11077
11078  if (! if_true_label || ! if_false_label)
11079    drop_through_label = gen_label_rtx ();
11080  if (! if_true_label)
11081    if_true_label = drop_through_label;
11082  if (! if_false_label)
11083    if_false_label = drop_through_label;
11084
11085  /* Compare a word at a time, high order first.  */
11086  for (i = 0; i < nwords; i++)
11087    {
11088      rtx comp;
11089      rtx op0_word, op1_word;
11090
11091      if (WORDS_BIG_ENDIAN)
11092	{
11093	  op0_word = operand_subword_force (op0, i, mode);
11094	  op1_word = operand_subword_force (op1, i, mode);
11095	}
11096      else
11097	{
11098	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11099	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11100	}
11101
      /* All but the high-order word must be compared as unsigned.  */
11103      comp = compare_from_rtx (op0_word, op1_word,
11104			       (unsignedp || i > 0) ? GTU : GT,
11105			       unsignedp, word_mode, NULL_RTX, 0);
11106      if (comp == const_true_rtx)
11107	emit_jump (if_true_label);
11108      else if (comp != const0_rtx)
11109	do_jump_for_compare (comp, NULL_RTX, if_true_label);
11110
11111      /* Consider lower words only if these are equal.  */
11112      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11113			       NULL_RTX, 0);
11114      if (comp == const_true_rtx)
11115	emit_jump (if_false_label);
11116      else if (comp != const0_rtx)
11117	do_jump_for_compare (comp, NULL_RTX, if_false_label);
11118    }
11119
11120  if (if_false_label)
11121    emit_jump (if_false_label);
11122  if (drop_through_label)
11123    emit_label (drop_through_label);
11124}
11125
11126/* Compare OP0 with OP1, word at a time, in mode MODE.
11127   UNSIGNEDP says to do unsigned comparison.
11128   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
11129
11130void
11131do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
11132     enum machine_mode mode;
11133     int unsignedp;
11134     rtx op0, op1;
11135     rtx if_false_label, if_true_label;
11136{
11137  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11138  rtx drop_through_label = 0;
11139  int i;
11140
11141  if (! if_true_label || ! if_false_label)
11142    drop_through_label = gen_label_rtx ();
11143  if (! if_true_label)
11144    if_true_label = drop_through_label;
11145  if (! if_false_label)
11146    if_false_label = drop_through_label;
11147
11148  /* Compare a word at a time, high order first.  */
11149  for (i = 0; i < nwords; i++)
11150    {
11151      rtx comp;
11152      rtx op0_word, op1_word;
11153
11154      if (WORDS_BIG_ENDIAN)
11155	{
11156	  op0_word = operand_subword_force (op0, i, mode);
11157	  op1_word = operand_subword_force (op1, i, mode);
11158	}
11159      else
11160	{
11161	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11162	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11163	}
11164
      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
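/* Illustrative sketch only (WORD0/WORD1 are hypothetical names for the
   word-sized pieces of two-word operands A and B): the loop below
   behaves like

	if (word0 (a) != word0 (b)) goto if_false_label;
	if (word1 (a) != word1 (b)) goto if_false_label;
	goto if_true_label;

   so the first mismatching word pair decides the comparison.  */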

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
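  /* Sketch of the reduction, for a hypothetical two-word OP0:

	part = word0 (op0);
	part |= word1 (op0);

     after which a single compare of PART against zero decides the jump,
     instead of one compare-and-branch per word.  */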

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
				   NULL_RTX, 0);

      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp == const0_rtx)
	emit_jump (if_true_label);
      else
	do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
			  (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;
      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
	 we passed both labels into a combined compare-and-branch.
	 Ah well, jump threading does a good job of repairing the damage.  */
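      /* Illustration (hypothetical condition CC): the sequence below
	 first emits

		bCC if_false_label	(taken when COMPARISON holds)

	 and then asks invert_jump to flip it in place to

		bNCC if_false_label	(taken when COMPARISON fails)

	 which is the branch we actually want.  */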

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
			  (if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
	first = get_insns ();
      else if (INSN_DELETED_P (first))
	abort ();
      else
	first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
	 for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    branch = insn;
	    br_count += 1;
	  }

      /* If we've got one branch at the end of the sequence,
	 we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
	{
	  rtx insn_label;
	  insn_label = XEXP (condjump_label (branch), 0);
	  JUMP_LABEL (branch) = insn_label;

	  if (insn_label != if_false_label)
	    abort ();

	  if (invert_jump (branch, if_false_label))
	    return;
	}

      /* Multiple branches, or reversion failed.  Convert to branches
	 around an unconditional jump.  */
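      /* Sketch of this fallback, assuming two conditional branches to
	 IF_FALSE_LABEL were emitted (as for a multi-word compare):

		bCC1 if_false_label		bCC1 L_true
		bCC2 if_false_label	==>	bCC2 L_true
						jmp if_false_label
					     L_true:

	 Each branch keeps its condition; only its target changes, so
	 control reaches IF_FALSE_LABEL exactly when no branch fires.  */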

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    rtx insn_label;
	    insn_label = XEXP (condjump_label (insn), 0);
	    JUMP_LABEL (insn) = insn_label;

	    if (insn_label == if_false_label)
	      redirect_jump (insn, if_true_label);
	  }

      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}

/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return op0;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }
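  /* For instance, a comparison that arrives at the block above as
     (LT 4 x) leaves it as (GT x 4); swapping the operands requires
     swapping the condition code, which swap_condition does, so the
     meaning is unchanged.  */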

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
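/* The set/jump/set fallback mentioned above looks roughly like this
   (TARGET and LABEL as used near the end of the function; the two
   constants trade places when INVERT is set):

	target = 1;
	compare op0, op1
	bCC label		(branch when the condition holds)
	target = 0;
     label:
*/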

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
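  /* For example:  x < 1  becomes  x <= 0  and  x >= 1  becomes  x > 0
     (with LEU/GTU when unsigned), while for signed operands  x <= -1
     becomes  x < 0  and  x > -1  becomes  x >= 0.  */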

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
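  /* Worked example:  (x & 8) != 0  is computed below as

	t = (x >> 3) & 1;

     and  (x & 8) == 0  as

	t = ((x >> 3) ^ 1) & 1;

     (the AND is applied last so it can combine with other operations);
     no scc instruction is involved.  */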

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
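  /* Concrete example (values hypothetical): for a switch whose case
     values run from 5 to 10, INDEX arrives here as  i - 5  and RANGE
     is 5.  If  i < 5,  then  i - 5  wraps around to a huge unsigned
     value, so the single GTU test below also catches values below the
     range.  */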

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
