/* explow.c revision 52284 */
1/* Subroutines for manipulating rtx's in semantically interesting ways.
2   Copyright (C) 1987, 91, 94-97, 1998, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA.  */
20
21
22#include "config.h"
23#include "system.h"
24#include "toplev.h"
25#include "rtl.h"
26#include "tree.h"
27#include "flags.h"
28#include "expr.h"
29#include "hard-reg-set.h"
30#include "insn-config.h"
31#include "recog.h"
32#include "insn-flags.h"
33#include "insn-codes.h"
34
35#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
36#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
37#endif
38
39static rtx break_out_memory_refs	PROTO((rtx));
40static void emit_stack_probe		PROTO((rtx));
41/* Return an rtx for the sum of X and the integer C.
42
43   This function should be used via the `plus_constant' macro.  */
44
rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  /* Nonzero once X is known to be entirely constant, so the final
     result must be wrapped in a CONST.  */
  int all_constant = 0;

  /* Adding zero changes nothing.  */
  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      /* Do the addition in double-word precision, sign-extending C
	 into the high word.  */
      {
	HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	HOST_WIDE_INT lv, hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  /* Any rtl we create here must go in a saveable obstack, since
	     we might have been called from within combine.  */
	  push_obstacks_nochange ();
	  rtl_in_saveable_obstack ();
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  pop_obstacks ();
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  /* Fold the integer term into C and re-examine the other operand.  */
	  c += INTVAL (XEXP (x, 1));
	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 0)))
	{
	  x = gen_rtx_PLUS (mode,
			    plus_constant (XEXP (x, 0), c),
			    XEXP (x, 1));
	  c = 0;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode,
			    XEXP (x, 0),
			    plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      break;

    default:
      break;
    }

  /* Whatever is left over of C becomes an explicit PLUS term.  */
  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  /* A lone SYMBOL_REF/LABEL_REF needs no CONST wrapper even though
     all_constant is set.  */
  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
162
163/* This is the same as `plus_constant', except that it handles LO_SUM.
164
165   This function should be used via the `plus_constant_for_output' macro.  */
166
167rtx
168plus_constant_for_output_wide (x, c)
169     register rtx x;
170     register HOST_WIDE_INT c;
171{
172  register enum machine_mode mode = GET_MODE (x);
173
174  if (GET_CODE (x) == LO_SUM)
175    return gen_rtx_LO_SUM (mode, XEXP (x, 0),
176		    plus_constant_for_output (XEXP (x, 1), c));
177
178  else
179    return plus_constant (x, c);
180}
181
182/* If X is a sum, return a new sum like X but lacking any constant terms.
183   Add all the removed constant terms into *CONSTPTR.
184   X itself is not altered.  The result != X if and only if
185   it is not isomorphic to X.  */
186
rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  /* Only sums can contain removable constant terms.  */
  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      /* Accumulate the constant and strip it from the remaining sum.  */
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  /* Recurse into both operands, collecting their constants in TEM.  */
  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  /* Only rebuild the sum if something actually changed and the combined
     constant simplifies to a CONST_INT.  */
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  /* Nothing removable: X is returned unchanged, as documented.  */
  return x;
}
222
223/* Returns the insn that next references REG after INSN, or 0
224   if REG is clobbered before next referenced or we cannot find
225   an insn that references REG in a straight-line piece of code.  */
226
rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  /* Scan forward; NEXT is precomputed so a simplejump below can
     redirect the walk to the jump target.  */
  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	continue;
      /* A label or barrier ends the straight-line region we can follow.  */
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  /* A set of REG clobbers the value before any later use.  */
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      /* Follow an unconditional jump; give up on anything else.  */
	      if (simplejump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  /* A call clobbers call-used hard registers.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}
267
268/* Return an rtx for the size in bytes of the value of EXP.  */
269
270rtx
271expr_size (exp)
272     tree exp;
273{
274  tree size = size_in_bytes (TREE_TYPE (exp));
275
276  if (TREE_CODE (size) != INTEGER_CST
277      && contains_placeholder_p (size))
278    size = build (WITH_RECORD_EXPR, sizetype, size, exp);
279
280  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
281  		      EXPAND_MEMORY_USE_BAD);
282}
283
284/* Return a copy of X in which all memory references
285   and all constants that involve symbol refs
286   have been replaced with new temporary registers.
287   Also emit code to load the memory locations and constants
288   into those registers.
289
290   If X contains no such constants or memory references,
291   X itself (not a copy) is returned.
292
293   If a constant is found in the address that is not a legitimate constant
294   in an insn, it is left alone in the hope that it might be valid in the
295   address.
296
297   X may contain no arithmetic except addition, subtraction and multiplication.
298   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
299
300static rtx
301break_out_memory_refs (x)
302     register rtx x;
303{
304  if (GET_CODE (x) == MEM
305      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
306	  && GET_MODE (x) != VOIDmode))
307    x = force_reg (GET_MODE (x), x);
308  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
309	   || GET_CODE (x) == MULT)
310    {
311      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
312      register rtx op1 = break_out_memory_refs (XEXP (x, 1));
313
314      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
315	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
316    }
317
318  return x;
319}
320
321#ifdef POINTERS_EXTEND_UNSIGNED
322
323/* Given X, a memory address in ptr_mode, convert it to an address
324   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
325   the fact that pointers are not allowed to overflow by commuting arithmetic
326   operations over conversions so that address arithmetic insns can be
327   used.  */
328
rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  /* The conversion is always between ptr_mode and Pmode; the source
     mode is whichever one TO_MODE is not.  */
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Integer constants carry no mode; reuse them unchanged.  */
      return x;

    case LABEL_REF:
      /* Copy the reference in the new mode, preserving its flag bit.  */
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      /* Copy the symbol in the new mode, preserving its flag bits.  */
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition the second operand is a small constant, we can safely
	 permute the conversion and addition operation.  We can always safely
	 permute them if we are making the address narrower.  In addition,
	 always permute the operations if this is a constant.

	 NOTE(review): the "+ 20000 < 40000" test accepts constants whose
	 value lies in (-20000, 20000); the addition could in principle
	 overflow for INTVAL near the top of HOST_WIDE_INT -- confirm this
	 is harmless on the relevant hosts.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (INTVAL (XEXP (x, 1)) + 20000 < 40000
		  || CONSTANT_P (XEXP (x, 0)))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  /* No shortcut applies: emit an explicit extension or truncation of
     the address value.  */
  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
382#endif
383
384/* Given a memory address or facsimile X, construct a new address,
385   currently equivalent, that is stable: future stores won't change it.
386
387   X must be composed of constants, register and memory references
388   combined with addition, subtraction and multiplication:
389   in other words, just what you can get from expand_expr if sum_ok is 1.
390
391   Works by making copies of all regs and memory locations used
392   by X and combining them the same way X does.
393   You could also stabilize the reference to this address
394   by copying the address to a register with copy_to_reg;
395   but then you wouldn't get indexed addressing in the reference.  */
396
rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      /* The frame pointer(s) are stable already; copying them would
	 only waste a register.  */
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
	  )
	x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    /* Snapshot the memory contents now.  */
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      /* Recurse into address arithmetic; rebuild only on change.  */
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}
422
423/* Return something equivalent to X but valid as a memory address
424   for something of mode MODE.  When X is not itself valid, this
425   works by copying X or subexpressions of it into registers.  */
426
rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  /* An ADDRESSOF is always acceptable as it stands.  */
  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  /* Addresses must be in Pmode; widen/narrow from ptr_mode if needed.  */
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
      && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.
	 (This target macro jumps to `win' when X is legitimate.)  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.
	 (This target macro jumps to `win' on success.)  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      /* Base-in-register plus constant offset; fall back to
		 force_operand if that form still isn't valid.  */
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

      /* Reached when the pre-break_out_memory_refs address was valid.  */
    win2:
      x = oldx;
      /* Reached (also via the target macros above) with X legitimate.  */
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, 1);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), 1);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
554
555/* Like `memory_address' but pretend `flag_force_addr' is 0.  */
556
557rtx
558memory_address_noforce (mode, x)
559     enum machine_mode mode;
560     rtx x;
561{
562  int ambient_force_addr = flag_force_addr;
563  rtx val;
564
565  flag_force_addr = 0;
566  val = memory_address (mode, x);
567  flag_force_addr = ambient_force_addr;
568  return val;
569}
570
571/* Convert a mem ref into one with a valid memory address.
572   Pass through anything else unchanged.  */
573
574rtx
575validize_mem (ref)
576     rtx ref;
577{
578  if (GET_CODE (ref) != MEM)
579    return ref;
580  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
581    return ref;
582  /* Don't alter REF itself, since that is probably a stack slot.  */
583  return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
584}
585
586/* Return a modified copy of X with its memory address copied
587   into a temporary register to protect it from side effects.
588   If X is not a MEM, it is returned unchanged (and not copied).
589   Perhaps even if it is a MEM, if there is no need to change it.  */
590
rtx
stabilize (x)
     rtx x;
{
  register rtx addr;
  /* Only MEMs can be unstable.  */
  if (GET_CODE (x) != MEM)
    return x;
  addr = XEXP (x, 0);
  if (rtx_unstable_p (addr))
    {
      /* Copy every reg/mem in the address, then force the whole
	 address into a single register.  */
      rtx temp = copy_all_regs (addr);
      rtx mem;
      if (GET_CODE (temp) != REG)
	temp = copy_to_reg (temp);
      mem = gen_rtx_MEM (GET_MODE (x), temp);

      /* Mark returned memref with in_struct if it's in an array or
	 structure.  Copy const and volatile from original memref.  */

      RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
      MEM_COPY_ATTRIBUTES (mem, x);
      if (GET_CODE (addr) == PLUS)
	MEM_SET_IN_STRUCT_P (mem, 1);

      /* Since the new MEM is just like the old X, it can alias only
	 the things that X could.  */
      MEM_ALIAS_SET (mem) = MEM_ALIAS_SET (x);

      return mem;
    }
  /* Address is already stable: no copy needed.  */
  return x;
}
623
624/* Copy the value or contents of X to a new temp reg and return that reg.  */
625
626rtx
627copy_to_reg (x)
628     rtx x;
629{
630  register rtx temp = gen_reg_rtx (GET_MODE (x));
631
632  /* If not an operand, must be an address with PLUS and MULT so
633     do the computation.  */
634  if (! general_operand (x, VOIDmode))
635    x = force_operand (x, temp);
636
637  if (x != temp)
638    emit_move_insn (temp, x);
639
640  return temp;
641}
642
643/* Like copy_to_reg but always give the new register mode Pmode
644   in case X is a constant.  */
645
646rtx
647copy_addr_to_reg (x)
648     rtx x;
649{
650  return copy_to_mode_reg (Pmode, x);
651}
652
653/* Like copy_to_reg but always give the new register mode MODE
654   in case X is a constant.  */
655
656rtx
657copy_to_mode_reg (mode, x)
658     enum machine_mode mode;
659     rtx x;
660{
661  register rtx temp = gen_reg_rtx (mode);
662
663  /* If not an operand, must be an address with PLUS and MULT so
664     do the computation.  */
665  if (! general_operand (x, VOIDmode))
666    x = force_operand (x, temp);
667
668  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
669    abort ();
670  if (x != temp)
671    emit_move_insn (temp, x);
672  return temp;
673}
674
675/* Load X into a register if it is not already one.
676   Use mode MODE for the register.
677   X should be valid for mode MODE, but it may be a constant which
678   is valid for all integer modes; that's why caller must specify MODE.
679
680   The caller must not alter the value in the register we return,
681   since we mark it as a "constant" register.  */
682
rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  /* Already a register: nothing to do.  */
  if (GET_CODE (x) == REG)
    return x;
  temp = gen_reg_rtx (mode);
  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      /* Reuse an existing REG_EQUAL note if the move insn came with one;
	 otherwise attach a fresh one recording X.  */
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
	XEXP (note, 0) = x;
      else
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
    }
  return temp;
}
711
712/* If X is a memory ref, copy its contents to a new temp reg and return
713   that reg.  Otherwise, return X.  */
714
715rtx
716force_not_mem (x)
717     rtx x;
718{
719  register rtx temp;
720  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
721    return x;
722  temp = gen_reg_rtx (GET_MODE (x));
723  emit_move_insn (temp, x);
724  return temp;
725}
726
727/* Copy X to TARGET (if it's nonzero and a reg)
728   or to a new temp reg and return that reg.
729   MODE is the mode to use for X in case it is a constant.  */
730
731rtx
732copy_to_suggested_reg (x, target, mode)
733     rtx x, target;
734     enum machine_mode mode;
735{
736  register rtx temp;
737
738  if (target && GET_CODE (target) == REG)
739    temp = target;
740  else
741    temp = gen_reg_rtx (mode);
742
743  emit_move_insn (temp, x);
744  return temp;
745}
746
747/* Return the mode to use to store a scalar of TYPE and MODE.
748   PUNSIGNEDP points to the signedness of the type and may be adjusted
749   to show what signedness to use on extension operations.
750
751   FOR_CALL is non-zero if this call is promoting args for a call.  */
752
enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  /* This target only promotes call arguments; leave everything else
     in its declared mode.  */
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      /* Let the target widen MODE and possibly change UNSIGNEDP.  */
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      /* Pointers are promoted to full address width, with the target's
	 chosen extension.  */
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
792
793/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
794   This pops when ADJUST is positive.  ADJUST need not be constant.  */
795
void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  /* A zero adjustment is a no-op.  */
  if (adjust == const0_rtx)
    return;

  /* Popping means moving the stack pointer away from the stack's growth
     direction: add on machines where the stack grows down, subtract
     otherwise.  The result is targeted at the stack pointer itself.  */
  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  /* expand_binop may not have used the requested target; copy if so.  */
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
818
819/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
820   This pushes when ADJUST is positive.  ADJUST need not be constant.  */
821
void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  /* A zero adjustment is a no-op.  */
  if (adjust == const0_rtx)
    return;

  /* Pushing means moving the stack pointer in the stack's growth
     direction: subtract on machines where the stack grows down,
     add otherwise.  (Mirror image of adjust_stack.)  */
  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  /* expand_binop may not have used the requested target; copy if so.  */
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
844
845/* Round the size of a block to be pushed up to the boundary required
846   by this machine.  SIZE is the desired size, which need not be constant.  */
847
rtx
round_push (size)
     rtx size;
{
#ifdef PREFERRED_STACK_BOUNDARY
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  /* Byte alignment needs no rounding.  */
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      /* Round the constant up to the next multiple of ALIGN.  */
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* PREFERRED_STACK_BOUNDARY */
  /* Without PREFERRED_STACK_BOUNDARY, SIZE is returned unrounded.  */
  return size;
}
876
877/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
878   to a previously-created save area.  If no save area has been allocated,
879   this function will allocate one.  If a save area is specified, it
880   must be of the proper mode.
881
882   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
883   are emitted at the current position.  */
884
void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  /* Nonlocal saves must survive in memory across calls; other
	     save levels can live in a pseudo register.  */
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      /* Build the save insns in a sequence so they can be inserted
	 after AFTER as a unit.  */
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      /* Emit at the current position.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
961
962/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
963   area made by emit_stack_save.  If it is zero, we have nothing to do.
964
965   Put any emitted insns after insn AFTER, if nonzero, otherwise at
966   current position.  */
967
void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
{
  /* Note: the K&R declarations above list AFTER before SA; that is
     harmless, since K&R parameter declarations are matched to the
     parameter list by name, not by position.  */

  /* The default is that we use a move insn.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* Make sure the save area's address is valid before using it.  */
  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      /* Build the restore insns in a sequence so they can be inserted
	 after AFTER as a unit.  */
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
1018
1019#ifdef SETJMP_VIA_SAVE_AREA
1020/* Optimize RTL generated by allocate_dynamic_stack_space for targets
1021   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
1022   platforms, the dynamic stack space used can corrupt the original
1023   frame, thus causing a crash if a longjmp unwinds to it.  */
1024
void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  /* Scan all insns looking for the REG_SAVE_AREA notes that
     allocate_dynamic_stack_space attached.  */
  for (insn = insns; insn; insn = NEXT_INSN(insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now only supported port with stack that grow upward
		 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* This will now be transformed into a (set REG REG)
		 so we can just blow away all the other notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		{
		  /* NOTE heads the list: just drop the head.  */
		  REG_NOTES (insn) = XEXP (note, 1);
		}
	      else
		{
		  /* Find NOTE's predecessor and unlink NOTE from the list.  */
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }
	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
1097#endif /* SETJMP_VIA_SAVE_AREA */
1098
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  /* When set below, the allocation size rounded to the stack boundary;
     recorded in a REG_SAVE_AREA note on the stack-pointer adjustment
     so that optimize_save_area_alloca can later rewrite it.  */
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict that BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

/* Nonzero when the address computed at the end of this function must be
   explicitly rounded up to BIGGEST_ALIGNMENT.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    {
      /* Pad SIZE with the worst-case alignment slop so that rounding
	 the returned address up cannot overrun the allocation.  */
      if (GET_CODE (size) == CONST_INT)
	size = GEN_INT (INTVAL (size)
			+ (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
	size = expand_binop (Pmode, add_optab, size,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	/* If anyone creates a target with these characteristics, let them
	   know that our optimization cannot work correctly in such a case.  */
	abort();
#endif

	if (GET_CODE (size) == CONST_INT)
	  {
	    /* Round the padded constant size back down to a multiple
	       of the stack boundary; reuse SIZE if already aligned.  */
	    int new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack if presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;

      /* Coerce TARGET and SIZE until they satisfy the operand
	 predicates of the target's allocate_stack pattern.  */
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
		(target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
	target = convert_memory_address (Pmode, target);
#else
	target = copy_to_mode_reg (Pmode, target);
#endif
      size = convert_modes (mode, ptr_mode, size, 1);
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
		(size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      /* Stack grows upward: the new block starts at the current
	 dynamic stack level, so record it before adjusting.  */
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  rtx note_target = get_last_insn ();

	  /* Tag the stack adjustment just emitted so that
	     optimize_save_area_alloca can find and rewrite it.  */
	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */
#ifdef STACK_GROWS_DOWNWARD
  emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
1330
1331/* Emit one stack probe at ADDRESS, an address within the stack.  */
1332
1333static void
1334emit_stack_probe (address)
1335     rtx address;
1336{
1337  rtx memref = gen_rtx_MEM (word_mode, address);
1338
1339  MEM_VOLATILE_P (memref) = 1;
1340
1341  if (STACK_CHECK_PROBE_LOAD)
1342    emit_move_insn (gen_reg_rtx (word_mode), memref);
1343  else
1344    emit_move_insn (memref, const0_rtx);
1345}
1346
1347/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1348   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
1349   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
1350   subtract from the stack.  If SIZE is constant, this is done
1351   with a fixed number of probes.  Otherwise, we must make a loop.  */
1352
1353#ifdef STACK_GROWS_DOWNWARD
1354#define STACK_GROW_OP MINUS
1355#else
1356#define STACK_GROW_OP PLUS
1357#endif
1358
1359void
1360probe_stack_range (first, size)
1361     HOST_WIDE_INT first;
1362     rtx size;
1363{
1364  /* First see if we have an insn to check the stack.  Use it if so.  */
1365#ifdef HAVE_check_stack
1366  if (HAVE_check_stack)
1367    {
1368      rtx last_addr
1369	= force_operand (gen_rtx_STACK_GROW_OP (Pmode,
1370						stack_pointer_rtx,
1371						plus_constant (size, first)),
1372			 NULL_RTX);
1373
1374      if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
1375	  && ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
1376		(last_address, Pmode)))
1377	last_address = copy_to_mode_reg (Pmode, last_address);
1378
1379      emit_insn (gen_check_stack (last_address));
1380      return;
1381    }
1382#endif
1383
1384  /* If we have to generate explicit probes, see if we have a constant
1385     small number of them to generate.  If so, that's the easy case.  */
1386  if (GET_CODE (size) == CONST_INT
1387      && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1388    {
1389      HOST_WIDE_INT offset;
1390
1391      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1392	 for values of N from 1 until it exceeds LAST.  If only one
1393	 probe is needed, this will not generate any code.  Then probe
1394	 at LAST.  */
1395      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1396	   offset < INTVAL (size);
1397	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
1398	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1399					  stack_pointer_rtx,
1400					  GEN_INT (offset)));
1401
1402      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1403					stack_pointer_rtx,
1404					plus_constant (size, first)));
1405    }
1406
1407  /* In the variable case, do the same as above, but in a loop.  We emit loop
1408     notes so that loop optimization can be done.  */
1409  else
1410    {
1411      rtx test_addr
1412	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1413					 stack_pointer_rtx,
1414					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
1415			 NULL_RTX);
1416      rtx last_addr
1417	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1418					 stack_pointer_rtx,
1419					 plus_constant (size, first)),
1420			 NULL_RTX);
1421      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1422      rtx loop_lab = gen_label_rtx ();
1423      rtx test_lab = gen_label_rtx ();
1424      rtx end_lab = gen_label_rtx ();
1425      rtx temp;
1426
1427      if (GET_CODE (test_addr) != REG
1428	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1429	test_addr = force_reg (Pmode, test_addr);
1430
1431      emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1432      emit_jump (test_lab);
1433
1434      emit_label (loop_lab);
1435      emit_stack_probe (test_addr);
1436
1437      emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1438
1439#ifdef STACK_GROWS_DOWNWARD
1440#define CMP_OPCODE GTU
1441      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1442			   1, OPTAB_WIDEN);
1443#else
1444#define CMP_OPCODE LTU
1445      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1446			   1, OPTAB_WIDEN);
1447#endif
1448
1449      if (temp != test_addr)
1450	abort ();
1451
1452      emit_label (test_lab);
1453      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1454			       NULL_RTX, Pmode, 1, 0, loop_lab);
1455      emit_jump (end_lab);
1456      emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
1457      emit_label (end_lab);
1458
1459      /* If will be doing stupid optimization, show test_addr is still live. */
1460      if (obey_regdecls)
1461	emit_insn (gen_rtx_USE (VOIDmode, test_addr));
1462
1463      emit_stack_probe (last_addr);
1464    }
1465}
1466
1467/* Return an rtx representing the register or memory location
1468   in which a scalar value of data type VALTYPE
1469   was returned by a function call to function FUNC.
1470   FUNC is a FUNCTION_DECL node if the precise function is known,
1471   otherwise 0.  */
1472
1473rtx
1474hard_function_value (valtype, func)
1475     tree valtype;
1476     tree func ATTRIBUTE_UNUSED;
1477{
1478  rtx val = FUNCTION_VALUE (valtype, func);
1479  if (GET_CODE (val) == REG
1480      && GET_MODE (val) == BLKmode)
1481    {
1482      int bytes = int_size_in_bytes (valtype);
1483      enum machine_mode tmpmode;
1484      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485           tmpmode != MAX_MACHINE_MODE;
1486           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1487        {
1488          /* Have we found a large enough mode?  */
1489          if (GET_MODE_SIZE (tmpmode) >= bytes)
1490            break;
1491        }
1492
1493      /* No suitable mode found.  */
1494      if (tmpmode == MAX_MACHINE_MODE)
1495        abort ();
1496
1497      PUT_MODE (val, tmpmode);
1498    }
1499  return val;
1500}
1501
1502/* Return an rtx representing the register or memory location
1503   in which a scalar value of mode MODE was returned by a library call.  */
1504
1505rtx
1506hard_libcall_value (mode)
1507     enum machine_mode mode;
1508{
1509  return LIBCALL_VALUE (mode);
1510}
1511
1512/* Look up the tree code for a given rtx code
1513   to provide the arithmetic operation for REAL_ARITHMETIC.
1514   The function returns an int because the caller may not know
1515   what `enum tree_code' means.  */
1516
1517int
1518rtx_to_tree_code (code)
1519     enum rtx_code code;
1520{
1521  enum tree_code tcode;
1522
1523  switch (code)
1524    {
1525    case PLUS:
1526      tcode = PLUS_EXPR;
1527      break;
1528    case MINUS:
1529      tcode = MINUS_EXPR;
1530      break;
1531    case MULT:
1532      tcode = MULT_EXPR;
1533      break;
1534    case DIV:
1535      tcode = RDIV_EXPR;
1536      break;
1537    case SMIN:
1538      tcode = MIN_EXPR;
1539      break;
1540    case SMAX:
1541      tcode = MAX_EXPR;
1542      break;
1543    default:
1544      tcode = LAST_AND_UNUSED_TREE_CODE;
1545      break;
1546    }
1547  return ((int) tcode);
1548}
1549