/* explow.c, revision 90075.  */
1/* Subroutines for manipulating rtx's in semantically interesting ways.
2   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22
23#include "config.h"
24#include "system.h"
25#include "toplev.h"
26#include "rtl.h"
27#include "tree.h"
28#include "tm_p.h"
29#include "flags.h"
30#include "function.h"
31#include "expr.h"
32#include "optabs.h"
33#include "hard-reg-set.h"
34#include "insn-config.h"
35#include "ggc.h"
36#include "recog.h"
37
38static rtx break_out_memory_refs	PARAMS ((rtx));
39static void emit_stack_probe		PARAMS ((rtx));
40
41
42/* Truncate and perhaps sign-extend C as appropriate for MODE.  */
43
44HOST_WIDE_INT
45trunc_int_for_mode (c, mode)
46     HOST_WIDE_INT c;
47     enum machine_mode mode;
48{
49  int width = GET_MODE_BITSIZE (mode);
50
51  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
52  if (mode == BImode)
53    return c & 1 ? STORE_FLAG_VALUE : 0;
54
55  /* Sign-extend for the requested mode.  */
56
57  if (width < HOST_BITS_PER_WIDE_INT)
58    {
59      HOST_WIDE_INT sign = 1;
60      sign <<= width - 1;
61      c &= (sign << 1) - 1;
62      c ^= sign;
63      c -= sign;
64    }
65
66  return c;
67}
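
/* Illustrative examples (assuming 8-bit QImode and a two's-complement
   HOST_WIDE_INT):

     trunc_int_for_mode (0xff, QImode)   ==> -1
     trunc_int_for_mode (0x17f, QImode)  ==> 127

   i.e. the constant is reduced to the low GET_MODE_BITSIZE bits and then
   sign-extended back to a full HOST_WIDE_INT.  */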
68
69/* Return an rtx for the sum of X and the integer C.
70
71   This function should be used via the `plus_constant' macro.  */
72
73rtx
74plus_constant_wide (x, c)
75     rtx x;
76     HOST_WIDE_INT c;
77{
78  RTX_CODE code;
79  rtx y;
80  enum machine_mode mode;
81  rtx tem;
82  int all_constant = 0;
83
84  if (c == 0)
85    return x;
86
87 restart:
88
89  code = GET_CODE (x);
90  mode = GET_MODE (x);
91  y = x;
92
93  switch (code)
94    {
95    case CONST_INT:
96      return GEN_INT (INTVAL (x) + c);
97
98    case CONST_DOUBLE:
99      {
100	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
101	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
102	unsigned HOST_WIDE_INT l2 = c;
103	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
104	unsigned HOST_WIDE_INT lv;
105	HOST_WIDE_INT hv;
106
107	add_double (l1, h1, l2, h2, &lv, &hv);
108
109	return immed_double_const (lv, hv, VOIDmode);
110      }
111
112    case MEM:
113      /* If this is a reference to the constant pool, try replacing it with
114	 a reference to a new constant.  If the resulting address isn't
115	 valid, don't return it because we have no way to validize it.  */
116      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
117	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
118	{
119	  tem
120	    = force_const_mem (GET_MODE (x),
121			       plus_constant (get_pool_constant (XEXP (x, 0)),
122					      c));
123	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
124	    return tem;
125	}
126      break;
127
128    case CONST:
129      /* If adding to something entirely constant, set a flag
130	 so that we can add a CONST around the result.  */
131      x = XEXP (x, 0);
132      all_constant = 1;
133      goto restart;
134
135    case SYMBOL_REF:
136    case LABEL_REF:
137      all_constant = 1;
138      break;
139
140    case PLUS:
141      /* The interesting case is adding the integer to a sum.
142	 Look for a constant term in the sum and combine it
143	 with C.  For an integer constant term, we make a combined
144	 integer.  For a constant term that is not an explicit integer,
145	 we cannot really combine, but group them together anyway.
146
147	 Restart or use a recursive call in case the remaining operand is
148	 something that we handle specially, such as a SYMBOL_REF.
149
150	 We may not immediately return from the recursive call here, lest
151	 all_constant gets lost.  */
152
153      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
154	{
155	  c += INTVAL (XEXP (x, 1));
156
157	  if (GET_MODE (x) != VOIDmode)
158	    c = trunc_int_for_mode (c, GET_MODE (x));
159
160	  x = XEXP (x, 0);
161	  goto restart;
162	}
163      else if (CONSTANT_P (XEXP (x, 1)))
164	{
165	  x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
166	  c = 0;
167	}
168      else if (find_constant_term_loc (&y))
169	{
170	  /* We need to be careful since X may be shared and we can't
171	     modify it in place.  */
172	  rtx copy = copy_rtx (x);
173	  rtx *const_loc = find_constant_term_loc (&copy);
174
175	  *const_loc = plus_constant (*const_loc, c);
176	  x = copy;
177	  c = 0;
178	}
179      break;
180
181    default:
182      break;
183    }
184
185  if (c != 0)
186    x = gen_rtx_PLUS (mode, x, GEN_INT (c));
187
188  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
189    return x;
190  else if (all_constant)
191    return gen_rtx_CONST (mode, x);
192  else
193    return x;
194}
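
/* For example (illustrative only), via the `plus_constant' macro:

     plus_constant (GEN_INT (4), 3)
       ==> (const_int 7)
     plus_constant (gen_rtx_SYMBOL_REF (Pmode, "x"), 8)
       ==> (const (plus (symbol_ref "x") (const_int 8)))

   Purely constant operands are folded; symbolic constants keep a CONST
   wrapper around the resulting PLUS.  */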
195
196/* If X is a sum, return a new sum like X but lacking any constant terms.
197   Add all the removed constant terms into *CONSTPTR.
198   X itself is not altered.  The result != X if and only if
199   it is not isomorphic to X.  */
200
201rtx
202eliminate_constant_term (x, constptr)
203     rtx x;
204     rtx *constptr;
205{
206  rtx x0, x1;
207  rtx tem;
208
209  if (GET_CODE (x) != PLUS)
210    return x;
211
212  /* First handle constants appearing at this level explicitly.  */
213  if (GET_CODE (XEXP (x, 1)) == CONST_INT
214      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
215						XEXP (x, 1)))
216      && GET_CODE (tem) == CONST_INT)
217    {
218      *constptr = tem;
219      return eliminate_constant_term (XEXP (x, 0), constptr);
220    }
221
222  tem = const0_rtx;
223  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
224  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
225  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
226      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
227						*constptr, tem))
228      && GET_CODE (tem) == CONST_INT)
229    {
230      *constptr = tem;
231      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
232    }
233
234  return x;
235}
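
/* For instance (illustrative), if X is (plus (reg 100) (const_int 4)) and
   *CONSTPTR starts as (const_int 0), the result is (reg 100) with *CONSTPTR
   set to (const_int 4); a nested sum such as
   (plus (plus (reg 100) (const_int 4)) (const_int -1)) likewise reduces to
   (reg 100) with *CONSTPTR set to (const_int 3).  */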
236
237/* Return the insn that next references REG after INSN, or 0
238   if REG is clobbered before it is next referenced or we cannot find
239   an insn that references REG in a straight-line piece of code.  */
240
241rtx
242find_next_ref (reg, insn)
243     rtx reg;
244     rtx insn;
245{
246  rtx next;
247
248  for (insn = NEXT_INSN (insn); insn; insn = next)
249    {
250      next = NEXT_INSN (insn);
251      if (GET_CODE (insn) == NOTE)
252	continue;
253      if (GET_CODE (insn) == CODE_LABEL
254	  || GET_CODE (insn) == BARRIER)
255	return 0;
256      if (GET_CODE (insn) == INSN
257	  || GET_CODE (insn) == JUMP_INSN
258	  || GET_CODE (insn) == CALL_INSN)
259	{
260	  if (reg_set_p (reg, insn))
261	    return 0;
262	  if (reg_mentioned_p (reg, PATTERN (insn)))
263	    return insn;
264	  if (GET_CODE (insn) == JUMP_INSN)
265	    {
266	      if (any_uncondjump_p (insn))
267		next = JUMP_LABEL (insn);
268	      else
269		return 0;
270	    }
271	  if (GET_CODE (insn) == CALL_INSN
272	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
273	      && call_used_regs[REGNO (reg)])
274	    return 0;
275	}
276      else
277	abort ();
278    }
279  return 0;
280}
281
282/* Return an rtx for the size in bytes of the value of EXP.  */
283
284rtx
285expr_size (exp)
286     tree exp;
287{
288  tree size;
289
290  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
291      && DECL_SIZE_UNIT (exp) != 0)
292    size = DECL_SIZE_UNIT (exp);
293  else
294    size = size_in_bytes (TREE_TYPE (exp));
295
296  if (TREE_CODE (size) != INTEGER_CST
297      && contains_placeholder_p (size))
298    size = build (WITH_RECORD_EXPR, sizetype, size, exp);
299
300  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
301
302}
303
304/* Return a copy of X in which all memory references
305   and all constants that involve symbol refs
306   have been replaced with new temporary registers.
307   Also emit code to load the memory locations and constants
308   into those registers.
309
310   If X contains no such constants or memory references,
311   X itself (not a copy) is returned.
312
313   If a constant is found in the address that is not a legitimate constant
314   in an insn, it is left alone in the hope that it might be valid in the
315   address.
316
317   X may contain no arithmetic except addition, subtraction and multiplication.
318   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
319
320static rtx
321break_out_memory_refs (x)
322     rtx x;
323{
324  if (GET_CODE (x) == MEM
325      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
326	  && GET_MODE (x) != VOIDmode))
327    x = force_reg (GET_MODE (x), x);
328  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
329	   || GET_CODE (x) == MULT)
330    {
331      rtx op0 = break_out_memory_refs (XEXP (x, 0));
332      rtx op1 = break_out_memory_refs (XEXP (x, 1));
333
334      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
335	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
336    }
337
338  return x;
339}
340
341#ifdef POINTERS_EXTEND_UNSIGNED
342
343/* Given X, a memory address in ptr_mode, convert it to an address
344   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
345   the fact that pointers are not allowed to overflow by commuting arithmetic
346   operations over conversions so that address arithmetic insns can be
347   used.  */
348
349rtx
350convert_memory_address (to_mode, x)
351     enum machine_mode to_mode;
352     rtx x;
353{
354  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
355  rtx temp;
356
357  /* Here we handle some special cases.  If none of them apply, fall through
358     to the default case.  */
359  switch (GET_CODE (x))
360    {
361    case CONST_INT:
362    case CONST_DOUBLE:
363      return x;
364
365    case SUBREG:
366      if (POINTERS_EXTEND_UNSIGNED >= 0
367	  && (SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
368	  && GET_MODE (SUBREG_REG (x)) == to_mode)
369	return SUBREG_REG (x);
370      break;
371
372    case LABEL_REF:
373      if (POINTERS_EXTEND_UNSIGNED >= 0)
374	{
375	  temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
376	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
377	  return temp;
378	}
379      break;
380
381    case SYMBOL_REF:
382      if (POINTERS_EXTEND_UNSIGNED >= 0)
383	{
384	  temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
385	  SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
386	  CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
387	  STRING_POOL_ADDRESS_P (temp) = STRING_POOL_ADDRESS_P (x);
388	  return temp;
389	}
390      break;
391
392    case CONST:
393      if (POINTERS_EXTEND_UNSIGNED >= 0)
394        return gen_rtx_CONST (to_mode,
395			      convert_memory_address (to_mode, XEXP (x, 0)));
396      break;
397
398    case PLUS:
399    case MULT:
400      /* If the second operand of an addition is a small constant, we can
401	 safely permute the conversion and the addition.  We can always safely
402	 permute them if we are making the address narrower.  Also permute
403	 the operations if the first operand is a constant.  */
404      if (POINTERS_EXTEND_UNSIGNED >= 0
405	  && (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
406	      || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
407		  && (INTVAL (XEXP (x, 1)) + 20000 < 40000
408		      || CONSTANT_P (XEXP (x, 0))))))
409	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
410			       convert_memory_address (to_mode, XEXP (x, 0)),
411			       convert_memory_address (to_mode, XEXP (x, 1)));
412      break;
413
414    default:
415      break;
416    }
417
418  return convert_modes (to_mode, from_mode,
419			x, POINTERS_EXTEND_UNSIGNED);
420}
421#endif
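
/* As an illustration (assuming a port where Pmode is DImode and ptr_mode is
   SImode), converting (plus:DI (reg:DI R) (const_int 8)) to SImode permutes
   the conversion with the addition: the result is an SImode PLUS whose first
   operand is the low part of R, rather than a truncation of the whole
   DImode sum.  */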
422
423/* Given a memory address or facsimile X, construct a new address,
424   currently equivalent, that is stable: future stores won't change it.
425
426   X must be composed of constants, register and memory references
427   combined with addition, subtraction and multiplication:
428   in other words, just what you can get from expand_expr if sum_ok is 1.
429
430   Works by making copies of all regs and memory locations used
431   by X and combining them the same way X does.
432   You could also stabilize the reference to this address
433   by copying the address to a register with copy_to_reg;
434   but then you wouldn't get indexed addressing in the reference.  */
435
436rtx
437copy_all_regs (x)
438     rtx x;
439{
440  if (GET_CODE (x) == REG)
441    {
442      if (REGNO (x) != FRAME_POINTER_REGNUM
443#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
444	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
445#endif
446	  )
447	x = copy_to_reg (x);
448    }
449  else if (GET_CODE (x) == MEM)
450    x = copy_to_reg (x);
451  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
452	   || GET_CODE (x) == MULT)
453    {
454      rtx op0 = copy_all_regs (XEXP (x, 0));
455      rtx op1 = copy_all_regs (XEXP (x, 1));
456      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
457	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
458    }
459  return x;
460}
461
462/* Return something equivalent to X but valid as a memory address
463   for something of mode MODE.  When X is not itself valid, this
464   works by copying X or subexpressions of it into registers.  */
465
466rtx
467memory_address (mode, x)
468     enum machine_mode mode;
469     rtx x;
470{
471  rtx oldx = x;
472
473  if (GET_CODE (x) == ADDRESSOF)
474    return x;
475
476#ifdef POINTERS_EXTEND_UNSIGNED
477  if (GET_MODE (x) != Pmode)
478    x = convert_memory_address (Pmode, x);
479#endif
480
481  /* By passing constant addresses through registers
482     we get a chance to cse them.  */
483  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
484    x = force_reg (Pmode, x);
485
486  /* Accept a QUEUED that refers to a REG
487     even though that isn't a valid address.
488     On attempting to put this in an insn we will call protect_from_queue
489     which will turn it into a REG, which is valid.  */
490  else if (GET_CODE (x) == QUEUED
491      && GET_CODE (QUEUED_VAR (x)) == REG)
492    ;
493
494  /* We get better cse by rejecting indirect addressing at this stage.
495     Let the combiner create indirect addresses where appropriate.
496     For now, generate the code so that the subexpressions useful to share
497     are visible.  But not if cse won't be done!  */
498  else
499    {
500      if (! cse_not_expected && GET_CODE (x) != REG)
501	x = break_out_memory_refs (x);
502
503      /* At this point, any valid address is accepted.  */
504      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);
505
506      /* If it was valid before but breaking out memory refs invalidated it,
507	 use it the old way.  */
508      if (memory_address_p (mode, oldx))
509	goto win2;
510
511      /* Perform machine-dependent transformations on X
512	 in certain cases.  This is not necessary since the code
513	 below can handle all possible cases, but machine-dependent
514	 transformations can make better code.  */
515      LEGITIMIZE_ADDRESS (x, oldx, mode, win);
516
517      /* PLUS and MULT can appear in special ways
518	 as the result of attempts to make an address usable for indexing.
519	 Usually they are dealt with by calling force_operand, below.
520	 But a sum containing constant terms is special
521	 if removing them makes the sum a valid address:
522	 then we generate that address in a register
523	 and index off of it.  We do this because it often makes
524	 shorter code, and because the addresses thus generated
525	 in registers often become common subexpressions.  */
526      if (GET_CODE (x) == PLUS)
527	{
528	  rtx constant_term = const0_rtx;
529	  rtx y = eliminate_constant_term (x, &constant_term);
530	  if (constant_term == const0_rtx
531	      || ! memory_address_p (mode, y))
532	    x = force_operand (x, NULL_RTX);
533	  else
534	    {
535	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
536	      if (! memory_address_p (mode, y))
537		x = force_operand (x, NULL_RTX);
538	      else
539		x = y;
540	    }
541	}
542
543      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
544	x = force_operand (x, NULL_RTX);
545
546      /* If we have a register that's an invalid address,
547	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
548      else if (GET_CODE (x) == REG)
549	x = copy_to_reg (x);
550
551      /* Last resort: copy the value to a register, since
552	 the register is a valid address.  */
553      else
554	x = force_reg (Pmode, x);
555
556      goto done;
557
558    win2:
559      x = oldx;
560    win:
561      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
562	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
563	  && ! (GET_CODE (x) == PLUS
564		&& (XEXP (x, 0) == virtual_stack_vars_rtx
565		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
566	{
567	  if (general_operand (x, Pmode))
568	    x = force_reg (Pmode, x);
569	  else
570	    x = force_operand (x, NULL_RTX);
571	}
572    }
573
574 done:
575
576  /* If we didn't change the address, we are done.  Otherwise, mark
577     a reg as a pointer if we have REG or REG + CONST_INT.  */
578  if (oldx == x)
579    return x;
580  else if (GET_CODE (x) == REG)
581    mark_reg_pointer (x, BITS_PER_UNIT);
582  else if (GET_CODE (x) == PLUS
583	   && GET_CODE (XEXP (x, 0)) == REG
584	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
585    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
586
587  /* OLDX may have been the address on a temporary.  Update the address
588     to indicate that X is now used.  */
589  update_temp_slot_address (oldx, x);
590
591  return x;
592}
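
/* A sketch of the constant-term case above (illustrative): if X is
   (plus (plus (reg A) (reg B)) (const_int 64)) and is not itself legitimate,
   but (plus (reg A) (reg B)) is, the constant is split off, the inner sum is
   copied into a fresh register R, and the address used becomes
   (plus (reg R) (const_int 64)) when that form is legitimate.  */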
593
594/* Like `memory_address' but pretend `flag_force_addr' is 0.  */
595
596rtx
597memory_address_noforce (mode, x)
598     enum machine_mode mode;
599     rtx x;
600{
601  int ambient_force_addr = flag_force_addr;
602  rtx val;
603
604  flag_force_addr = 0;
605  val = memory_address (mode, x);
606  flag_force_addr = ambient_force_addr;
607  return val;
608}
609
610/* Convert a mem ref into one with a valid memory address.
611   Pass through anything else unchanged.  */
612
613rtx
614validize_mem (ref)
615     rtx ref;
616{
617  if (GET_CODE (ref) != MEM)
618    return ref;
619  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
620      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
621    return ref;
622
623  /* Don't alter REF itself, since that is probably a stack slot.  */
624  return replace_equiv_address (ref, XEXP (ref, 0));
625}
626
627/* Given REF, either a MEM or a REG, and T, either the type of REF or
628   the expression corresponding to REF, set RTX_UNCHANGING_P if
629   appropriate.  */
630
631void
632maybe_set_unchanging (ref, t)
633     rtx ref;
634     tree t;
635{
636  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
637     initialization is only executed once, or whose initializer always
638     has the same value.  Currently we simplify this to PARM_DECLs in the
639     first case, and decls with TREE_CONSTANT initializers in the second.  */
640  if ((TREE_READONLY (t) && DECL_P (t)
641       && (TREE_CODE (t) == PARM_DECL
642	   || DECL_INITIAL (t) == NULL_TREE
643	   || TREE_CONSTANT (DECL_INITIAL (t))))
644      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
645    RTX_UNCHANGING_P (ref) = 1;
646}
647
648/* Return a modified copy of X with its memory address copied
649   into a temporary register to protect it from side effects.
650   If X is not a MEM, it is returned unchanged (and not copied).
651   Perhaps even if it is a MEM, if there is no need to change it.  */
652
653rtx
654stabilize (x)
655     rtx x;
656{
657
658  if (GET_CODE (x) != MEM
659      || ! rtx_unstable_p (XEXP (x, 0)))
660    return x;
661
662  return
663    replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
664}
665
666/* Copy the value or contents of X to a new temp reg and return that reg.  */
667
668rtx
669copy_to_reg (x)
670     rtx x;
671{
672  rtx temp = gen_reg_rtx (GET_MODE (x));
673
674  /* If not an operand, must be an address with PLUS and MULT so
675     do the computation.  */
676  if (! general_operand (x, VOIDmode))
677    x = force_operand (x, temp);
678
679  if (x != temp)
680    emit_move_insn (temp, x);
681
682  return temp;
683}
684
685/* Like copy_to_reg but always give the new register mode Pmode
686   in case X is a constant.  */
687
688rtx
689copy_addr_to_reg (x)
690     rtx x;
691{
692  return copy_to_mode_reg (Pmode, x);
693}
694
695/* Like copy_to_reg but always give the new register mode MODE
696   in case X is a constant.  */
697
698rtx
699copy_to_mode_reg (mode, x)
700     enum machine_mode mode;
701     rtx x;
702{
703  rtx temp = gen_reg_rtx (mode);
704
705  /* If not an operand, must be an address with PLUS and MULT so
706     do the computation.  */
707  if (! general_operand (x, VOIDmode))
708    x = force_operand (x, temp);
709
710  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
711    abort ();
712  if (x != temp)
713    emit_move_insn (temp, x);
714  return temp;
715}
716
717/* Load X into a register if it is not already one.
718   Use mode MODE for the register.
719   X should be valid for mode MODE, but it may be a constant which
720   is valid for all integer modes; that's why caller must specify MODE.
721
722   The caller must not alter the value in the register we return,
723   since we mark it as a "constant" register.  */
724
725rtx
726force_reg (mode, x)
727     enum machine_mode mode;
728     rtx x;
729{
730  rtx temp, insn, set;
731
732  if (GET_CODE (x) == REG)
733    return x;
734
735  temp = gen_reg_rtx (mode);
736
737  if (! general_operand (x, mode))
738    x = force_operand (x, NULL_RTX);
739
740  insn = emit_move_insn (temp, x);
741
742  /* Let optimizers know that TEMP's value never changes
743     and that X can be substituted for it.  Don't get confused
744     if INSN set something else (such as a SUBREG of TEMP).  */
745  if (CONSTANT_P (x)
746      && (set = single_set (insn)) != 0
747      && SET_DEST (set) == temp)
748    set_unique_reg_note (insn, REG_EQUAL, x);
749  return temp;
750}
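
/* E.g. (illustrative) force_reg (SImode, GEN_INT (42)) emits
   (set (reg:SI N) (const_int 42)) and attaches a REG_EQUAL note for the
   constant to that insn, so later passes may substitute 42 for the pseudo.  */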
751
752/* If X is a memory ref, copy its contents to a new temp reg and return
753   that reg.  Otherwise, return X.  */
754
755rtx
756force_not_mem (x)
757     rtx x;
758{
759  rtx temp;
760
761  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
762    return x;
763
764  temp = gen_reg_rtx (GET_MODE (x));
765  emit_move_insn (temp, x);
766  return temp;
767}
768
769/* Copy X to TARGET (if it's nonzero and a reg)
770   or to a new temp reg and return that reg.
771   MODE is the mode to use for X in case it is a constant.  */
772
773rtx
774copy_to_suggested_reg (x, target, mode)
775     rtx x, target;
776     enum machine_mode mode;
777{
778  rtx temp;
779
780  if (target && GET_CODE (target) == REG)
781    temp = target;
782  else
783    temp = gen_reg_rtx (mode);
784
785  emit_move_insn (temp, x);
786  return temp;
787}
788
789/* Return the mode to use to store a scalar of TYPE and MODE.
790   PUNSIGNEDP points to the signedness of the type and may be adjusted
791   to show what signedness to use on extension operations.
792
793   FOR_CALL is non-zero if this call is promoting args for a call.  */
794
795enum machine_mode
796promote_mode (type, mode, punsignedp, for_call)
797     tree type;
798     enum machine_mode mode;
799     int *punsignedp;
800     int for_call ATTRIBUTE_UNUSED;
801{
802  enum tree_code code = TREE_CODE (type);
803  int unsignedp = *punsignedp;
804
805#ifdef PROMOTE_FOR_CALL_ONLY
806  if (! for_call)
807    return mode;
808#endif
809
810  switch (code)
811    {
812#ifdef PROMOTE_MODE
813    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
814    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
815      PROMOTE_MODE (mode, unsignedp, type);
816      break;
817#endif
818
819#ifdef POINTERS_EXTEND_UNSIGNED
820    case REFERENCE_TYPE:
821    case POINTER_TYPE:
822      mode = Pmode;
823      unsignedp = POINTERS_EXTEND_UNSIGNED;
824      break;
825#endif
826
827    default:
828      break;
829    }
830
831  *punsignedp = unsignedp;
832  return mode;
833}
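
/* For instance, on a target whose PROMOTE_MODE widens QImode and HImode
   integers to SImode (an assumption for illustration), promote_mode of a
   `short' yields SImode and may adjust *PUNSIGNEDP as the target directs,
   while a pointer type yields Pmode with POINTERS_EXTEND_UNSIGNED signedness
   when that macro is defined.  */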
834
835/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
836   This pops when ADJUST is positive.  ADJUST need not be constant.  */
837
838void
839adjust_stack (adjust)
840     rtx adjust;
841{
842  rtx temp;
843  adjust = protect_from_queue (adjust, 0);
844
845  if (adjust == const0_rtx)
846    return;
847
848  /* We expect all variable-sized adjustments to be a multiple of
849     PREFERRED_STACK_BOUNDARY.  */
850  if (GET_CODE (adjust) == CONST_INT)
851    stack_pointer_delta -= INTVAL (adjust);
852
853  temp = expand_binop (Pmode,
854#ifdef STACK_GROWS_DOWNWARD
855		       add_optab,
856#else
857		       sub_optab,
858#endif
859		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
860		       OPTAB_LIB_WIDEN);
861
862  if (temp != stack_pointer_rtx)
863    emit_move_insn (stack_pointer_rtx, temp);
864}
865
866/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
867   This pushes when ADJUST is positive.  ADJUST need not be constant.  */
868
869void
870anti_adjust_stack (adjust)
871     rtx adjust;
872{
873  rtx temp;
874  adjust = protect_from_queue (adjust, 0);
875
876  if (adjust == const0_rtx)
877    return;
878
879  /* We expect all variable-sized adjustments to be a multiple of
880     PREFERRED_STACK_BOUNDARY.  */
881  if (GET_CODE (adjust) == CONST_INT)
882    stack_pointer_delta += INTVAL (adjust);
883
884  temp = expand_binop (Pmode,
885#ifdef STACK_GROWS_DOWNWARD
886		       sub_optab,
887#else
888		       add_optab,
889#endif
890		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
891		       OPTAB_LIB_WIDEN);
892
893  if (temp != stack_pointer_rtx)
894    emit_move_insn (stack_pointer_rtx, temp);
895}
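
/* For example (illustrative), on a machine where the stack grows downward,
   anti_adjust_stack (GEN_INT (16)) expands to a 16-byte subtraction from
   stack_pointer_rtx and records stack_pointer_delta += 16, while
   adjust_stack (GEN_INT (16)) adds the 16 bytes back and decreases the
   delta accordingly.  */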
896
897/* Round the size of a block to be pushed up to the boundary required
898   by this machine.  SIZE is the desired size, which need not be constant.  */
899
900rtx
901round_push (size)
902     rtx size;
903{
904  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
905  if (align == 1)
906    return size;
907  if (GET_CODE (size) == CONST_INT)
908    {
909      int new = (INTVAL (size) + align - 1) / align * align;
910      if (INTVAL (size) != new)
911	size = GEN_INT (new);
912    }
913  else
914    {
915      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
916	 but we know it can't.  So do the addition ourselves and then
917	 use TRUNC_DIV_EXPR.  */
918      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
919			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
920      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
921			    NULL_RTX, 1);
922      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
923    }
924  return size;
925}
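
/* Example (illustrative): with a PREFERRED_STACK_BOUNDARY of 64 bits the
   byte alignment is 8, so round_push (GEN_INT (13)) returns (const_int 16);
   a variable SIZE is instead rounded at run time by the add/divide/multiply
   sequence above.  */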
926
927/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
928   to a previously-created save area.  If no save area has been allocated,
929   this function will allocate one.  If a save area is specified, it
930   must be of the proper mode.
931
932   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
933   are emitted at the current position.  */
934
935void
936emit_stack_save (save_level, psave, after)
937     enum save_level save_level;
938     rtx *psave;
939     rtx after;
940{
941  rtx sa = *psave;
942  /* The default is that we use a move insn and save in a Pmode object.  */
943  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
944  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
945
946  /* See if this machine has anything special to do for this kind of save.  */
947  switch (save_level)
948    {
949#ifdef HAVE_save_stack_block
950    case SAVE_BLOCK:
951      if (HAVE_save_stack_block)
952	fcn = gen_save_stack_block;
953      break;
954#endif
955#ifdef HAVE_save_stack_function
956    case SAVE_FUNCTION:
957      if (HAVE_save_stack_function)
958	fcn = gen_save_stack_function;
959      break;
960#endif
961#ifdef HAVE_save_stack_nonlocal
962    case SAVE_NONLOCAL:
963      if (HAVE_save_stack_nonlocal)
964	fcn = gen_save_stack_nonlocal;
965      break;
966#endif
967    default:
968      break;
969    }
970
971  /* If there is no save area and we have to allocate one, do so.  Otherwise
972     verify the save area is the proper mode.  */
973
974  if (sa == 0)
975    {
976      if (mode != VOIDmode)
977	{
978	  if (save_level == SAVE_NONLOCAL)
979	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
980	  else
981	    *psave = sa = gen_reg_rtx (mode);
982	}
983    }
984  else
985    {
986      if (mode == VOIDmode || GET_MODE (sa) != mode)
987	abort ();
988    }
989
990  if (after)
991    {
992      rtx seq;
993
994      start_sequence ();
995      /* We must validize inside the sequence, to ensure that any instructions
996	 created by the validize call also get moved to the right place.  */
997      if (sa != 0)
998	sa = validize_mem (sa);
999      emit_insn (fcn (sa, stack_pointer_rtx));
1000      seq = gen_sequence ();
1001      end_sequence ();
1002      emit_insn_after (seq, after);
1003    }
1004  else
1005    {
1006      if (sa != 0)
1007	sa = validize_mem (sa);
1008      emit_insn (fcn (sa, stack_pointer_rtx));
1009    }
1010}
1011
1012/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
1013   area made by emit_stack_save.  If it is zero, we have nothing to do.
1014
1015   Put any emitted insns after insn AFTER, if nonzero, otherwise at
1016   current position.  */
1017
1018void
1019emit_stack_restore (save_level, sa, after)
1020     enum save_level save_level;
1021     rtx after;
1022     rtx sa;
1023{
1024  /* The default is that we use a move insn.  */
1025  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
1026
1027  /* See if this machine has anything special to do for this kind of save.  */
1028  switch (save_level)
1029    {
1030#ifdef HAVE_restore_stack_block
1031    case SAVE_BLOCK:
1032      if (HAVE_restore_stack_block)
1033	fcn = gen_restore_stack_block;
1034      break;
1035#endif
1036#ifdef HAVE_restore_stack_function
1037    case SAVE_FUNCTION:
1038      if (HAVE_restore_stack_function)
1039	fcn = gen_restore_stack_function;
1040      break;
1041#endif
1042#ifdef HAVE_restore_stack_nonlocal
1043    case SAVE_NONLOCAL:
1044      if (HAVE_restore_stack_nonlocal)
1045	fcn = gen_restore_stack_nonlocal;
1046      break;
1047#endif
1048    default:
1049      break;
1050    }
1051
1052  if (sa != 0)
1053    sa = validize_mem (sa);
1054
1055  if (after)
1056    {
1057      rtx seq;
1058
1059      start_sequence ();
1060      emit_insn (fcn (stack_pointer_rtx, sa));
1061      seq = gen_sequence ();
1062      end_sequence ();
1063      emit_insn_after (seq, after);
1064    }
1065  else
1066    emit_insn (fcn (stack_pointer_rtx, sa));
1067}
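
/* A minimal usage sketch (illustrative): a caller typically writes

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area, NULL_RTX);
     ... emit code that may move the stack pointer ...
     emit_stack_restore (SAVE_BLOCK, save_area, NULL_RTX);

   letting emit_stack_save allocate the save area in the mode given by
   STACK_SAVEAREA_MODE (SAVE_BLOCK).  */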
1068
1069#ifdef SETJMP_VIA_SAVE_AREA
1070/* Optimize RTL generated by allocate_dynamic_stack_space for targets
1071   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
1072   platforms, the dynamic stack space used can corrupt the original
1073   frame, thus causing a crash if a longjmp unwinds to it.  */
1074
1075void
1076optimize_save_area_alloca (insns)
1077     rtx insns;
1078{
1079  rtx insn;
1080
1081  for (insn = insns; insn; insn = NEXT_INSN(insn))
1082    {
1083      rtx note;
1084
1085      if (GET_CODE (insn) != INSN)
1086	continue;
1087
1088      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1089	{
1090	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
1091	    continue;
1092
1093	  if (!current_function_calls_setjmp)
1094	    {
1095	      rtx pat = PATTERN (insn);
1096
1097	      /* If we do not see the note in a pattern matching
1098		 these precise characteristics, we did something
1099		 entirely wrong in allocate_dynamic_stack_space.
1100
1101		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
1102		 was defined on a machine where stacks grow towards higher
1103		 addresses.
1104
1105		 Right now the only supported port whose stack grows upward
1106		 is the HPPA, and it does not define SETJMP_VIA_SAVE_AREA.  */
1107	      if (GET_CODE (pat) != SET
1108		  || SET_DEST (pat) != stack_pointer_rtx
1109		  || GET_CODE (SET_SRC (pat)) != MINUS
1110		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
1111		abort ();
1112
1113	      /* This will now be transformed into a (set REG REG)
1114		 so we can just blow away all the other notes.  */
1115	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
1116	      REG_NOTES (insn) = NULL_RTX;
1117	    }
1118	  else
1119	    {
1120	      /* setjmp was called, we must remove the REG_SAVE_AREA
1121		 note so that later passes do not get confused by its
1122		 presence.  */
1123	      if (note == REG_NOTES (insn))
1124		{
1125		  REG_NOTES (insn) = XEXP (note, 1);
1126		}
1127	      else
1128		{
1129		  rtx srch;
1130
1131		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
1132		    if (XEXP (srch, 1) == note)
1133		      break;
1134
1135		  if (srch == NULL_RTX)
1136		    abort ();
1137
1138		  XEXP (srch, 1) = XEXP (note, 1);
1139		}
1140	    }
1141	  /* Once we've seen the note of interest, we need not look at
1142	     the rest of them.  */
1143	  break;
1144	}
1145    }
1146}
1147#endif /* SETJMP_VIA_SAVE_AREA */
1148
1149/* Return an rtx representing the address of an area of memory dynamically
1150   pushed on the stack.  This region of memory is always aligned to
1151   a multiple of BIGGEST_ALIGNMENT.
1152
1153   Any required stack pointer alignment is preserved.
1154
1155   SIZE is an rtx representing the size of the area.
1156   TARGET is a place in which the address can be placed.
1157
1158   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */
1159
1160rtx
1161allocate_dynamic_stack_space (size, target, known_align)
1162     rtx size;
1163     rtx target;
1164     int known_align;
1165{
1166#ifdef SETJMP_VIA_SAVE_AREA
1167  rtx setjmpless_size = NULL_RTX;
1168#endif
1169
1170  /* If we're asking for zero bytes, it doesn't matter what we point
1171     to since we can't dereference it.  But return a reasonable
1172     address anyway.  */
1173  if (size == const0_rtx)
1174    return virtual_stack_dynamic_rtx;
1175
1176  /* Otherwise, show we're calling alloca or equivalent.  */
1177  current_function_calls_alloca = 1;
1178
1179  /* Ensure the size is in the proper mode.  */
1180  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1181    size = convert_to_mode (Pmode, size, 1);
1182
1183  /* We can't attempt to minimize the alignment necessary, because we don't
1184     yet know the final value of preferred_stack_boundary while executing
1185     this code.  */
1186  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1187
1188  /* We will need to ensure that the address we return is aligned to
1189     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
1190     always know its final value at this point in the compilation (it
1191     might depend on the size of the outgoing parameter lists, for
1192     example), so we must align the value to be returned in that case.
1193     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
1194     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1195     We must also do an alignment operation on the returned value if
1196     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.
1197
1198     If we have to align, we must leave space in SIZE for the hole
1199     that might result from the alignment operation.  */
1200
1201#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1202#define MUST_ALIGN 1
1203#else
1204#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
1205#endif
1206
1207  if (MUST_ALIGN)
1208    size
1209      = force_operand (plus_constant (size,
1210				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1211		       NULL_RTX);
1212
1213#ifdef SETJMP_VIA_SAVE_AREA
1214  /* If setjmp restores regs from a save area in the stack frame,
1215     avoid clobbering the reg save area.  Note that the offset of
1216     virtual_incoming_args_rtx includes the preallocated stack args space.
1217     It would be no problem to clobber that, but it's on the wrong side
1218     of the old save area.  */
1219  {
1220    rtx dynamic_offset
1221      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
1222		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
1223
1224    if (!current_function_calls_setjmp)
1225      {
1226	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1227
1228	/* See optimize_save_area_alloca to understand what is being
1229	   set up here.  */
1230
1231	/* ??? Code below assumes that the save area needs maximal
1232	   alignment.  This constraint may be too strong.  */
1233	if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
1234	  abort ();
1235
1236	if (GET_CODE (size) == CONST_INT)
1237	  {
1238	    HOST_WIDE_INT new = INTVAL (size) / align * align;
1239
1240	    if (INTVAL (size) != new)
1241	      setjmpless_size = GEN_INT (new);
1242	    else
1243	      setjmpless_size = size;
1244	  }
1245	else
1246	  {
1247	    /* Since we know overflow is not possible, we avoid using
1248	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
1249	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
1250					     GEN_INT (align), NULL_RTX, 1);
1251	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
1252					   GEN_INT (align), NULL_RTX, 1);
1253	  }
1254	/* Our optimization works based upon being able to perform a simple
1255	   transformation of this RTL into a (set REG REG) so make sure things
1256	   did in fact end up in a REG.  */
1257	if (!register_operand (setjmpless_size, Pmode))
1258	  setjmpless_size = force_reg (Pmode, setjmpless_size);
1259      }
1260
1261    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
1262			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1263  }
1264#endif /* SETJMP_VIA_SAVE_AREA */
1265
1266  /* Round the size to a multiple of the required stack alignment.
1267     Since the stack is presumed to be rounded before this allocation,
1268     this will maintain the required alignment.
1269
1270     If the stack grows downward, we could save an insn by subtracting
1271     SIZE from the stack pointer and then aligning the stack pointer.
1272     The problem with this is that the stack pointer may be unaligned
1273     between the execution of the subtraction and alignment insns and
1274     some machines do not allow this.  Even on those that do, some
1275     signal handlers malfunction if a signal should occur between those
1276     insns.  Since this is an extremely rare event, we have no reliable
1277     way of knowing which systems have this problem.  So we avoid even
1278     momentarily mis-aligning the stack.  */
1279
1280  /* If we added a variable amount to SIZE,
1281     we can no longer assume it is aligned.  */
1282#if !defined (SETJMP_VIA_SAVE_AREA)
1283  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
1284#endif
1285    size = round_push (size);
1286
1287  do_pending_stack_adjust ();
1288
1289 /* We ought always to be called at the top level, and the stack ought to be
1290    aligned properly.  */
1291  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
1292    abort ();
1293
1294  /* If needed, check that we have the required amount of stack.  Take into
1295     account what has already been checked.  */
1296  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1297    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
1298
1299  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
1300  if (target == 0 || GET_CODE (target) != REG
1301      || REGNO (target) < FIRST_PSEUDO_REGISTER
1302      || GET_MODE (target) != Pmode)
1303    target = gen_reg_rtx (Pmode);
1304
1305  mark_reg_pointer (target, known_align);
1306
1307  /* Perform the required allocation from the stack.  Some systems do
1308     this differently than simply incrementing/decrementing from the
1309     stack pointer, such as acquiring the space by calling malloc().  */
1310#ifdef HAVE_allocate_stack
1311  if (HAVE_allocate_stack)
1312    {
1313      enum machine_mode mode = STACK_SIZE_MODE;
1314      insn_operand_predicate_fn pred;
1315
1316      /* We don't have to check against the predicate for operand 0 since
1317	 TARGET is known to be a pseudo of the proper mode, which must
1318	 be valid for the operand.  For operand 1, convert to the
1319	 proper mode and validate.  */
1320      if (mode == VOIDmode)
1321	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;
1322
1323      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
1324      if (pred && ! ((*pred) (size, mode)))
1325	size = copy_to_mode_reg (mode, size);
1326
1327      emit_insn (gen_allocate_stack (target, size));
1328    }
1329  else
1330#endif
1331    {
1332#ifndef STACK_GROWS_DOWNWARD
1333      emit_move_insn (target, virtual_stack_dynamic_rtx);
1334#endif
1335
1336      /* Check stack bounds if necessary.  */
1337      if (current_function_limit_stack)
1338	{
1339	  rtx available;
1340	  rtx space_available = gen_label_rtx ();
1341#ifdef STACK_GROWS_DOWNWARD
1342	  available = expand_binop (Pmode, sub_optab,
1343				    stack_pointer_rtx, stack_limit_rtx,
1344				    NULL_RTX, 1, OPTAB_WIDEN);
1345#else
1346	  available = expand_binop (Pmode, sub_optab,
1347				    stack_limit_rtx, stack_pointer_rtx,
1348				    NULL_RTX, 1, OPTAB_WIDEN);
1349#endif
1350	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1351				   space_available);
1352#ifdef HAVE_trap
1353	  if (HAVE_trap)
1354	    emit_insn (gen_trap ());
1355	  else
1356#endif
1357	    error ("stack limits not supported on this target");
1358	  emit_barrier ();
1359	  emit_label (space_available);
1360	}
1361
1362      anti_adjust_stack (size);
1363#ifdef SETJMP_VIA_SAVE_AREA
1364      if (setjmpless_size != NULL_RTX)
1365	{
1366 	  rtx note_target = get_last_insn ();
1367
1368	  REG_NOTES (note_target)
1369	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
1370				 REG_NOTES (note_target));
1371	}
1372#endif /* SETJMP_VIA_SAVE_AREA */
1373
1374#ifdef STACK_GROWS_DOWNWARD
1375  emit_move_insn (target, virtual_stack_dynamic_rtx);
1376#endif
1377    }
1378
1379  if (MUST_ALIGN)
1380    {
1381      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1382	 but we know it can't.  So do the addition ourselves and then
1383	 use TRUNC_DIV_EXPR.  */
1384      target = expand_binop (Pmode, add_optab, target,
1385			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1386			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
1387      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1388			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1389			      NULL_RTX, 1);
1390      target = expand_mult (Pmode, target,
1391			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1392			    NULL_RTX, 1);
1393    }
1394
1395  /* Some systems require a particular insn to refer to the stack
1396     to make the pages exist.  */
1397#ifdef HAVE_probe
1398  if (HAVE_probe)
1399    emit_insn (gen_probe ());
1400#endif
1401
1402  /* Record the new stack level for nonlocal gotos.  */
1403  if (nonlocal_goto_handler_slots != 0)
1404    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
1405
1406  return target;
1407}
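
/* A worked example of the MUST_ALIGN path (illustrative): with a
   BIGGEST_ALIGNMENT of 64 bits, 7 is first added to SIZE, and the returned
   address A is rounded as ((A + 7) / 8) * 8, so the caller always receives
   an 8-byte-aligned pointer regardless of where the stack pointer landed.  */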
1408
1409/* A front end may want to override GCC's stack checking by providing a
1410   run-time routine to call to check the stack, so provide a mechanism for
1411   calling that routine.  */
1412
1413static rtx stack_check_libfunc;
1414
1415void
1416set_stack_check_libfunc (libfunc)
1417     rtx libfunc;
1418{
1419  stack_check_libfunc = libfunc;
1420  ggc_add_rtx_root (&stack_check_libfunc, 1);
1421}
1422
1423/* Emit one stack probe at ADDRESS, an address within the stack.  */
1424
1425static void
1426emit_stack_probe (address)
1427     rtx address;
1428{
1429  rtx memref = gen_rtx_MEM (word_mode, address);
1430
1431  MEM_VOLATILE_P (memref) = 1;
1432
1433  if (STACK_CHECK_PROBE_LOAD)
1434    emit_move_insn (gen_reg_rtx (word_mode), memref);
1435  else
1436    emit_move_insn (memref, const0_rtx);
1437}
1438
1439/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1440   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
1441   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
1442   subtract from the stack.  If SIZE is constant, this is done
1443   with a fixed number of probes.  Otherwise, we must make a loop.  */
1444
1445#ifdef STACK_GROWS_DOWNWARD
1446#define STACK_GROW_OP MINUS
1447#else
1448#define STACK_GROW_OP PLUS
1449#endif
1450
1451void
1452probe_stack_range (first, size)
1453     HOST_WIDE_INT first;
1454     rtx size;
1455{
1456  /* First ensure SIZE is Pmode.  */
1457  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1458    size = convert_to_mode (Pmode, size, 1);
1459
1460  /* Next see if the front end has set up a function for us to call to
1461     check the stack.  */
1462  if (stack_check_libfunc != 0)
1463    {
1464      rtx addr = memory_address (QImode,
1465				 gen_rtx (STACK_GROW_OP, Pmode,
1466					  stack_pointer_rtx,
1467					  plus_constant (size, first)));
1468
1469#ifdef POINTERS_EXTEND_UNSIGNED
1470      if (GET_MODE (addr) != ptr_mode)
1471	addr = convert_memory_address (ptr_mode, addr);
1472#endif
1473
1474      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1475			 ptr_mode);
1476    }
1477
1478  /* Next see if we have an insn to check the stack.  Use it if so.  */
1479#ifdef HAVE_check_stack
1480  else if (HAVE_check_stack)
1481    {
1482      insn_operand_predicate_fn pred;
1483      rtx last_addr
1484	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1485					 stack_pointer_rtx,
1486					 plus_constant (size, first)),
1487			 NULL_RTX);
1488
1489      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
1490      if (pred && ! ((*pred) (last_addr, Pmode)))
1491	last_addr = copy_to_mode_reg (Pmode, last_addr);
1492
1493      emit_insn (gen_check_stack (last_addr));
1494    }
1495#endif
1496
1497  /* If we have to generate explicit probes, see if we have a constant
1498     small number of them to generate.  If so, that's the easy case.  */
1499  else if (GET_CODE (size) == CONST_INT
1500	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1501    {
1502      HOST_WIDE_INT offset;
1503
1504      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1505	 for values of N from 1 until it exceeds LAST.  If only one
1506	 probe is needed, this will not generate any code.  Then probe
1507	 at LAST.  */
1508      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1509	   offset < INTVAL (size);
1510	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
1511	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1512					  stack_pointer_rtx,
1513					  GEN_INT (offset)));
1514
1515      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1516					stack_pointer_rtx,
1517					plus_constant (size, first)));
1518    }
1519
1520  /* In the variable case, do the same as above, but in a loop.  We emit loop
1521     notes so that loop optimization can be done.  */
1522  else
1523    {
1524      rtx test_addr
1525	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1526					 stack_pointer_rtx,
1527					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
1528			 NULL_RTX);
1529      rtx last_addr
1530	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1531					 stack_pointer_rtx,
1532					 plus_constant (size, first)),
1533			 NULL_RTX);
1534      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1535      rtx loop_lab = gen_label_rtx ();
1536      rtx test_lab = gen_label_rtx ();
1537      rtx end_lab = gen_label_rtx ();
1538      rtx temp;
1539
1540      if (GET_CODE (test_addr) != REG
1541	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1542	test_addr = force_reg (Pmode, test_addr);
1543
1544      emit_note (NULL, NOTE_INSN_LOOP_BEG);
1545      emit_jump (test_lab);
1546
1547      emit_label (loop_lab);
1548      emit_stack_probe (test_addr);
1549
1550      emit_note (NULL, NOTE_INSN_LOOP_CONT);
1551
1552#ifdef STACK_GROWS_DOWNWARD
1553#define CMP_OPCODE GTU
1554      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1555			   1, OPTAB_WIDEN);
1556#else
1557#define CMP_OPCODE LTU
1558      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1559			   1, OPTAB_WIDEN);
1560#endif
1561
1562      if (temp != test_addr)
1563	abort ();
1564
1565      emit_label (test_lab);
1566      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1567			       NULL_RTX, Pmode, 1, loop_lab);
1568      emit_jump (end_lab);
1569      emit_note (NULL, NOTE_INSN_LOOP_END);
1570      emit_label (end_lab);
1571
1572      emit_stack_probe (last_addr);
1573    }
1574}
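
/* Concretely (illustrative), with a STACK_CHECK_PROBE_INTERVAL of 4096,
   probe_stack_range (0, GEN_INT (10000)) takes the constant case and touches
   the words at offsets 4096, 8192 and 10000 from the stack pointer, in the
   direction the stack grows.  */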
1575
1576/* Return an rtx representing the register or memory location
1577   in which a scalar value of data type VALTYPE
1578   was returned by a function call to function FUNC.
1579   FUNC is a FUNCTION_DECL node if the precise function is known,
1580   otherwise 0.
1581   OUTGOING is 1 if on a machine with register windows this function
1582   should return the register in which the function will put its result
1583   and 0 otherwise.  */
1584
1585rtx
1586hard_function_value (valtype, func, outgoing)
1587     tree valtype;
1588     tree func ATTRIBUTE_UNUSED;
1589     int outgoing ATTRIBUTE_UNUSED;
1590{
1591  rtx val;
1592
1593#ifdef FUNCTION_OUTGOING_VALUE
1594  if (outgoing)
1595    val = FUNCTION_OUTGOING_VALUE (valtype, func);
1596  else
1597#endif
1598    val = FUNCTION_VALUE (valtype, func);
1599
1600  if (GET_CODE (val) == REG
1601      && GET_MODE (val) == BLKmode)
1602    {
1603      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1604      enum machine_mode tmpmode;
1605
1606      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1607           tmpmode != VOIDmode;
1608           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1609        {
1610          /* Have we found a large enough mode?  */
1611          if (GET_MODE_SIZE (tmpmode) >= bytes)
1612            break;
1613        }
1614
1615      /* No suitable mode found.  */
1616      if (tmpmode == VOIDmode)
1617        abort ();
1618
1619      PUT_MODE (val, tmpmode);
1620    }
1621  return val;
1622}
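
/* For instance (illustrative), if FUNCTION_VALUE hands back a BLKmode hard
   register for a 6-byte structure, the loop above settles on the narrowest
   integer mode of at least 6 bytes (DImode on a typical 32-bit target) and
   re-labels the register with that mode.  */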
1623
1624/* Return an rtx representing the register or memory location
1625   in which a scalar value of mode MODE was returned by a library call.  */
1626
1627rtx
1628hard_libcall_value (mode)
1629     enum machine_mode mode;
1630{
1631  return LIBCALL_VALUE (mode);
1632}
1633
1634/* Look up the tree code for a given rtx code
1635   to provide the arithmetic operation for REAL_ARITHMETIC.
1636   The function returns an int because the caller may not know
1637   what `enum tree_code' means.  */
1638
1639int
1640rtx_to_tree_code (code)
1641     enum rtx_code code;
1642{
1643  enum tree_code tcode;
1644
1645  switch (code)
1646    {
1647    case PLUS:
1648      tcode = PLUS_EXPR;
1649      break;
1650    case MINUS:
1651      tcode = MINUS_EXPR;
1652      break;
1653    case MULT:
1654      tcode = MULT_EXPR;
1655      break;
1656    case DIV:
1657      tcode = RDIV_EXPR;
1658      break;
1659    case SMIN:
1660      tcode = MIN_EXPR;
1661      break;
1662    case SMAX:
1663      tcode = MAX_EXPR;
1664      break;
1665    default:
1666      tcode = LAST_AND_UNUSED_TREE_CODE;
1667      break;
1668    }
1669  return ((int) tcode);
1670}
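
/* E.g. rtx_to_tree_code (MULT) returns (int) MULT_EXPR; any rtx code without
   a direct tree counterpart maps to LAST_AND_UNUSED_TREE_CODE.  */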
1671