/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
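
/* Editor's note: a hedged illustration, not part of the original file.
   With an 8-bit QImode and HOST_BITS_PER_WIDE_INT > 8, the mask/xor/
   subtract sequence above behaves like two's-complement truncation:

     trunc_int_for_mode (0x1FF, QImode) == -1     (low byte 0xFF, sign set)
     trunc_int_for_mode (0x080, QImode) == -128   (only the sign bit set)
     trunc_int_for_mode (0x07F, QImode) == 127    (sign bit clear)  */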

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
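
/* Editor's note: an illustrative sketch of typical results, not part of
   the original file (RTL notation approximate).  Integer terms fold, and
   fully constant sums come back wrapped in a CONST:

     plus_constant (GEN_INT (4), 3)
       => (const_int 7)
     plus_constant (symbol_ref ("x"), 4)
       => (const (plus (symbol_ref ("x")) (const_int 4)))  */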

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
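
/* Editor's note: a hedged example, not part of the original file.
   Starting with *constptr == const0_rtx and
   x == (plus (plus (reg 100) (const_int 8)) (const_int -4)),
   the recursion strips both integer terms: the result is (reg 100)
   and *constptr ends up as (const_int 4).  */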

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lang_hooks.expr_size (exp), exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = lang_hooks.expr_size (exp);

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED,
			rtx x)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it.  We can always safely permute them if we are
	 making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
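
/* Editor's note: a hedged illustration, not part of the original file.
   On a target where ptr_mode is SImode, Pmode is DImode and
   POINTERS_EXTEND_UNSIGNED is 1 (a 32-bit ABI on a 64-bit machine,
   for example), one would expect

     convert_memory_address (Pmode, GEN_INT (0x1000))
       => (const_int 0x1000)     simplified as a zero extension

   and a SImode SYMBOL_REF to come back as a shallow copy of itself
   with its mode changed to DImode, per the cases above.  */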

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (enum machine_mode mode, rtx x)
{
  rtx oldx = x;

  x = convert_memory_address (Pmode, x);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_p (mode, x))
	goto win;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && !REG_P (x))
	{
	  x = force_operand (x, NULL_RTX);
	  x = force_reg (Pmode, x);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
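
/* Editor's note: a hedged usage sketch, not part of the original file;
   base_reg and index_reg are hypothetical Pmode pseudos.  A typical
   caller legitimizes an arbitrary address expression before wrapping
   it in a MEM:

     rtx addr = gen_rtx_PLUS (Pmode, base_reg,
                              gen_rtx_MULT (Pmode, index_reg,
                                            GEN_INT (8)));
     rtx mem = gen_rtx_MEM (DImode, memory_address (DImode, addr));  */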

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (enum machine_mode mode, rtx x)
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
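
/* Editor's note: a hedged illustration, not part of the original file.
   If symbols a and b live in the same object block at block offsets 0
   and 16, and the chosen anchor sits at block offset 0, accesses are
   rewritten along the lines of

     (mem (symbol_ref ("a")))  =>  (mem (plus (reg anchor) (const_int 0)))
     (mem (symbol_ref ("b")))  =>  (mem (plus (reg anchor) (const_int 16)))

   so both references can share one anchor register.  */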

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */


rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
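
	/* Editor's note (added comment): INTVAL (c) & -INTVAL (c)
	   isolates the lowest set bit of the offset, so CA below is the
	   offset's count of trailing zero bits scaled by BITS_PER_UNIT,
	   a conservative alignment bound that is then capped by the
	   symbol's own alignment SA.  */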
	ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;

	align = MIN (sa, ca);
      }
    else if (MEM_P (x) && MEM_POINTER (x))
      align = MEM_ALIGN (x);

    if (align)
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */

#if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
#define PROMOTE_FUNCTION_MODE PROMOTE_MODE
#endif

enum machine_mode
promote_mode (tree type, enum machine_mode mode, int *punsignedp,
	      int for_call ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifndef PROMOTE_MODE
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_FUNCTION_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:
#ifdef PROMOTE_MODE
      if (for_call)
	{
#endif
	  PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
#ifdef PROMOTE_MODE
	}
      else
	{
	  PROMOTE_MODE (mode, unsignedp, type);
	}
#endif
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (GET_CODE (size) == CONST_INT)
    {
      HOST_WIDE_INT new = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So do the addition ourselves and then
	 use TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }

  return size;
}
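
/* Editor's note: a hedged example, not part of the original file.
   With PREFERRED_STACK_BOUNDARY == 128 (ALIGN == 16 bytes), a constant
   request rounds up directly:

     round_push (GEN_INT (37)) => (const_int 48)

   while a variable SIZE expands to ((size + 15) / 16) * 16 at run time
   via the add/divide/multiply sequence above.  */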

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of restore.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode,
			gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize the necessary alignment, because we
     don't know the final value of preferred_stack_boundary yet while
     executing this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
		       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (!current_function_calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

      /* ??? Code below assumes that the save area needs maximal
	 alignment.  This constraint may be too strong.  */
      gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);

      if (GET_CODE (size) == CONST_INT)
	{
	  HOST_WIDE_INT new = INTVAL (size) / align * align;

	  if (INTVAL (size) != new)
	    size = GEN_INT (new);
	}
      else
	{
	  /* Since we know overflow is not possible, we avoid using
	     CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
				GEN_INT (align), NULL_RTX, 1);
	  size = expand_mult (Pmode, size,
			      GEN_INT (align), NULL_RTX, 1);
	}
    }
  else
    {
      rtx dynamic_offset
	= expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
			stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be properly aligned.  */
  gcc_assert (!(stack_pointer_delta
		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So do the addition ourselves and then
	 use TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
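
/* Editor's note: a hedged worked example, not part of the original file.
   With BIGGEST_ALIGNMENT == 128 bits (16 bytes) and MUST_ALIGN nonzero,
   a request for 20 bytes first grows to 20 + 15 = 35, is rounded up to
   the stack boundary by round_push, and the returned address is then
   realigned with the add/divide/multiply idiom above, in effect

     target = (target + 15) / 16 * 16;

   so the caller always sees a 16-byte-aligned block of at least the
   requested size.  */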

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (size, first)));

      addr = convert_memory_address (ptr_mode, addr);
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (!REG_P (test_addr)
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      gcc_assert (temp == test_addr);

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
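
/* Editor's note: a hedged worked example, not part of the original file.
   With FIRST == 0, STACK_CHECK_PROBE_INTERVAL == 4096 and a constant
   SIZE of 16384, the constant case above probes at offsets 4096, 8192
   and 12288 from the loop, then emits the final probe at 16384, all in
   the growth direction selected by STACK_GROW_OP.  */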

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (tree valtype, tree func, tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
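
/* Editor's note: a hedged example, not part of the original file.
   For a target that returns a 6-byte aggregate in a BLKmode register,
   the search above walks the MODE_INT class from the narrowest mode
   upward (QI, HI, SI, ...) and stops at the first mode of at least 6
   bytes, typically DImode, which then replaces BLKmode on VAL.  */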

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode)
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"