rtlanal.c revision 132718
1/* Analyze RTL for C-Compiler
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "toplev.h"
28#include "rtl.h"
29#include "hard-reg-set.h"
30#include "insn-config.h"
31#include "recog.h"
32#include "tm_p.h"
33#include "flags.h"
34#include "basic-block.h"
35#include "real.h"
36
37/* Forward declarations */
38static int global_reg_mentioned_p_1 (rtx *, void *);
39static void set_of_1 (rtx, rtx, void *);
40static void insn_dependent_p_1 (rtx, rtx, void *);
41static int rtx_referenced_p_1 (rtx *, void *);
42static int computed_jump_p_1 (rtx);
43static void parms_set (rtx, rtx, void *);
44static bool hoist_test_store (rtx, rtx, regset);
45static void hoist_update_store (rtx, rtx *, rtx, rtx);
46
47/* Bit flags that specify the machine subtype we are compiling for.
48   Bits are tested using macros TARGET_... defined in the tm.h file
49   and set by `-m...' switches.  Must be defined in rtlanal.c.  */
50
51int target_flags;
52
53/* Return 1 if the value of X is unstable
54   (would be different at a different point in the program).
55   The frame pointer, arg pointer, etc. are considered stable
56   (within one function) and so is anything marked `unchanging'.  */
57
58int
59rtx_unstable_p (rtx x)
60{
61  RTX_CODE code = GET_CODE (x);
62  int i;
63  const char *fmt;
64
65  switch (code)
66    {
67    case MEM:
68      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
69
70    case QUEUED:
71      return 1;
72
73    case ADDRESSOF:
74    case CONST:
75    case CONST_INT:
76    case CONST_DOUBLE:
77    case CONST_VECTOR:
78    case SYMBOL_REF:
79    case LABEL_REF:
80      return 0;
81
82    case REG:
83      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
84      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
85	  /* The arg pointer varies if it is not a fixed register.  */
86	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
87	  || RTX_UNCHANGING_P (x))
88	return 0;
89#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
90      /* ??? When call-clobbered, the value is stable modulo the restore
91	 that must happen after a call.  This currently screws up local-alloc
92	 into believing that the restore is not needed.  */
93      if (x == pic_offset_table_rtx)
94	return 0;
95#endif
96      return 1;
97
98    case ASM_OPERANDS:
99      if (MEM_VOLATILE_P (x))
100	return 1;
101
102      /* Fall through.  */
103
104    default:
105      break;
106    }
107
108  fmt = GET_RTX_FORMAT (code);
109  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
110    if (fmt[i] == 'e')
111      {
112	if (rtx_unstable_p (XEXP (x, i)))
113	  return 1;
114      }
115    else if (fmt[i] == 'E')
116      {
117	int j;
118	for (j = 0; j < XVECLEN (x, i); j++)
119	  if (rtx_unstable_p (XVECEXP (x, i, j)))
120	    return 1;
121      }
122
123  return 0;
124}
125
126/* Return 1 if X has a value that can vary even between two
127   executions of the program.  0 means X can be compared reliably
128   against certain constants or near-constants.
129   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
130   zero, we are slightly more conservative.
131   The frame pointer and the arg pointer are considered constant.  */
132
133int
134rtx_varies_p (rtx x, int for_alias)
135{
136  RTX_CODE code;
137  int i;
138  const char *fmt;
139
140  if (!x)
141    return 0;
142
143  code = GET_CODE (x);
144  switch (code)
145    {
146    case MEM:
147      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
148
149    case QUEUED:
150      return 1;
151
152    case CONST:
153    case CONST_INT:
154    case CONST_DOUBLE:
155    case CONST_VECTOR:
156    case SYMBOL_REF:
157    case LABEL_REF:
158      return 0;
159
160    case ADDRESSOF:
161      /* This will resolve to some offset from the frame pointer.  */
162      return 0;
163
164    case REG:
165      /* Note that we have to test for the actual rtx used for the frame
166	 and arg pointers and not just the register number in case we have
167	 eliminated the frame and/or arg pointer and are using it
168	 for pseudos.  */
169      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
170	  /* The arg pointer varies if it is not a fixed register.  */
171	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
172	return 0;
173      if (x == pic_offset_table_rtx
174#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
175	  /* ??? When call-clobbered, the value is stable modulo the restore
176	     that must happen after a call.  This currently screws up
177	     local-alloc into believing that the restore is not needed, so we
178	     must return 0 only if we are called from alias analysis.  */
179	  && for_alias
180#endif
181	  )
182	return 0;
183      return 1;
184
185    case LO_SUM:
186      /* The operand 0 of a LO_SUM is considered constant
187	 (in fact it is related specifically to operand 1)
188	 during alias analysis.  */
189      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
190	     || rtx_varies_p (XEXP (x, 1), for_alias);
191
192    case ASM_OPERANDS:
193      if (MEM_VOLATILE_P (x))
194	return 1;
195
196      /* Fall through.  */
197
198    default:
199      break;
200    }
201
202  fmt = GET_RTX_FORMAT (code);
203  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
204    if (fmt[i] == 'e')
205      {
206	if (rtx_varies_p (XEXP (x, i), for_alias))
207	  return 1;
208      }
209    else if (fmt[i] == 'E')
210      {
211	int j;
212	for (j = 0; j < XVECLEN (x, i); j++)
213	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
214	    return 1;
215      }
216
217  return 0;
218}
219
220/* Return 0 if the use of X as an address in a MEM can cause a trap.  */
221
222int
223rtx_addr_can_trap_p (rtx x)
224{
225  enum rtx_code code = GET_CODE (x);
226
227  switch (code)
228    {
229    case SYMBOL_REF:
230      return SYMBOL_REF_WEAK (x);
231
232    case LABEL_REF:
233      return 0;
234
235    case ADDRESSOF:
236      /* This will resolve to some offset from the frame pointer.  */
237      return 0;
238
239    case REG:
240      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
241      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
242	  || x == stack_pointer_rtx
243	  /* The arg pointer varies if it is not a fixed register.  */
244	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
245	return 0;
246      /* All of the virtual frame registers are stack references.  */
247      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
248	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
249	return 0;
250      return 1;
251
252    case CONST:
253      return rtx_addr_can_trap_p (XEXP (x, 0));
254
255    case PLUS:
256      /* An address is assumed not to trap if it is an address that can't
257	 trap plus a constant integer or it is the pic register plus a
258	 constant.  */
259      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
260		 && GET_CODE (XEXP (x, 1)) == CONST_INT)
261		|| (XEXP (x, 0) == pic_offset_table_rtx
262		    && CONSTANT_P (XEXP (x, 1))));
263
264    case LO_SUM:
265    case PRE_MODIFY:
266      return rtx_addr_can_trap_p (XEXP (x, 1));
267
268    case PRE_DEC:
269    case PRE_INC:
270    case POST_DEC:
271    case POST_INC:
272    case POST_MODIFY:
273      return rtx_addr_can_trap_p (XEXP (x, 0));
274
275    default:
276      break;
277    }
278
279  /* If it isn't one of the case above, it can cause a trap.  */
280  return 1;
281}
282
283/* Return true if X is an address that is known to not be zero.  */
284
285bool
286nonzero_address_p (rtx x)
287{
288  enum rtx_code code = GET_CODE (x);
289
290  switch (code)
291    {
292    case SYMBOL_REF:
293      return !SYMBOL_REF_WEAK (x);
294
295    case LABEL_REF:
296      return true;
297
298    case ADDRESSOF:
299      /* This will resolve to some offset from the frame pointer.  */
300      return true;
301
302    case REG:
303      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
304      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
305	  || x == stack_pointer_rtx
306	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
307	return true;
308      /* All of the virtual frame registers are stack references.  */
309      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
310	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
311	return true;
312      return false;
313
314    case CONST:
315      return nonzero_address_p (XEXP (x, 0));
316
317    case PLUS:
318      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
319	{
320	  /* Pointers aren't allowed to wrap.  If we've got a register
321	     that is known to be a pointer, and a positive offset, then
322	     the composite can't be zero.  */
323	  if (INTVAL (XEXP (x, 1)) > 0
324	      && REG_P (XEXP (x, 0))
325	      && REG_POINTER (XEXP (x, 0)))
326	    return true;
327
328	  return nonzero_address_p (XEXP (x, 0));
329	}
330      /* Handle PIC references.  */
331      else if (XEXP (x, 0) == pic_offset_table_rtx
332	       && CONSTANT_P (XEXP (x, 1)))
333	return true;
334      return false;
335
336    case PRE_MODIFY:
337      /* Similar to the above; allow positive offsets.  Further, since
338	 auto-inc is only allowed in memories, the register must be a
339	 pointer.  */
340      if (GET_CODE (XEXP (x, 1)) == CONST_INT
341	  && INTVAL (XEXP (x, 1)) > 0)
342	return true;
343      return nonzero_address_p (XEXP (x, 0));
344
345    case PRE_INC:
346      /* Similarly.  Further, the offset is always positive.  */
347      return true;
348
349    case PRE_DEC:
350    case POST_DEC:
351    case POST_INC:
352    case POST_MODIFY:
353      return nonzero_address_p (XEXP (x, 0));
354
355    case LO_SUM:
356      return nonzero_address_p (XEXP (x, 1));
357
358    default:
359      break;
360    }
361
362  /* If it isn't one of the case above, might be zero.  */
363  return false;
364}
365
366/* Return 1 if X refers to a memory location whose address
367   cannot be compared reliably with constant addresses,
368   or if X refers to a BLKmode memory object.
369   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
370   zero, we are slightly more conservative.  */
371
372int
373rtx_addr_varies_p (rtx x, int for_alias)
374{
375  enum rtx_code code;
376  int i;
377  const char *fmt;
378
379  if (x == 0)
380    return 0;
381
382  code = GET_CODE (x);
383  if (code == MEM)
384    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
385
386  fmt = GET_RTX_FORMAT (code);
387  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
388    if (fmt[i] == 'e')
389      {
390	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
391	  return 1;
392      }
393    else if (fmt[i] == 'E')
394      {
395	int j;
396	for (j = 0; j < XVECLEN (x, i); j++)
397	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
398	    return 1;
399      }
400  return 0;
401}
402
403/* Return the value of the integer term in X, if one is apparent;
404   otherwise return 0.
405   Only obvious integer terms are detected.
406   This is used in cse.c with the `related_value' field.  */
407
408HOST_WIDE_INT
409get_integer_term (rtx x)
410{
411  if (GET_CODE (x) == CONST)
412    x = XEXP (x, 0);
413
414  if (GET_CODE (x) == MINUS
415      && GET_CODE (XEXP (x, 1)) == CONST_INT)
416    return - INTVAL (XEXP (x, 1));
417  if (GET_CODE (x) == PLUS
418      && GET_CODE (XEXP (x, 1)) == CONST_INT)
419    return INTVAL (XEXP (x, 1));
420  return 0;
421}
422
423/* If X is a constant, return the value sans apparent integer term;
424   otherwise return 0.
425   Only obvious integer terms are detected.  */
426
427rtx
428get_related_value (rtx x)
429{
430  if (GET_CODE (x) != CONST)
431    return 0;
432  x = XEXP (x, 0);
433  if (GET_CODE (x) == PLUS
434      && GET_CODE (XEXP (x, 1)) == CONST_INT)
435    return XEXP (x, 0);
436  else if (GET_CODE (x) == MINUS
437	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
438    return XEXP (x, 0);
439  return 0;
440}
441
442/* Given a tablejump insn INSN, return the RTL expression for the offset
443   into the jump table.  If the offset cannot be determined, then return
444   NULL_RTX.
445
446   If EARLIEST is nonzero, it is a pointer to a place where the earliest
447   insn used in locating the offset was found.  */
448
449rtx
450get_jump_table_offset (rtx insn, rtx *earliest)
451{
452  rtx label;
453  rtx table;
454  rtx set;
455  rtx old_insn;
456  rtx x;
457  rtx old_x;
458  rtx y;
459  rtx old_y;
460  int i;
461
462  if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
463    return NULL_RTX;
464
465  x = SET_SRC (set);
466
467  /* Some targets (eg, ARM) emit a tablejump that also
468     contains the out-of-range target.  */
469  if (GET_CODE (x) == IF_THEN_ELSE
470      && GET_CODE (XEXP (x, 2)) == LABEL_REF)
471    x = XEXP (x, 1);
472
473  /* Search backwards and locate the expression stored in X.  */
474  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
475       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
476    ;
477
478  /* If X is an expression using a relative address then strip
479     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
480     or the jump table label.  */
481  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
482      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
483    {
484      for (i = 0; i < 2; i++)
485	{
486	  old_insn = insn;
487	  y = XEXP (x, i);
488
489	  if (y == pc_rtx || y == pic_offset_table_rtx)
490	    break;
491
492	  for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
493	       old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
494	    ;
495
496	  if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
497	    break;
498	}
499
500      if (i >= 2)
501	return NULL_RTX;
502
503      x = XEXP (x, 1 - i);
504
505      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
506	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
507	;
508    }
509
510  /* Strip off any sign or zero extension.  */
511  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
512    {
513      x = XEXP (x, 0);
514
515      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
516	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
517	;
518    }
519
520  /* If X isn't a MEM then this isn't a tablejump we understand.  */
521  if (GET_CODE (x) != MEM)
522    return NULL_RTX;
523
524  /* Strip off the MEM.  */
525  x = XEXP (x, 0);
526
527  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
528       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
529    ;
530
531  /* If X isn't a PLUS than this isn't a tablejump we understand.  */
532  if (GET_CODE (x) != PLUS)
533    return NULL_RTX;
534
535  /* At this point we should have an expression representing the jump table
536     plus an offset.  Examine each operand in order to determine which one
537     represents the jump table.  Knowing that tells us that the other operand
538     must represent the offset.  */
539  for (i = 0; i < 2; i++)
540    {
541      old_insn = insn;
542      y = XEXP (x, i);
543
544      for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
545	   old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
546	;
547
548      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
549	  && reg_mentioned_p (label, y))
550	break;
551    }
552
553  if (i >= 2)
554    return NULL_RTX;
555
556  x = XEXP (x, 1 - i);
557
558  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
559  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
560    for (i = 0; i < 2; i++)
561      if (XEXP (x, i) == pic_offset_table_rtx)
562	{
563	  x = XEXP (x, 1 - i);
564	  break;
565	}
566
567  if (earliest)
568    *earliest = insn;
569
570  /* Return the RTL expression representing the offset.  */
571  return x;
572}
573
574/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
575   a global register.  */
576
577static int
578global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
579{
580  int regno;
581  rtx x = *loc;
582
583  if (! x)
584    return 0;
585
586  switch (GET_CODE (x))
587    {
588    case SUBREG:
589      if (GET_CODE (SUBREG_REG (x)) == REG)
590	{
591	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
592	      && global_regs[subreg_regno (x)])
593	    return 1;
594	  return 0;
595	}
596      break;
597
598    case REG:
599      regno = REGNO (x);
600      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
601	return 1;
602      return 0;
603
604    case SCRATCH:
605    case PC:
606    case CC0:
607    case CONST_INT:
608    case CONST_DOUBLE:
609    case CONST:
610    case LABEL_REF:
611      return 0;
612
613    case CALL:
614      /* A non-constant call might use a global register.  */
615      return 1;
616
617    default:
618      break;
619    }
620
621  return 0;
622}
623
624/* Returns nonzero if X mentions a global register.  */
625
626int
627global_reg_mentioned_p (rtx x)
628{
629  if (INSN_P (x))
630    {
631      if (GET_CODE (x) == CALL_INSN)
632	{
633	  if (! CONST_OR_PURE_CALL_P (x))
634	    return 1;
635	  x = CALL_INSN_FUNCTION_USAGE (x);
636	  if (x == 0)
637	    return 0;
638	}
639      else
640	x = PATTERN (x);
641    }
642
643  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
644}
645
646/* Return the number of places FIND appears within X.  If COUNT_DEST is
647   zero, we do not count occurrences inside the destination of a SET.  */
648
649int
650count_occurrences (rtx x, rtx find, int count_dest)
651{
652  int i, j;
653  enum rtx_code code;
654  const char *format_ptr;
655  int count;
656
657  if (x == find)
658    return 1;
659
660  code = GET_CODE (x);
661
662  switch (code)
663    {
664    case REG:
665    case CONST_INT:
666    case CONST_DOUBLE:
667    case CONST_VECTOR:
668    case SYMBOL_REF:
669    case CODE_LABEL:
670    case PC:
671    case CC0:
672      return 0;
673
674    case MEM:
675      if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
676	return 1;
677      break;
678
679    case SET:
680      if (SET_DEST (x) == find && ! count_dest)
681	return count_occurrences (SET_SRC (x), find, count_dest);
682      break;
683
684    default:
685      break;
686    }
687
688  format_ptr = GET_RTX_FORMAT (code);
689  count = 0;
690
691  for (i = 0; i < GET_RTX_LENGTH (code); i++)
692    {
693      switch (*format_ptr++)
694	{
695	case 'e':
696	  count += count_occurrences (XEXP (x, i), find, count_dest);
697	  break;
698
699	case 'E':
700	  for (j = 0; j < XVECLEN (x, i); j++)
701	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
702	  break;
703	}
704    }
705  return count;
706}
707
708/* Nonzero if register REG appears somewhere within IN.
709   Also works if REG is not a register; in this case it checks
710   for a subexpression of IN that is Lisp "equal" to REG.  */
711
712int
713reg_mentioned_p (rtx reg, rtx in)
714{
715  const char *fmt;
716  int i;
717  enum rtx_code code;
718
719  if (in == 0)
720    return 0;
721
722  if (reg == in)
723    return 1;
724
725  if (GET_CODE (in) == LABEL_REF)
726    return reg == XEXP (in, 0);
727
728  code = GET_CODE (in);
729
730  switch (code)
731    {
732      /* Compare registers by number.  */
733    case REG:
734      return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
735
736      /* These codes have no constituent expressions
737	 and are unique.  */
738    case SCRATCH:
739    case CC0:
740    case PC:
741      return 0;
742
743    case CONST_INT:
744    case CONST_VECTOR:
745    case CONST_DOUBLE:
746      /* These are kept unique for a given value.  */
747      return 0;
748
749    default:
750      break;
751    }
752
753  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
754    return 1;
755
756  fmt = GET_RTX_FORMAT (code);
757
758  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
759    {
760      if (fmt[i] == 'E')
761	{
762	  int j;
763	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
764	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
765	      return 1;
766	}
767      else if (fmt[i] == 'e'
768	       && reg_mentioned_p (reg, XEXP (in, i)))
769	return 1;
770    }
771  return 0;
772}
773
774/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
775   no CODE_LABEL insn.  */
776
777int
778no_labels_between_p (rtx beg, rtx end)
779{
780  rtx p;
781  if (beg == end)
782    return 0;
783  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
784    if (GET_CODE (p) == CODE_LABEL)
785      return 0;
786  return 1;
787}
788
789/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
790   no JUMP_INSN insn.  */
791
792int
793no_jumps_between_p (rtx beg, rtx end)
794{
795  rtx p;
796  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
797    if (GET_CODE (p) == JUMP_INSN)
798      return 0;
799  return 1;
800}
801
802/* Nonzero if register REG is used in an insn between
803   FROM_INSN and TO_INSN (exclusive of those two).  */
804
805int
806reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
807{
808  rtx insn;
809
810  if (from_insn == to_insn)
811    return 0;
812
813  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
814    if (INSN_P (insn)
815	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
816	   || (GET_CODE (insn) == CALL_INSN
817	      && (find_reg_fusage (insn, USE, reg)
818		  || find_reg_fusage (insn, CLOBBER, reg)))))
819      return 1;
820  return 0;
821}
822
823/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
824   is entirely replaced by a new value and the only use is as a SET_DEST,
825   we do not consider it a reference.  */
826
827int
828reg_referenced_p (rtx x, rtx body)
829{
830  int i;
831
832  switch (GET_CODE (body))
833    {
834    case SET:
835      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
836	return 1;
837
838      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
839	 of a REG that occupies all of the REG, the insn references X if
840	 it is mentioned in the destination.  */
841      if (GET_CODE (SET_DEST (body)) != CC0
842	  && GET_CODE (SET_DEST (body)) != PC
843	  && GET_CODE (SET_DEST (body)) != REG
844	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
845		&& GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
846		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
847		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
848		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
849			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
850	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
851	return 1;
852      return 0;
853
854    case ASM_OPERANDS:
855      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
856	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
857	  return 1;
858      return 0;
859
860    case CALL:
861    case USE:
862    case IF_THEN_ELSE:
863      return reg_overlap_mentioned_p (x, body);
864
865    case TRAP_IF:
866      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
867
868    case PREFETCH:
869      return reg_overlap_mentioned_p (x, XEXP (body, 0));
870
871    case UNSPEC:
872    case UNSPEC_VOLATILE:
873      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
874	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
875	  return 1;
876      return 0;
877
878    case PARALLEL:
879      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
880	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
881	  return 1;
882      return 0;
883
884    case CLOBBER:
885      if (GET_CODE (XEXP (body, 0)) == MEM)
886	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
887	  return 1;
888      return 0;
889
890    case COND_EXEC:
891      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
892	return 1;
893      return reg_referenced_p (x, COND_EXEC_CODE (body));
894
895    default:
896      return 0;
897    }
898}
899
900/* Nonzero if register REG is referenced in an insn between
901   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
902   not count.  */
903
904int
905reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
906{
907  rtx insn;
908
909  if (from_insn == to_insn)
910    return 0;
911
912  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
913    if (INSN_P (insn)
914	&& (reg_referenced_p (reg, PATTERN (insn))
915	   || (GET_CODE (insn) == CALL_INSN
916	      && find_reg_fusage (insn, USE, reg))))
917      return 1;
918  return 0;
919}
920
921/* Nonzero if register REG is set or clobbered in an insn between
922   FROM_INSN and TO_INSN (exclusive of those two).  */
923
924int
925reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
926{
927  rtx insn;
928
929  if (from_insn == to_insn)
930    return 0;
931
932  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
933    if (INSN_P (insn) && reg_set_p (reg, insn))
934      return 1;
935  return 0;
936}
937
938/* Internals of reg_set_between_p.  */
939int
940reg_set_p (rtx reg, rtx insn)
941{
942  /* We can be passed an insn or part of one.  If we are passed an insn,
943     check if a side-effect of the insn clobbers REG.  */
944  if (INSN_P (insn)
945      && (FIND_REG_INC_NOTE (insn, reg)
946	  || (GET_CODE (insn) == CALL_INSN
947	      /* We'd like to test call_used_regs here, but rtlanal.c can't
948		 reference that variable due to its use in genattrtab.  So
949		 we'll just be more conservative.
950
951		 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
952		 information holds all clobbered registers.  */
953	      && ((GET_CODE (reg) == REG
954		   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
955		  || GET_CODE (reg) == MEM
956		  || find_reg_fusage (insn, CLOBBER, reg)))))
957    return 1;
958
959  return set_of (reg, insn) != NULL_RTX;
960}
961
962/* Similar to reg_set_between_p, but check all registers in X.  Return 0
963   only if none of them are modified between START and END.  Do not
964   consider non-registers one way or the other.  */
965
966int
967regs_set_between_p (rtx x, rtx start, rtx end)
968{
969  enum rtx_code code = GET_CODE (x);
970  const char *fmt;
971  int i, j;
972
973  switch (code)
974    {
975    case CONST_INT:
976    case CONST_DOUBLE:
977    case CONST_VECTOR:
978    case CONST:
979    case SYMBOL_REF:
980    case LABEL_REF:
981    case PC:
982    case CC0:
983      return 0;
984
985    case REG:
986      return reg_set_between_p (x, start, end);
987
988    default:
989      break;
990    }
991
992  fmt = GET_RTX_FORMAT (code);
993  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
994    {
995      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
996	return 1;
997
998      else if (fmt[i] == 'E')
999	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1000	  if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1001	    return 1;
1002    }
1003
1004  return 0;
1005}
1006
1007/* Similar to reg_set_between_p, but check all registers in X.  Return 0
1008   only if none of them are modified between START and END.  Return 1 if
1009   X contains a MEM; this routine does usememory aliasing.  */
1010
1011int
1012modified_between_p (rtx x, rtx start, rtx end)
1013{
1014  enum rtx_code code = GET_CODE (x);
1015  const char *fmt;
1016  int i, j;
1017  rtx insn;
1018
1019  if (start == end)
1020    return 0;
1021
1022  switch (code)
1023    {
1024    case CONST_INT:
1025    case CONST_DOUBLE:
1026    case CONST_VECTOR:
1027    case CONST:
1028    case SYMBOL_REF:
1029    case LABEL_REF:
1030      return 0;
1031
1032    case PC:
1033    case CC0:
1034      return 1;
1035
1036    case MEM:
1037      if (RTX_UNCHANGING_P (x))
1038	return 0;
1039      if (modified_between_p (XEXP (x, 0), start, end))
1040	return 1;
1041      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1042	if (memory_modified_in_insn_p (x, insn))
1043	  return 1;
1044      return 0;
1045      break;
1046
1047    case REG:
1048      return reg_set_between_p (x, start, end);
1049
1050    default:
1051      break;
1052    }
1053
1054  fmt = GET_RTX_FORMAT (code);
1055  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1056    {
1057      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1058	return 1;
1059
1060      else if (fmt[i] == 'E')
1061	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1062	  if (modified_between_p (XVECEXP (x, i, j), start, end))
1063	    return 1;
1064    }
1065
1066  return 0;
1067}
1068
1069/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
1070   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
1071   does use memory aliasing.  */
1072
1073int
1074modified_in_p (rtx x, rtx insn)
1075{
1076  enum rtx_code code = GET_CODE (x);
1077  const char *fmt;
1078  int i, j;
1079
1080  switch (code)
1081    {
1082    case CONST_INT:
1083    case CONST_DOUBLE:
1084    case CONST_VECTOR:
1085    case CONST:
1086    case SYMBOL_REF:
1087    case LABEL_REF:
1088      return 0;
1089
1090    case PC:
1091    case CC0:
1092      return 1;
1093
1094    case MEM:
1095      if (RTX_UNCHANGING_P (x))
1096	return 0;
1097      if (modified_in_p (XEXP (x, 0), insn))
1098	return 1;
1099      if (memory_modified_in_insn_p (x, insn))
1100	return 1;
1101      return 0;
1102      break;
1103
1104    case REG:
1105      return reg_set_p (x, insn);
1106
1107    default:
1108      break;
1109    }
1110
1111  fmt = GET_RTX_FORMAT (code);
1112  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1113    {
1114      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1115	return 1;
1116
1117      else if (fmt[i] == 'E')
1118	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1119	  if (modified_in_p (XVECEXP (x, i, j), insn))
1120	    return 1;
1121    }
1122
1123  return 0;
1124}
1125
1126/* Return true if anything in insn X is (anti,output,true) dependent on
1127   anything in insn Y.  */
1128
1129int
1130insn_dependent_p (rtx x, rtx y)
1131{
1132  rtx tmp;
1133
1134  if (! INSN_P (x) || ! INSN_P (y))
1135    abort ();
1136
1137  tmp = PATTERN (y);
1138  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1139  if (tmp == NULL_RTX)
1140    return 1;
1141
1142  tmp = PATTERN (x);
1143  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1144  if (tmp == NULL_RTX)
1145    return 1;
1146
1147  return 0;
1148}
1149
1150/* A helper routine for insn_dependent_p called through note_stores.  */
1151
1152static void
1153insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
1154{
1155  rtx * pinsn = (rtx *) data;
1156
1157  if (*pinsn && reg_mentioned_p (x, *pinsn))
1158    *pinsn = NULL_RTX;
1159}
1160
1161/* Helper function for set_of.  */
1162struct set_of_data
1163  {
1164    rtx found;
1165    rtx pat;
1166  };
1167
1168static void
1169set_of_1 (rtx x, rtx pat, void *data1)
1170{
1171   struct set_of_data *data = (struct set_of_data *) (data1);
1172   if (rtx_equal_p (x, data->pat)
1173       || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1174     data->found = pat;
1175}
1176
1177/* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1178   (either directly or via STRICT_LOW_PART and similar modifiers).  */
1179rtx
1180set_of (rtx pat, rtx insn)
1181{
1182  struct set_of_data data;
1183  data.found = NULL_RTX;
1184  data.pat = pat;
1185  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1186  return data.found;
1187}
1188
1189/* Given an INSN, return a SET expression if this insn has only a single SET.
1190   It may also have CLOBBERs, USEs, or SET whose output
1191   will not be used, which we ignore.  */
1192
1193rtx
1194single_set_2 (rtx insn, rtx pat)
1195{
1196  rtx set = NULL;
1197  int set_verified = 1;
1198  int i;
1199
1200  if (GET_CODE (pat) == PARALLEL)
1201    {
1202      for (i = 0; i < XVECLEN (pat, 0); i++)
1203	{
1204	  rtx sub = XVECEXP (pat, 0, i);
1205	  switch (GET_CODE (sub))
1206	    {
1207	    case USE:
1208	    case CLOBBER:
1209	      break;
1210
1211	    case SET:
1212	      /* We can consider insns having multiple sets, where all
1213		 but one are dead as single set insns.  In common case
1214		 only single set is present in the pattern so we want
1215		 to avoid checking for REG_UNUSED notes unless necessary.
1216
1217		 When we reach set first time, we just expect this is
1218		 the single set we are looking for and only when more
1219		 sets are found in the insn, we check them.  */
1220	      if (!set_verified)
1221		{
1222		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1223		      && !side_effects_p (set))
1224		    set = NULL;
1225		  else
1226		    set_verified = 1;
1227		}
1228	      if (!set)
1229		set = sub, set_verified = 0;
1230	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1231		       || side_effects_p (sub))
1232		return NULL_RTX;
1233	      break;
1234
1235	    default:
1236	      return NULL_RTX;
1237	    }
1238	}
1239    }
1240  return set;
1241}
1242
1243/* Given an INSN, return nonzero if it has more than one SET, else return
1244   zero.  */
1245
1246int
1247multiple_sets (rtx insn)
1248{
1249  int found;
1250  int i;
1251
1252  /* INSN must be an insn.  */
1253  if (! INSN_P (insn))
1254    return 0;
1255
1256  /* Only a PARALLEL can have multiple SETs.  */
1257  if (GET_CODE (PATTERN (insn)) == PARALLEL)
1258    {
1259      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1260	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1261	  {
1262	    /* If we have already found a SET, then return now.  */
1263	    if (found)
1264	      return 1;
1265	    else
1266	      found = 1;
1267	  }
1268    }
1269
1270  /* Either zero or one SET.  */
1271  return 0;
1272}
1273
1274/* Return nonzero if the destination of SET equals the source
1275   and there are no side effects.  */
1276
1277int
1278set_noop_p (rtx set)
1279{
1280  rtx src = SET_SRC (set);
1281  rtx dst = SET_DEST (set);
1282
1283  if (dst == pc_rtx && src == pc_rtx)
1284    return 1;
1285
1286  if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1287    return rtx_equal_p (dst, src) && !side_effects_p (dst);
1288
1289  if (GET_CODE (dst) == SIGN_EXTRACT
1290      || GET_CODE (dst) == ZERO_EXTRACT)
1291    return rtx_equal_p (XEXP (dst, 0), src)
1292	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1293	   && !side_effects_p (src);
1294
1295  if (GET_CODE (dst) == STRICT_LOW_PART)
1296    dst = XEXP (dst, 0);
1297
1298  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1299    {
1300      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1301	return 0;
1302      src = SUBREG_REG (src);
1303      dst = SUBREG_REG (dst);
1304    }
1305
1306  return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1307	  && REGNO (src) == REGNO (dst));
1308}
1309
1310/* Return nonzero if an insn consists only of SETs, each of which only sets a
1311   value to itself.  */
1312
1313int
1314noop_move_p (rtx insn)
1315{
1316  rtx pat = PATTERN (insn);
1317
1318  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1319    return 1;
1320
1321  /* Insns carrying these notes are useful later on.  */
1322  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1323    return 0;
1324
1325  /* For now treat an insn with a REG_RETVAL note as a
1326     a special insn which should not be considered a no-op.  */
1327  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1328    return 0;
1329
1330  if (GET_CODE (pat) == SET && set_noop_p (pat))
1331    return 1;
1332
1333  if (GET_CODE (pat) == PARALLEL)
1334    {
1335      int i;
1336      /* If nothing but SETs of registers to themselves,
1337	 this insn can also be deleted.  */
1338      for (i = 0; i < XVECLEN (pat, 0); i++)
1339	{
1340	  rtx tem = XVECEXP (pat, 0, i);
1341
1342	  if (GET_CODE (tem) == USE
1343	      || GET_CODE (tem) == CLOBBER)
1344	    continue;
1345
1346	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1347	    return 0;
1348	}
1349
1350      return 1;
1351    }
1352  return 0;
1353}
1354
1355
1356/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1357   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1358   If the object was modified, if we hit a partial assignment to X, or hit a
1359   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1360   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1361   be the src.  */
1362
1363rtx
1364find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1365{
1366  rtx p;
1367
1368  for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1369       p = PREV_INSN (p))
1370    if (INSN_P (p))
1371      {
1372	rtx set = single_set (p);
1373	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1374
1375	if (set && rtx_equal_p (x, SET_DEST (set)))
1376	  {
1377	    rtx src = SET_SRC (set);
1378
1379	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1380	      src = XEXP (note, 0);
1381
1382	    if ((valid_to == NULL_RTX
1383		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1384		/* Reject hard registers because we don't usually want
1385		   to use them; we'd rather use a pseudo.  */
1386		&& (! (GET_CODE (src) == REG
1387		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1388	      {
1389		*pinsn = p;
1390		return src;
1391	      }
1392	  }
1393
1394	/* If set in non-simple way, we don't have a value.  */
1395	if (reg_set_p (x, p))
1396	  break;
1397      }
1398
1399  return x;
1400}
1401
1402/* Return nonzero if register in range [REGNO, ENDREGNO)
1403   appears either explicitly or implicitly in X
1404   other than being stored into.
1405
1406   References contained within the substructure at LOC do not count.
1407   LOC may be zero, meaning don't ignore anything.  */
1408
1409int
1410refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1411		   rtx *loc)
1412{
1413  int i;
1414  unsigned int x_regno;
1415  RTX_CODE code;
1416  const char *fmt;
1417
1418 repeat:
1419  /* The contents of a REG_NONNEG note is always zero, so we must come here
1420     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1421  if (x == 0)
1422    return 0;
1423
1424  code = GET_CODE (x);
1425
1426  switch (code)
1427    {
1428    case REG:
1429      x_regno = REGNO (x);
1430
1431      /* If we modifying the stack, frame, or argument pointer, it will
1432	 clobber a virtual register.  In fact, we could be more precise,
1433	 but it isn't worth it.  */
1434      if ((x_regno == STACK_POINTER_REGNUM
1435#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1436	   || x_regno == ARG_POINTER_REGNUM
1437#endif
1438	   || x_regno == FRAME_POINTER_REGNUM)
1439	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1440	return 1;
1441
1442      return (endregno > x_regno
1443	      && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1444				    ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1445			      : 1));
1446
1447    case SUBREG:
1448      /* If this is a SUBREG of a hard reg, we can see exactly which
1449	 registers are being modified.  Otherwise, handle normally.  */
1450      if (GET_CODE (SUBREG_REG (x)) == REG
1451	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1452	{
1453	  unsigned int inner_regno = subreg_regno (x);
1454	  unsigned int inner_endregno
1455	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1456			     ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1457
1458	  return endregno > inner_regno && regno < inner_endregno;
1459	}
1460      break;
1461
1462    case CLOBBER:
1463    case SET:
1464      if (&SET_DEST (x) != loc
1465	  /* Note setting a SUBREG counts as referring to the REG it is in for
1466	     a pseudo but not for hard registers since we can
1467	     treat each word individually.  */
1468	  && ((GET_CODE (SET_DEST (x)) == SUBREG
1469	       && loc != &SUBREG_REG (SET_DEST (x))
1470	       && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1471	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1472	       && refers_to_regno_p (regno, endregno,
1473				     SUBREG_REG (SET_DEST (x)), loc))
1474	      || (GET_CODE (SET_DEST (x)) != REG
1475		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1476	return 1;
1477
1478      if (code == CLOBBER || loc == &SET_SRC (x))
1479	return 0;
1480      x = SET_SRC (x);
1481      goto repeat;
1482
1483    default:
1484      break;
1485    }
1486
1487  /* X does not match, so try its subexpressions.  */
1488
1489  fmt = GET_RTX_FORMAT (code);
1490  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1491    {
1492      if (fmt[i] == 'e' && loc != &XEXP (x, i))
1493	{
1494	  if (i == 0)
1495	    {
1496	      x = XEXP (x, 0);
1497	      goto repeat;
1498	    }
1499	  else
1500	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1501	      return 1;
1502	}
1503      else if (fmt[i] == 'E')
1504	{
1505	  int j;
1506	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1507	    if (loc != &XVECEXP (x, i, j)
1508		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1509	      return 1;
1510	}
1511    }
1512  return 0;
1513}
1514
1515/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1516   we check if any register number in X conflicts with the relevant register
1517   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1518   contains a MEM (we don't bother checking for memory addresses that can't
1519   conflict because we expect this to be a rare case.  */
1520
1521int
1522reg_overlap_mentioned_p (rtx x, rtx in)
1523{
1524  unsigned int regno, endregno;
1525
1526  /* Overly conservative.  */
1527  if (GET_CODE (x) == STRICT_LOW_PART
1528      || GET_CODE (x) == ZERO_EXTRACT
1529      || GET_CODE (x) == SIGN_EXTRACT)
1530    x = XEXP (x, 0);
1531
1532  /* If either argument is a constant, then modifying X can not affect IN.  */
1533  if (CONSTANT_P (x) || CONSTANT_P (in))
1534    return 0;
1535
1536  switch (GET_CODE (x))
1537    {
1538    case SUBREG:
1539      regno = REGNO (SUBREG_REG (x));
1540      if (regno < FIRST_PSEUDO_REGISTER)
1541	regno = subreg_regno (x);
1542      goto do_reg;
1543
1544    case REG:
1545      regno = REGNO (x);
1546    do_reg:
1547      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1548			  ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1549      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1550
1551    case MEM:
1552      {
1553	const char *fmt;
1554	int i;
1555
1556	if (GET_CODE (in) == MEM)
1557	  return 1;
1558
1559	fmt = GET_RTX_FORMAT (GET_CODE (in));
1560	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1561	  if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1562	    return 1;
1563
1564	return 0;
1565      }
1566
1567    case SCRATCH:
1568    case PC:
1569    case CC0:
1570      return reg_mentioned_p (x, in);
1571
1572    case PARALLEL:
1573      {
1574	int i;
1575
1576	/* If any register in here refers to it we return true.  */
1577	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1578	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1579	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1580	      return 1;
1581	return 0;
1582      }
1583
1584    default:
1585      break;
1586    }
1587
1588  abort ();
1589}
1590
1591/* Return the last value to which REG was set prior to INSN.  If we can't
1592   find it easily, return 0.
1593
1594   We only return a REG, SUBREG, or constant because it is too hard to
1595   check if a MEM remains unchanged.  */
1596
1597rtx
1598reg_set_last (rtx x, rtx insn)
1599{
1600  rtx orig_insn = insn;
1601
1602  /* Scan backwards until reg_set_last_1 changed one of the above flags.
1603     Stop when we reach a label or X is a hard reg and we reach a
1604     CALL_INSN (if reg_set_last_last_regno is a hard reg).
1605
1606     If we find a set of X, ensure that its SET_SRC remains unchanged.  */
1607
1608  /* We compare with <= here, because reg_set_last_last_regno
1609     is actually the number of the first reg *not* in X.  */
1610  for (;
1611       insn && GET_CODE (insn) != CODE_LABEL
1612       && ! (GET_CODE (insn) == CALL_INSN
1613	     && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1614       insn = PREV_INSN (insn))
1615    if (INSN_P (insn))
1616      {
1617	rtx set = set_of (x, insn);
1618	/* OK, this function modify our register.  See if we understand it.  */
1619	if (set)
1620	  {
1621	    rtx last_value;
1622	    if (GET_CODE (set) != SET || SET_DEST (set) != x)
1623	      return 0;
1624	    last_value = SET_SRC (x);
1625	    if (CONSTANT_P (last_value)
1626		|| ((GET_CODE (last_value) == REG
1627		     || GET_CODE (last_value) == SUBREG)
1628		    && ! reg_set_between_p (last_value,
1629					    insn, orig_insn)))
1630	      return last_value;
1631	    else
1632	      return 0;
1633	  }
1634      }
1635
1636  return 0;
1637}
1638
1639/* Call FUN on each register or MEM that is stored into or clobbered by X.
1640   (X would be the pattern of an insn).
1641   FUN receives two arguments:
1642     the REG, MEM, CC0 or PC being stored in or clobbered,
1643     the SET or CLOBBER rtx that does the store.
1644
1645  If the item being stored in or clobbered is a SUBREG of a hard register,
1646  the SUBREG will be passed.  */
1647
1648void
1649note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1650{
1651  int i;
1652
1653  if (GET_CODE (x) == COND_EXEC)
1654    x = COND_EXEC_CODE (x);
1655
1656  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1657    {
1658      rtx dest = SET_DEST (x);
1659
1660      while ((GET_CODE (dest) == SUBREG
1661	      && (GET_CODE (SUBREG_REG (dest)) != REG
1662		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1663	     || GET_CODE (dest) == ZERO_EXTRACT
1664	     || GET_CODE (dest) == SIGN_EXTRACT
1665	     || GET_CODE (dest) == STRICT_LOW_PART)
1666	dest = XEXP (dest, 0);
1667
1668      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1669	 each of whose first operand is a register.  */
1670      if (GET_CODE (dest) == PARALLEL)
1671	{
1672	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1673	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1674	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1675	}
1676      else
1677	(*fun) (dest, x, data);
1678    }
1679
1680  else if (GET_CODE (x) == PARALLEL)
1681    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1682      note_stores (XVECEXP (x, 0, i), fun, data);
1683}
1684
1685/* Like notes_stores, but call FUN for each expression that is being
1686   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1687   FUN for each expression, not any interior subexpressions.  FUN receives a
1688   pointer to the expression and the DATA passed to this function.
1689
1690   Note that this is not quite the same test as that done in reg_referenced_p
1691   since that considers something as being referenced if it is being
1692   partially set, while we do not.  */
1693
1694void
1695note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1696{
1697  rtx body = *pbody;
1698  int i;
1699
1700  switch (GET_CODE (body))
1701    {
1702    case COND_EXEC:
1703      (*fun) (&COND_EXEC_TEST (body), data);
1704      note_uses (&COND_EXEC_CODE (body), fun, data);
1705      return;
1706
1707    case PARALLEL:
1708      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1709	note_uses (&XVECEXP (body, 0, i), fun, data);
1710      return;
1711
1712    case USE:
1713      (*fun) (&XEXP (body, 0), data);
1714      return;
1715
1716    case ASM_OPERANDS:
1717      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1718	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1719      return;
1720
1721    case TRAP_IF:
1722      (*fun) (&TRAP_CONDITION (body), data);
1723      return;
1724
1725    case PREFETCH:
1726      (*fun) (&XEXP (body, 0), data);
1727      return;
1728
1729    case UNSPEC:
1730    case UNSPEC_VOLATILE:
1731      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1732	(*fun) (&XVECEXP (body, 0, i), data);
1733      return;
1734
1735    case CLOBBER:
1736      if (GET_CODE (XEXP (body, 0)) == MEM)
1737	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1738      return;
1739
1740    case SET:
1741      {
1742	rtx dest = SET_DEST (body);
1743
1744	/* For sets we replace everything in source plus registers in memory
1745	   expression in store and operands of a ZERO_EXTRACT.  */
1746	(*fun) (&SET_SRC (body), data);
1747
1748	if (GET_CODE (dest) == ZERO_EXTRACT)
1749	  {
1750	    (*fun) (&XEXP (dest, 1), data);
1751	    (*fun) (&XEXP (dest, 2), data);
1752	  }
1753
1754	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1755	  dest = XEXP (dest, 0);
1756
1757	if (GET_CODE (dest) == MEM)
1758	  (*fun) (&XEXP (dest, 0), data);
1759      }
1760      return;
1761
1762    default:
1763      /* All the other possibilities never store.  */
1764      (*fun) (pbody, data);
1765      return;
1766    }
1767}
1768
1769/* Return nonzero if X's old contents don't survive after INSN.
1770   This will be true if X is (cc0) or if X is a register and
1771   X dies in INSN or because INSN entirely sets X.
1772
1773   "Entirely set" means set directly and not through a SUBREG,
1774   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1775   Likewise, REG_INC does not count.
1776
1777   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1778   but for this use that makes no difference, since regs don't overlap
1779   during their lifetimes.  Therefore, this function may be used
1780   at any time after deaths have been computed (in flow.c).
1781
1782   If REG is a hard reg that occupies multiple machine registers, this
1783   function will only return 1 if each of those registers will be replaced
1784   by INSN.  */
1785
1786int
1787dead_or_set_p (rtx insn, rtx x)
1788{
1789  unsigned int regno, last_regno;
1790  unsigned int i;
1791
1792  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1793  if (GET_CODE (x) == CC0)
1794    return 1;
1795
1796  if (GET_CODE (x) != REG)
1797    abort ();
1798
1799  regno = REGNO (x);
1800  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1801		: regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1802
1803  for (i = regno; i <= last_regno; i++)
1804    if (! dead_or_set_regno_p (insn, i))
1805      return 0;
1806
1807  return 1;
1808}
1809
1810/* Utility function for dead_or_set_p to check an individual register.  Also
1811   called from flow.c.  */
1812
1813int
1814dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1815{
1816  unsigned int regno, endregno;
1817  rtx pattern;
1818
1819  /* See if there is a death note for something that includes TEST_REGNO.  */
1820  if (find_regno_note (insn, REG_DEAD, test_regno))
1821    return 1;
1822
1823  if (GET_CODE (insn) == CALL_INSN
1824      && find_regno_fusage (insn, CLOBBER, test_regno))
1825    return 1;
1826
1827  pattern = PATTERN (insn);
1828
1829  if (GET_CODE (pattern) == COND_EXEC)
1830    pattern = COND_EXEC_CODE (pattern);
1831
1832  if (GET_CODE (pattern) == SET)
1833    {
1834      rtx dest = SET_DEST (pattern);
1835
1836      /* A value is totally replaced if it is the destination or the
1837	 destination is a SUBREG of REGNO that does not change the number of
1838	 words in it.  */
1839      if (GET_CODE (dest) == SUBREG
1840	  && (((GET_MODE_SIZE (GET_MODE (dest))
1841		+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1842	      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1843		   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1844	dest = SUBREG_REG (dest);
1845
1846      if (GET_CODE (dest) != REG)
1847	return 0;
1848
1849      regno = REGNO (dest);
1850      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1851		  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1852
1853      return (test_regno >= regno && test_regno < endregno);
1854    }
1855  else if (GET_CODE (pattern) == PARALLEL)
1856    {
1857      int i;
1858
1859      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1860	{
1861	  rtx body = XVECEXP (pattern, 0, i);
1862
1863	  if (GET_CODE (body) == COND_EXEC)
1864	    body = COND_EXEC_CODE (body);
1865
1866	  if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1867	    {
1868	      rtx dest = SET_DEST (body);
1869
1870	      if (GET_CODE (dest) == SUBREG
1871		  && (((GET_MODE_SIZE (GET_MODE (dest))
1872			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1873		      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1874			   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1875		dest = SUBREG_REG (dest);
1876
1877	      if (GET_CODE (dest) != REG)
1878		continue;
1879
1880	      regno = REGNO (dest);
1881	      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1882			  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1883
1884	      if (test_regno >= regno && test_regno < endregno)
1885		return 1;
1886	    }
1887	}
1888    }
1889
1890  return 0;
1891}
1892
1893/* Return the reg-note of kind KIND in insn INSN, if there is one.
1894   If DATUM is nonzero, look for one whose datum is DATUM.  */
1895
1896rtx
1897find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1898{
1899  rtx link;
1900
1901  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1902  if (! INSN_P (insn))
1903    return 0;
1904
1905  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1906    if (REG_NOTE_KIND (link) == kind
1907	&& (datum == 0 || datum == XEXP (link, 0)))
1908      return link;
1909  return 0;
1910}
1911
1912/* Return the reg-note of kind KIND in insn INSN which applies to register
1913   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1914   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1915   it might be the case that the note overlaps REGNO.  */
1916
1917rtx
1918find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1919{
1920  rtx link;
1921
1922  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1923  if (! INSN_P (insn))
1924    return 0;
1925
1926  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1927    if (REG_NOTE_KIND (link) == kind
1928	/* Verify that it is a register, so that scratch and MEM won't cause a
1929	   problem here.  */
1930	&& GET_CODE (XEXP (link, 0)) == REG
1931	&& REGNO (XEXP (link, 0)) <= regno
1932	&& ((REGNO (XEXP (link, 0))
1933	     + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1934		: HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1935				    GET_MODE (XEXP (link, 0)))))
1936	    > regno))
1937      return link;
1938  return 0;
1939}
1940
1941/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1942   has such a note.  */
1943
1944rtx
1945find_reg_equal_equiv_note (rtx insn)
1946{
1947  rtx link;
1948
1949  if (!INSN_P (insn))
1950    return 0;
1951  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1952    if (REG_NOTE_KIND (link) == REG_EQUAL
1953	|| REG_NOTE_KIND (link) == REG_EQUIV)
1954      {
1955	if (single_set (insn) == 0)
1956	  return 0;
1957	return link;
1958      }
1959  return NULL;
1960}
1961
1962/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1963   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1964
1965int
1966find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1967{
1968  /* If it's not a CALL_INSN, it can't possibly have a
1969     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1970  if (GET_CODE (insn) != CALL_INSN)
1971    return 0;
1972
1973  if (! datum)
1974    abort ();
1975
1976  if (GET_CODE (datum) != REG)
1977    {
1978      rtx link;
1979
1980      for (link = CALL_INSN_FUNCTION_USAGE (insn);
1981	   link;
1982	   link = XEXP (link, 1))
1983	if (GET_CODE (XEXP (link, 0)) == code
1984	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1985	  return 1;
1986    }
1987  else
1988    {
1989      unsigned int regno = REGNO (datum);
1990
1991      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1992	 to pseudo registers, so don't bother checking.  */
1993
1994      if (regno < FIRST_PSEUDO_REGISTER)
1995	{
1996	  unsigned int end_regno
1997	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1998	  unsigned int i;
1999
2000	  for (i = regno; i < end_regno; i++)
2001	    if (find_regno_fusage (insn, code, i))
2002	      return 1;
2003	}
2004    }
2005
2006  return 0;
2007}
2008
2009/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2010   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
2011
2012int
2013find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
2014{
2015  rtx link;
2016
2017  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2018     to pseudo registers, so don't bother checking.  */
2019
2020  if (regno >= FIRST_PSEUDO_REGISTER
2021      || GET_CODE (insn) != CALL_INSN )
2022    return 0;
2023
2024  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2025    {
2026      unsigned int regnote;
2027      rtx op, reg;
2028
2029      if (GET_CODE (op = XEXP (link, 0)) == code
2030	  && GET_CODE (reg = XEXP (op, 0)) == REG
2031	  && (regnote = REGNO (reg)) <= regno
2032	  && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2033	return 1;
2034    }
2035
2036  return 0;
2037}
2038
2039/* Return true if INSN is a call to a pure function.  */
2040
2041int
2042pure_call_p (rtx insn)
2043{
2044  rtx link;
2045
2046  if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2047    return 0;
2048
2049  /* Look for the note that differentiates const and pure functions.  */
2050  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2051    {
2052      rtx u, m;
2053
2054      if (GET_CODE (u = XEXP (link, 0)) == USE
2055	  && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2056	  && GET_CODE (XEXP (m, 0)) == SCRATCH)
2057	return 1;
2058    }
2059
2060  return 0;
2061}
2062
2063/* Remove register note NOTE from the REG_NOTES of INSN.  */
2064
2065void
2066remove_note (rtx insn, rtx note)
2067{
2068  rtx link;
2069
2070  if (note == NULL_RTX)
2071    return;
2072
2073  if (REG_NOTES (insn) == note)
2074    {
2075      REG_NOTES (insn) = XEXP (note, 1);
2076      return;
2077    }
2078
2079  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2080    if (XEXP (link, 1) == note)
2081      {
2082	XEXP (link, 1) = XEXP (note, 1);
2083	return;
2084      }
2085
2086  abort ();
2087}
2088
2089/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2090   return 1 if it is found.  A simple equality test is used to determine if
2091   NODE matches.  */
2092
2093int
2094in_expr_list_p (rtx listp, rtx node)
2095{
2096  rtx x;
2097
2098  for (x = listp; x; x = XEXP (x, 1))
2099    if (node == XEXP (x, 0))
2100      return 1;
2101
2102  return 0;
2103}
2104
2105/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2106   remove that entry from the list if it is found.
2107
2108   A simple equality test is used to determine if NODE matches.  */
2109
2110void
2111remove_node_from_expr_list (rtx node, rtx *listp)
2112{
2113  rtx temp = *listp;
2114  rtx prev = NULL_RTX;
2115
2116  while (temp)
2117    {
2118      if (node == XEXP (temp, 0))
2119	{
2120	  /* Splice the node out of the list.  */
2121	  if (prev)
2122	    XEXP (prev, 1) = XEXP (temp, 1);
2123	  else
2124	    *listp = XEXP (temp, 1);
2125
2126	  return;
2127	}
2128
2129      prev = temp;
2130      temp = XEXP (temp, 1);
2131    }
2132}
2133
2134/* Nonzero if X contains any volatile instructions.  These are instructions
2135   which may cause unpredictable machine state instructions, and thus no
2136   instructions should be moved or combined across them.  This includes
2137   only volatile asms and UNSPEC_VOLATILE instructions.  */
2138
2139int
2140volatile_insn_p (rtx x)
2141{
2142  RTX_CODE code;
2143
2144  code = GET_CODE (x);
2145  switch (code)
2146    {
2147    case LABEL_REF:
2148    case SYMBOL_REF:
2149    case CONST_INT:
2150    case CONST:
2151    case CONST_DOUBLE:
2152    case CONST_VECTOR:
2153    case CC0:
2154    case PC:
2155    case REG:
2156    case SCRATCH:
2157    case CLOBBER:
2158    case ADDR_VEC:
2159    case ADDR_DIFF_VEC:
2160    case CALL:
2161    case MEM:
2162      return 0;
2163
2164    case UNSPEC_VOLATILE:
2165 /* case TRAP_IF: This isn't clear yet.  */
2166      return 1;
2167
2168    case ASM_INPUT:
2169    case ASM_OPERANDS:
2170      if (MEM_VOLATILE_P (x))
2171	return 1;
2172
2173    default:
2174      break;
2175    }
2176
2177  /* Recursively scan the operands of this expression.  */
2178
2179  {
2180    const char *fmt = GET_RTX_FORMAT (code);
2181    int i;
2182
2183    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2184      {
2185	if (fmt[i] == 'e')
2186	  {
2187	    if (volatile_insn_p (XEXP (x, i)))
2188	      return 1;
2189	  }
2190	else if (fmt[i] == 'E')
2191	  {
2192	    int j;
2193	    for (j = 0; j < XVECLEN (x, i); j++)
2194	      if (volatile_insn_p (XVECEXP (x, i, j)))
2195		return 1;
2196	  }
2197      }
2198  }
2199  return 0;
2200}
2201
2202/* Nonzero if X contains any volatile memory references
2203   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2204
2205int
2206volatile_refs_p (rtx x)
2207{
2208  RTX_CODE code;
2209
2210  code = GET_CODE (x);
2211  switch (code)
2212    {
2213    case LABEL_REF:
2214    case SYMBOL_REF:
2215    case CONST_INT:
2216    case CONST:
2217    case CONST_DOUBLE:
2218    case CONST_VECTOR:
2219    case CC0:
2220    case PC:
2221    case REG:
2222    case SCRATCH:
2223    case CLOBBER:
2224    case ADDR_VEC:
2225    case ADDR_DIFF_VEC:
2226      return 0;
2227
2228    case UNSPEC_VOLATILE:
2229      return 1;
2230
2231    case MEM:
2232    case ASM_INPUT:
2233    case ASM_OPERANDS:
2234      if (MEM_VOLATILE_P (x))
2235	return 1;
2236
2237    default:
2238      break;
2239    }
2240
2241  /* Recursively scan the operands of this expression.  */
2242
2243  {
2244    const char *fmt = GET_RTX_FORMAT (code);
2245    int i;
2246
2247    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2248      {
2249	if (fmt[i] == 'e')
2250	  {
2251	    if (volatile_refs_p (XEXP (x, i)))
2252	      return 1;
2253	  }
2254	else if (fmt[i] == 'E')
2255	  {
2256	    int j;
2257	    for (j = 0; j < XVECLEN (x, i); j++)
2258	      if (volatile_refs_p (XVECEXP (x, i, j)))
2259		return 1;
2260	  }
2261      }
2262  }
2263  return 0;
2264}
2265
2266/* Similar to above, except that it also rejects register pre- and post-
2267   incrementing.  */
2268
2269int
2270side_effects_p (rtx x)
2271{
2272  RTX_CODE code;
2273
2274  code = GET_CODE (x);
2275  switch (code)
2276    {
2277    case LABEL_REF:
2278    case SYMBOL_REF:
2279    case CONST_INT:
2280    case CONST:
2281    case CONST_DOUBLE:
2282    case CONST_VECTOR:
2283    case CC0:
2284    case PC:
2285    case REG:
2286    case SCRATCH:
2287    case ADDR_VEC:
2288    case ADDR_DIFF_VEC:
2289      return 0;
2290
2291    case CLOBBER:
2292      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2293	 when some combination can't be done.  If we see one, don't think
2294	 that we can simplify the expression.  */
2295      return (GET_MODE (x) != VOIDmode);
2296
2297    case PRE_INC:
2298    case PRE_DEC:
2299    case POST_INC:
2300    case POST_DEC:
2301    case PRE_MODIFY:
2302    case POST_MODIFY:
2303    case CALL:
2304    case UNSPEC_VOLATILE:
2305 /* case TRAP_IF: This isn't clear yet.  */
2306      return 1;
2307
2308    case MEM:
2309    case ASM_INPUT:
2310    case ASM_OPERANDS:
2311      if (MEM_VOLATILE_P (x))
2312	return 1;
2313
2314    default:
2315      break;
2316    }
2317
2318  /* Recursively scan the operands of this expression.  */
2319
2320  {
2321    const char *fmt = GET_RTX_FORMAT (code);
2322    int i;
2323
2324    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2325      {
2326	if (fmt[i] == 'e')
2327	  {
2328	    if (side_effects_p (XEXP (x, i)))
2329	      return 1;
2330	  }
2331	else if (fmt[i] == 'E')
2332	  {
2333	    int j;
2334	    for (j = 0; j < XVECLEN (x, i); j++)
2335	      if (side_effects_p (XVECEXP (x, i, j)))
2336		return 1;
2337	  }
2338      }
2339  }
2340  return 0;
2341}
2342
2343/* Return nonzero if evaluating rtx X might cause a trap.  */
2344
2345int
2346may_trap_p (rtx x)
2347{
2348  int i;
2349  enum rtx_code code;
2350  const char *fmt;
2351
2352  if (x == 0)
2353    return 0;
2354  code = GET_CODE (x);
2355  switch (code)
2356    {
2357      /* Handle these cases quickly.  */
2358    case CONST_INT:
2359    case CONST_DOUBLE:
2360    case CONST_VECTOR:
2361    case SYMBOL_REF:
2362    case LABEL_REF:
2363    case CONST:
2364    case PC:
2365    case CC0:
2366    case REG:
2367    case SCRATCH:
2368      return 0;
2369
2370    case ASM_INPUT:
2371    case UNSPEC_VOLATILE:
2372    case TRAP_IF:
2373      return 1;
2374
2375    case ASM_OPERANDS:
2376      return MEM_VOLATILE_P (x);
2377
2378      /* Memory ref can trap unless it's a static var or a stack slot.  */
2379    case MEM:
2380      if (MEM_NOTRAP_P (x))
2381	return 0;
2382      return rtx_addr_can_trap_p (XEXP (x, 0));
2383
2384      /* Division by a non-constant might trap.  */
2385    case DIV:
2386    case MOD:
2387    case UDIV:
2388    case UMOD:
2389      if (HONOR_SNANS (GET_MODE (x)))
2390	return 1;
2391      if (! CONSTANT_P (XEXP (x, 1))
2392	  || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2393	      && flag_trapping_math))
2394	return 1;
2395      if (XEXP (x, 1) == const0_rtx)
2396	return 1;
2397      break;
2398
2399    case EXPR_LIST:
2400      /* An EXPR_LIST is used to represent a function call.  This
2401	 certainly may trap.  */
2402      return 1;
2403
2404    case GE:
2405    case GT:
2406    case LE:
2407    case LT:
2408    case COMPARE:
2409      /* Some floating point comparisons may trap.  */
2410      if (!flag_trapping_math)
2411	break;
2412      /* ??? There is no machine independent way to check for tests that trap
2413	 when COMPARE is used, though many targets do make this distinction.
2414	 For instance, sparc uses CCFPE for compares which generate exceptions
2415	 and CCFP for compares which do not generate exceptions.  */
2416      if (HONOR_NANS (GET_MODE (x)))
2417	return 1;
2418      /* But often the compare has some CC mode, so check operand
2419	 modes as well.  */
2420      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2421	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2422	return 1;
2423      break;
2424
2425    case EQ:
2426    case NE:
2427      if (HONOR_SNANS (GET_MODE (x)))
2428	return 1;
2429      /* Often comparison is CC mode, so check operand modes.  */
2430      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2431	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2432	return 1;
2433      break;
2434
2435    case FIX:
2436      /* Conversion of floating point might trap.  */
2437      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2438	return 1;
2439      break;
2440
2441    case NEG:
2442    case ABS:
2443      /* These operations don't trap even with floating point.  */
2444      break;
2445
2446    default:
2447      /* Any floating arithmetic may trap.  */
2448      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2449	  && flag_trapping_math)
2450	return 1;
2451    }
2452
2453  fmt = GET_RTX_FORMAT (code);
2454  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2455    {
2456      if (fmt[i] == 'e')
2457	{
2458	  if (may_trap_p (XEXP (x, i)))
2459	    return 1;
2460	}
2461      else if (fmt[i] == 'E')
2462	{
2463	  int j;
2464	  for (j = 0; j < XVECLEN (x, i); j++)
2465	    if (may_trap_p (XVECEXP (x, i, j)))
2466	      return 1;
2467	}
2468    }
2469  return 0;
2470}
2471
2472/* Return nonzero if X contains a comparison that is not either EQ or NE,
2473   i.e., an inequality.  */
2474
2475int
2476inequality_comparisons_p (rtx x)
2477{
2478  const char *fmt;
2479  int len, i;
2480  enum rtx_code code = GET_CODE (x);
2481
2482  switch (code)
2483    {
2484    case REG:
2485    case SCRATCH:
2486    case PC:
2487    case CC0:
2488    case CONST_INT:
2489    case CONST_DOUBLE:
2490    case CONST_VECTOR:
2491    case CONST:
2492    case LABEL_REF:
2493    case SYMBOL_REF:
2494      return 0;
2495
2496    case LT:
2497    case LTU:
2498    case GT:
2499    case GTU:
2500    case LE:
2501    case LEU:
2502    case GE:
2503    case GEU:
2504      return 1;
2505
2506    default:
2507      break;
2508    }
2509
2510  len = GET_RTX_LENGTH (code);
2511  fmt = GET_RTX_FORMAT (code);
2512
2513  for (i = 0; i < len; i++)
2514    {
2515      if (fmt[i] == 'e')
2516	{
2517	  if (inequality_comparisons_p (XEXP (x, i)))
2518	    return 1;
2519	}
2520      else if (fmt[i] == 'E')
2521	{
2522	  int j;
2523	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2524	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2525	      return 1;
2526	}
2527    }
2528
2529  return 0;
2530}
2531
2532/* Replace any occurrence of FROM in X with TO.  The function does
2533   not enter into CONST_DOUBLE for the replace.
2534
2535   Note that copying is not done so X must not be shared unless all copies
2536   are to be modified.  */
2537
2538rtx
2539replace_rtx (rtx x, rtx from, rtx to)
2540{
2541  int i, j;
2542  const char *fmt;
2543
2544  /* The following prevents loops occurrence when we change MEM in
2545     CONST_DOUBLE onto the same CONST_DOUBLE.  */
2546  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2547    return x;
2548
2549  if (x == from)
2550    return to;
2551
2552  /* Allow this function to make replacements in EXPR_LISTs.  */
2553  if (x == 0)
2554    return 0;
2555
2556  if (GET_CODE (x) == SUBREG)
2557    {
2558      rtx new = replace_rtx (SUBREG_REG (x), from, to);
2559
2560      if (GET_CODE (new) == CONST_INT)
2561	{
2562	  x = simplify_subreg (GET_MODE (x), new,
2563			       GET_MODE (SUBREG_REG (x)),
2564			       SUBREG_BYTE (x));
2565	  if (! x)
2566	    abort ();
2567	}
2568      else
2569	SUBREG_REG (x) = new;
2570
2571      return x;
2572    }
2573  else if (GET_CODE (x) == ZERO_EXTEND)
2574    {
2575      rtx new = replace_rtx (XEXP (x, 0), from, to);
2576
2577      if (GET_CODE (new) == CONST_INT)
2578	{
2579	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2580					new, GET_MODE (XEXP (x, 0)));
2581	  if (! x)
2582	    abort ();
2583	}
2584      else
2585	XEXP (x, 0) = new;
2586
2587      return x;
2588    }
2589
2590  fmt = GET_RTX_FORMAT (GET_CODE (x));
2591  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2592    {
2593      if (fmt[i] == 'e')
2594	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2595      else if (fmt[i] == 'E')
2596	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2597	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2598    }
2599
2600  return x;
2601}
2602
2603/* Throughout the rtx X, replace many registers according to REG_MAP.
2604   Return the replacement for X (which may be X with altered contents).
2605   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2606   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2607
2608   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
2609   should not be mapped to pseudos or vice versa since validate_change
2610   is not called.
2611
2612   If REPLACE_DEST is 1, replacements are also done in destinations;
2613   otherwise, only sources are replaced.  */
2614
2615rtx
2616replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2617{
2618  enum rtx_code code;
2619  int i;
2620  const char *fmt;
2621
2622  if (x == 0)
2623    return x;
2624
2625  code = GET_CODE (x);
2626  switch (code)
2627    {
2628    case SCRATCH:
2629    case PC:
2630    case CC0:
2631    case CONST_INT:
2632    case CONST_DOUBLE:
2633    case CONST_VECTOR:
2634    case CONST:
2635    case SYMBOL_REF:
2636    case LABEL_REF:
2637      return x;
2638
2639    case REG:
2640      /* Verify that the register has an entry before trying to access it.  */
2641      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2642	{
2643	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
2644	     this replacement occurs more than once then each instance will
2645	     get distinct rtx.  */
2646	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2647	    return copy_rtx (reg_map[REGNO (x)]);
2648	  return reg_map[REGNO (x)];
2649	}
2650      return x;
2651
2652    case SUBREG:
2653      /* Prevent making nested SUBREGs.  */
2654      if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2655	  && reg_map[REGNO (SUBREG_REG (x))] != 0
2656	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2657	{
2658	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2659	  return simplify_gen_subreg (GET_MODE (x), map_val,
2660				      GET_MODE (SUBREG_REG (x)),
2661				      SUBREG_BYTE (x));
2662	}
2663      break;
2664
2665    case SET:
2666      if (replace_dest)
2667	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2668
2669      else if (GET_CODE (SET_DEST (x)) == MEM
2670	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2671	/* Even if we are not to replace destinations, replace register if it
2672	   is CONTAINED in destination (destination is memory or
2673	   STRICT_LOW_PART).  */
2674	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2675					       reg_map, nregs, 0);
2676      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2677	/* Similarly, for ZERO_EXTRACT we replace all operands.  */
2678	break;
2679
2680      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2681      return x;
2682
2683    default:
2684      break;
2685    }
2686
2687  fmt = GET_RTX_FORMAT (code);
2688  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2689    {
2690      if (fmt[i] == 'e')
2691	XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2692      else if (fmt[i] == 'E')
2693	{
2694	  int j;
2695	  for (j = 0; j < XVECLEN (x, i); j++)
2696	    XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2697					      nregs, replace_dest);
2698	}
2699    }
2700  return x;
2701}
2702
2703/* Replace occurrences of the old label in *X with the new one.
2704   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */
2705
2706int
2707replace_label (rtx *x, void *data)
2708{
2709  rtx l = *x;
2710  rtx old_label = ((replace_label_data *) data)->r1;
2711  rtx new_label = ((replace_label_data *) data)->r2;
2712  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2713
2714  if (l == NULL_RTX)
2715    return 0;
2716
2717  if (GET_CODE (l) == SYMBOL_REF
2718      && CONSTANT_POOL_ADDRESS_P (l))
2719    {
2720      rtx c = get_pool_constant (l);
2721      if (rtx_referenced_p (old_label, c))
2722	{
2723	  rtx new_c, new_l;
2724	  replace_label_data *d = (replace_label_data *) data;
2725
2726	  /* Create a copy of constant C; replace the label inside
2727	     but do not update LABEL_NUSES because uses in constant pool
2728	     are not counted.  */
2729	  new_c = copy_rtx (c);
2730	  d->update_label_nuses = false;
2731	  for_each_rtx (&new_c, replace_label, data);
2732	  d->update_label_nuses = update_label_nuses;
2733
2734	  /* Add the new constant NEW_C to constant pool and replace
2735	     the old reference to constant by new reference.  */
2736	  new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2737	  *x = replace_rtx (l, l, new_l);
2738	}
2739      return 0;
2740    }
2741
2742  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2743     field.  This is not handled by for_each_rtx because it doesn't
2744     handle unprinted ('0') fields.  */
2745  if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2746    JUMP_LABEL (l) = new_label;
2747
2748  if ((GET_CODE (l) == LABEL_REF
2749       || GET_CODE (l) == INSN_LIST)
2750      && XEXP (l, 0) == old_label)
2751    {
2752      XEXP (l, 0) = new_label;
2753      if (update_label_nuses)
2754	{
2755	  ++LABEL_NUSES (new_label);
2756	  --LABEL_NUSES (old_label);
2757	}
2758      return 0;
2759    }
2760
2761  return 0;
2762}
2763
2764/* When *BODY is equal to X or X is directly referenced by *BODY
2765   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2766   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */
2767
2768static int
2769rtx_referenced_p_1 (rtx *body, void *x)
2770{
2771  rtx y = (rtx) x;
2772
2773  if (*body == NULL_RTX)
2774    return y == NULL_RTX;
2775
2776  /* Return true if a label_ref *BODY refers to label Y.  */
2777  if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2778    return XEXP (*body, 0) == y;
2779
2780  /* If *BODY is a reference to pool constant traverse the constant.  */
2781  if (GET_CODE (*body) == SYMBOL_REF
2782      && CONSTANT_POOL_ADDRESS_P (*body))
2783    return rtx_referenced_p (y, get_pool_constant (*body));
2784
2785  /* By default, compare the RTL expressions.  */
2786  return rtx_equal_p (*body, y);
2787}
2788
2789/* Return true if X is referenced in BODY.  */
2790
2791int
2792rtx_referenced_p (rtx x, rtx body)
2793{
2794  return for_each_rtx (&body, rtx_referenced_p_1, x);
2795}
2796
2797/* If INSN is a tablejump return true and store the label (before jump table) to
2798   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */
2799
2800bool
2801tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2802{
2803  rtx label, table;
2804
2805  if (GET_CODE (insn) == JUMP_INSN
2806      && (label = JUMP_LABEL (insn)) != NULL_RTX
2807      && (table = next_active_insn (label)) != NULL_RTX
2808      && GET_CODE (table) == JUMP_INSN
2809      && (GET_CODE (PATTERN (table)) == ADDR_VEC
2810	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2811    {
2812      if (labelp)
2813	*labelp = label;
2814      if (tablep)
2815	*tablep = table;
2816      return true;
2817    }
2818  return false;
2819}
2820
2821/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2822   constant that is not in the constant pool and not in the condition
2823   of an IF_THEN_ELSE.  */
2824
2825static int
2826computed_jump_p_1 (rtx x)
2827{
2828  enum rtx_code code = GET_CODE (x);
2829  int i, j;
2830  const char *fmt;
2831
2832  switch (code)
2833    {
2834    case LABEL_REF:
2835    case PC:
2836      return 0;
2837
2838    case CONST:
2839    case CONST_INT:
2840    case CONST_DOUBLE:
2841    case CONST_VECTOR:
2842    case SYMBOL_REF:
2843    case REG:
2844      return 1;
2845
2846    case MEM:
2847      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2848		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2849
2850    case IF_THEN_ELSE:
2851      return (computed_jump_p_1 (XEXP (x, 1))
2852	      || computed_jump_p_1 (XEXP (x, 2)));
2853
2854    default:
2855      break;
2856    }
2857
2858  fmt = GET_RTX_FORMAT (code);
2859  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2860    {
2861      if (fmt[i] == 'e'
2862	  && computed_jump_p_1 (XEXP (x, i)))
2863	return 1;
2864
2865      else if (fmt[i] == 'E')
2866	for (j = 0; j < XVECLEN (x, i); j++)
2867	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2868	    return 1;
2869    }
2870
2871  return 0;
2872}
2873
2874/* Return nonzero if INSN is an indirect jump (aka computed jump).
2875
2876   Tablejumps and casesi insns are not considered indirect jumps;
2877   we can recognize them by a (use (label_ref)).  */
2878
2879int
2880computed_jump_p (rtx insn)
2881{
2882  int i;
2883  if (GET_CODE (insn) == JUMP_INSN)
2884    {
2885      rtx pat = PATTERN (insn);
2886
2887      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2888	return 0;
2889      else if (GET_CODE (pat) == PARALLEL)
2890	{
2891	  int len = XVECLEN (pat, 0);
2892	  int has_use_labelref = 0;
2893
2894	  for (i = len - 1; i >= 0; i--)
2895	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2896		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2897		    == LABEL_REF))
2898	      has_use_labelref = 1;
2899
2900	  if (! has_use_labelref)
2901	    for (i = len - 1; i >= 0; i--)
2902	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2903		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2904		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2905		return 1;
2906	}
2907      else if (GET_CODE (pat) == SET
2908	       && SET_DEST (pat) == pc_rtx
2909	       && computed_jump_p_1 (SET_SRC (pat)))
2910	return 1;
2911    }
2912  return 0;
2913}
2914
2915/* Traverse X via depth-first search, calling F for each
2916   sub-expression (including X itself).  F is also passed the DATA.
2917   If F returns -1, do not traverse sub-expressions, but continue
2918   traversing the rest of the tree.  If F ever returns any other
2919   nonzero value, stop the traversal, and return the value returned
2920   by F.  Otherwise, return 0.  This function does not traverse inside
2921   tree structure that contains RTX_EXPRs, or into sub-expressions
2922   whose format code is `0' since it is not known whether or not those
2923   codes are actually RTL.
2924
2925   This routine is very general, and could (should?) be used to
2926   implement many of the other routines in this file.  */
2927
2928int
2929for_each_rtx (rtx *x, rtx_function f, void *data)
2930{
2931  int result;
2932  int length;
2933  const char *format;
2934  int i;
2935
2936  /* Call F on X.  */
2937  result = (*f) (x, data);
2938  if (result == -1)
2939    /* Do not traverse sub-expressions.  */
2940    return 0;
2941  else if (result != 0)
2942    /* Stop the traversal.  */
2943    return result;
2944
2945  if (*x == NULL_RTX)
2946    /* There are no sub-expressions.  */
2947    return 0;
2948
2949  length = GET_RTX_LENGTH (GET_CODE (*x));
2950  format = GET_RTX_FORMAT (GET_CODE (*x));
2951
2952  for (i = 0; i < length; ++i)
2953    {
2954      switch (format[i])
2955	{
2956	case 'e':
2957	  result = for_each_rtx (&XEXP (*x, i), f, data);
2958	  if (result != 0)
2959	    return result;
2960	  break;
2961
2962	case 'V':
2963	case 'E':
2964	  if (XVEC (*x, i) != 0)
2965	    {
2966	      int j;
2967	      for (j = 0; j < XVECLEN (*x, i); ++j)
2968		{
2969		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2970		  if (result != 0)
2971		    return result;
2972		}
2973	    }
2974	  break;
2975
2976	default:
2977	  /* Nothing to do.  */
2978	  break;
2979	}
2980
2981    }
2982
2983  return 0;
2984}
2985
2986/* Searches X for any reference to REGNO, returning the rtx of the
2987   reference found if any.  Otherwise, returns NULL_RTX.  */
2988
2989rtx
2990regno_use_in (unsigned int regno, rtx x)
2991{
2992  const char *fmt;
2993  int i, j;
2994  rtx tem;
2995
2996  if (GET_CODE (x) == REG && REGNO (x) == regno)
2997    return x;
2998
2999  fmt = GET_RTX_FORMAT (GET_CODE (x));
3000  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3001    {
3002      if (fmt[i] == 'e')
3003	{
3004	  if ((tem = regno_use_in (regno, XEXP (x, i))))
3005	    return tem;
3006	}
3007      else if (fmt[i] == 'E')
3008	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3009	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3010	    return tem;
3011    }
3012
3013  return NULL_RTX;
3014}
3015
3016/* Return a value indicating whether OP, an operand of a commutative
3017   operation, is preferred as the first or second operand.  The higher
3018   the value, the stronger the preference for being the first operand.
3019   We use negative values to indicate a preference for the first operand
3020   and positive values for the second operand.  */
3021
3022int
3023commutative_operand_precedence (rtx op)
3024{
3025  /* Constants always come the second operand.  Prefer "nice" constants.  */
3026  if (GET_CODE (op) == CONST_INT)
3027    return -5;
3028  if (GET_CODE (op) == CONST_DOUBLE)
3029    return -4;
3030  if (CONSTANT_P (op))
3031    return -3;
3032
3033  /* SUBREGs of objects should come second.  */
3034  if (GET_CODE (op) == SUBREG
3035      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3036    return -2;
3037
3038  /* If only one operand is a `neg', `not',
3039    `mult', `plus', or `minus' expression, it will be the first
3040    operand.  */
3041  if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3042      || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3043      || GET_CODE (op) == MINUS)
3044    return 2;
3045
3046  /* Complex expressions should be the first, so decrease priority
3047     of objects.  */
3048  if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3049    return -1;
3050  return 0;
3051}
3052
3053/* Return 1 iff it is necessary to swap operands of commutative operation
3054   in order to canonicalize expression.  */
3055
3056int
3057swap_commutative_operands_p (rtx x, rtx y)
3058{
3059  return (commutative_operand_precedence (x)
3060	  < commutative_operand_precedence (y));
3061}
3062
3063/* Return 1 if X is an autoincrement side effect and the register is
3064   not the stack pointer.  */
3065int
3066auto_inc_p (rtx x)
3067{
3068  switch (GET_CODE (x))
3069    {
3070    case PRE_INC:
3071    case POST_INC:
3072    case PRE_DEC:
3073    case POST_DEC:
3074    case PRE_MODIFY:
3075    case POST_MODIFY:
3076      /* There are no REG_INC notes for SP.  */
3077      if (XEXP (x, 0) != stack_pointer_rtx)
3078	return 1;
3079    default:
3080      break;
3081    }
3082  return 0;
3083}
3084
3085/* Return 1 if the sequence of instructions beginning with FROM and up
3086   to and including TO is safe to move.  If NEW_TO is non-NULL, and
3087   the sequence is not already safe to move, but can be easily
3088   extended to a sequence which is safe, then NEW_TO will point to the
3089   end of the extended sequence.
3090
3091   For now, this function only checks that the region contains whole
3092   exception regions, but it could be extended to check additional
3093   conditions as well.  */
3094
3095int
3096insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3097{
3098  int eh_region_count = 0;
3099  int past_to_p = 0;
3100  rtx r = from;
3101
3102  /* By default, assume the end of the region will be what was
3103     suggested.  */
3104  if (new_to)
3105    *new_to = to;
3106
3107  while (r)
3108    {
3109      if (GET_CODE (r) == NOTE)
3110	{
3111	  switch (NOTE_LINE_NUMBER (r))
3112	    {
3113	    case NOTE_INSN_EH_REGION_BEG:
3114	      ++eh_region_count;
3115	      break;
3116
3117	    case NOTE_INSN_EH_REGION_END:
3118	      if (eh_region_count == 0)
3119		/* This sequence of instructions contains the end of
3120		   an exception region, but not he beginning.  Moving
3121		   it will cause chaos.  */
3122		return 0;
3123
3124	      --eh_region_count;
3125	      break;
3126
3127	    default:
3128	      break;
3129	    }
3130	}
3131      else if (past_to_p)
3132	/* If we've passed TO, and we see a non-note instruction, we
3133	   can't extend the sequence to a movable sequence.  */
3134	return 0;
3135
3136      if (r == to)
3137	{
3138	  if (!new_to)
3139	    /* It's OK to move the sequence if there were matched sets of
3140	       exception region notes.  */
3141	    return eh_region_count == 0;
3142
3143	  past_to_p = 1;
3144	}
3145
3146      /* It's OK to move the sequence if there were matched sets of
3147	 exception region notes.  */
3148      if (past_to_p && eh_region_count == 0)
3149	{
3150	  *new_to = r;
3151	  return 1;
3152	}
3153
3154      /* Go to the next instruction.  */
3155      r = NEXT_INSN (r);
3156    }
3157
3158  return 0;
3159}
3160
3161/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3162int
3163loc_mentioned_in_p (rtx *loc, rtx in)
3164{
3165  enum rtx_code code = GET_CODE (in);
3166  const char *fmt = GET_RTX_FORMAT (code);
3167  int i, j;
3168
3169  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3170    {
3171      if (loc == &in->u.fld[i].rtx)
3172	return 1;
3173      if (fmt[i] == 'e')
3174	{
3175	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
3176	    return 1;
3177	}
3178      else if (fmt[i] == 'E')
3179	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3180	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3181	    return 1;
3182    }
3183  return 0;
3184}
3185
3186/* Given a subreg X, return the bit offset where the subreg begins
3187   (counting from the least significant bit of the reg).  */
3188
3189unsigned int
3190subreg_lsb (rtx x)
3191{
3192  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3193  enum machine_mode mode = GET_MODE (x);
3194  unsigned int bitpos;
3195  unsigned int byte;
3196  unsigned int word;
3197
3198  /* A paradoxical subreg begins at bit position 0.  */
3199  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3200    return 0;
3201
3202  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3203    /* If the subreg crosses a word boundary ensure that
3204       it also begins and ends on a word boundary.  */
3205    if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3206	 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3207	&& (SUBREG_BYTE (x) % UNITS_PER_WORD
3208	    || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3209	abort ();
3210
3211  if (WORDS_BIG_ENDIAN)
3212    word = (GET_MODE_SIZE (inner_mode)
3213	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3214  else
3215    word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3216  bitpos = word * BITS_PER_WORD;
3217
3218  if (BYTES_BIG_ENDIAN)
3219    byte = (GET_MODE_SIZE (inner_mode)
3220	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3221  else
3222    byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3223  bitpos += byte * BITS_PER_UNIT;
3224
3225  return bitpos;
3226}
3227
3228/* This function returns the regno offset of a subreg expression.
3229   xregno - A regno of an inner hard subreg_reg (or what will become one).
3230   xmode  - The mode of xregno.
3231   offset - The byte offset.
3232   ymode  - The mode of a top level SUBREG (or what may become one).
3233   RETURN - The regno offset which would be used.  */
3234unsigned int
3235subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3236		     unsigned int offset, enum machine_mode ymode)
3237{
3238  int nregs_xmode, nregs_ymode;
3239  int mode_multiple, nregs_multiple;
3240  int y_offset;
3241
3242  if (xregno >= FIRST_PSEUDO_REGISTER)
3243    abort ();
3244
3245  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3246  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3247
3248  /* If this is a big endian paradoxical subreg, which uses more actual
3249     hard registers than the original register, we must return a negative
3250     offset so that we find the proper highpart of the register.  */
3251  if (offset == 0
3252      && nregs_ymode > nregs_xmode
3253      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3254	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3255    return nregs_xmode - nregs_ymode;
3256
3257  if (offset == 0 || nregs_xmode == nregs_ymode)
3258    return 0;
3259
3260  /* size of ymode must not be greater than the size of xmode.  */
3261  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3262  if (mode_multiple == 0)
3263    abort ();
3264
3265  y_offset = offset / GET_MODE_SIZE (ymode);
3266  nregs_multiple =  nregs_xmode / nregs_ymode;
3267  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3268}
3269
3270/* This function returns true when the offset is representable via
3271   subreg_offset in the given regno.
3272   xregno - A regno of an inner hard subreg_reg (or what will become one).
3273   xmode  - The mode of xregno.
3274   offset - The byte offset.
3275   ymode  - The mode of a top level SUBREG (or what may become one).
3276   RETURN - The regno offset which would be used.  */
3277bool
3278subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3279			       unsigned int offset, enum machine_mode ymode)
3280{
3281  int nregs_xmode, nregs_ymode;
3282  int mode_multiple, nregs_multiple;
3283  int y_offset;
3284
3285  if (xregno >= FIRST_PSEUDO_REGISTER)
3286    abort ();
3287
3288  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3289  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3290
3291  /* paradoxical subregs are always valid.  */
3292  if (offset == 0
3293      && nregs_ymode > nregs_xmode
3294      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3295	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3296    return true;
3297
3298  /* Lowpart subregs are always valid.  */
3299  if (offset == subreg_lowpart_offset (ymode, xmode))
3300    return true;
3301
3302#ifdef ENABLE_CHECKING
3303  /* This should always pass, otherwise we don't know how to verify the
3304     constraint.  These conditions may be relaxed but subreg_offset would
3305     need to be redesigned.  */
3306  if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3307      || GET_MODE_SIZE (ymode) % nregs_ymode
3308      || nregs_xmode % nregs_ymode)
3309    abort ();
3310#endif
3311
3312  /* The XMODE value can be seen as a vector of NREGS_XMODE
3313     values.  The subreg must represent a lowpart of given field.
3314     Compute what field it is.  */
3315  offset -= subreg_lowpart_offset (ymode,
3316				   mode_for_size (GET_MODE_BITSIZE (xmode)
3317						  / nregs_xmode,
3318						  MODE_INT, 0));
3319
3320  /* size of ymode must not be greater than the size of xmode.  */
3321  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3322  if (mode_multiple == 0)
3323    abort ();
3324
3325  y_offset = offset / GET_MODE_SIZE (ymode);
3326  nregs_multiple =  nregs_xmode / nregs_ymode;
3327#ifdef ENABLE_CHECKING
3328  if (offset % GET_MODE_SIZE (ymode)
3329      || mode_multiple % nregs_multiple)
3330    abort ();
3331#endif
3332  return (!(y_offset % (mode_multiple / nregs_multiple)));
3333}
3334
3335/* Return the final regno that a subreg expression refers to.  */
3336unsigned int
3337subreg_regno (rtx x)
3338{
3339  unsigned int ret;
3340  rtx subreg = SUBREG_REG (x);
3341  int regno = REGNO (subreg);
3342
3343  ret = regno + subreg_regno_offset (regno,
3344				     GET_MODE (subreg),
3345				     SUBREG_BYTE (x),
3346				     GET_MODE (x));
3347  return ret;
3348
3349}
3350struct parms_set_data
3351{
3352  int nregs;
3353  HARD_REG_SET regs;
3354};
3355
3356/* Helper function for noticing stores to parameter registers.  */
3357static void
3358parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3359{
3360  struct parms_set_data *d = data;
3361  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3362      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3363    {
3364      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3365      d->nregs--;
3366    }
3367}
3368
3369/* Look backward for first parameter to be loaded.
3370   Do not skip BOUNDARY.  */
3371rtx
3372find_first_parameter_load (rtx call_insn, rtx boundary)
3373{
3374  struct parms_set_data parm;
3375  rtx p, before;
3376
3377  /* Since different machines initialize their parameter registers
3378     in different orders, assume nothing.  Collect the set of all
3379     parameter registers.  */
3380  CLEAR_HARD_REG_SET (parm.regs);
3381  parm.nregs = 0;
3382  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3383    if (GET_CODE (XEXP (p, 0)) == USE
3384	&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3385      {
3386	if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3387	  abort ();
3388
3389	/* We only care about registers which can hold function
3390	   arguments.  */
3391	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3392	  continue;
3393
3394	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3395	parm.nregs++;
3396      }
3397  before = call_insn;
3398
3399  /* Search backward for the first set of a register in this set.  */
3400  while (parm.nregs && before != boundary)
3401    {
3402      before = PREV_INSN (before);
3403
3404      /* It is possible that some loads got CSEed from one call to
3405         another.  Stop in that case.  */
3406      if (GET_CODE (before) == CALL_INSN)
3407	break;
3408
3409      /* Our caller needs either ensure that we will find all sets
3410         (in case code has not been optimized yet), or take care
3411         for possible labels in a way by setting boundary to preceding
3412         CODE_LABEL.  */
3413      if (GET_CODE (before) == CODE_LABEL)
3414	{
3415	  if (before != boundary)
3416	    abort ();
3417	  break;
3418	}
3419
3420      if (INSN_P (before))
3421	note_stores (PATTERN (before), parms_set, &parm);
3422    }
3423  return before;
3424}
3425
3426/* Return true if we should avoid inserting code between INSN and preceding
3427   call instruction.  */
3428
3429bool
3430keep_with_call_p (rtx insn)
3431{
3432  rtx set;
3433
3434  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3435    {
3436      if (GET_CODE (SET_DEST (set)) == REG
3437	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3438	  && fixed_regs[REGNO (SET_DEST (set))]
3439	  && general_operand (SET_SRC (set), VOIDmode))
3440	return true;
3441      if (GET_CODE (SET_SRC (set)) == REG
3442	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3443	  && GET_CODE (SET_DEST (set)) == REG
3444	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3445	return true;
3446      /* There may be a stack pop just after the call and before the store
3447	 of the return register.  Search for the actual store when deciding
3448	 if we can break or not.  */
3449      if (SET_DEST (set) == stack_pointer_rtx)
3450	{
3451	  rtx i2 = next_nonnote_insn (insn);
3452	  if (i2 && keep_with_call_p (i2))
3453	    return true;
3454	}
3455    }
3456  return false;
3457}
3458
3459/* Return true when store to register X can be hoisted to the place
3460   with LIVE registers (can be NULL).  Value VAL contains destination
3461   whose value will be used.  */
3462
3463static bool
3464hoist_test_store (rtx x, rtx val, regset live)
3465{
3466  if (GET_CODE (x) == SCRATCH)
3467    return true;
3468
3469  if (rtx_equal_p (x, val))
3470    return true;
3471
3472  /* Allow subreg of X in case it is not writing just part of multireg pseudo.
3473     Then we would need to update all users to care hoisting the store too.
3474     Caller may represent that by specifying whole subreg as val.  */
3475
3476  if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3477    {
3478      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3479	  && GET_MODE_BITSIZE (GET_MODE (x)) <
3480	  GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3481	return false;
3482      return true;
3483    }
3484  if (GET_CODE (x) == SUBREG)
3485    x = SUBREG_REG (x);
3486
3487  /* Anything except register store is not hoistable.  This includes the
3488     partial stores to registers.  */
3489
3490  if (!REG_P (x))
3491    return false;
3492
3493  /* Pseudo registers can be always replaced by another pseudo to avoid
3494     the side effect, for hard register we must ensure that they are dead.
3495     Eventually we may want to add code to try turn pseudos to hards, but it
3496     is unlikely useful.  */
3497
3498  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3499    {
3500      int regno = REGNO (x);
3501      int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3502
3503      if (!live)
3504	return false;
3505      if (REGNO_REG_SET_P (live, regno))
3506	return false;
3507      while (--n > 0)
3508	if (REGNO_REG_SET_P (live, regno + n))
3509	  return false;
3510    }
3511  return true;
3512}
3513
3514
3515/* Return true if INSN can be hoisted to place with LIVE hard registers
3516   (LIVE can be NULL when unknown).  VAL is expected to be stored by the insn
3517   and used by the hoisting pass.  */
3518
3519bool
3520can_hoist_insn_p (rtx insn, rtx val, regset live)
3521{
3522  rtx pat = PATTERN (insn);
3523  int i;
3524
3525  /* It probably does not worth the complexity to handle multiple
3526     set stores.  */
3527  if (!single_set (insn))
3528    return false;
3529  /* We can move CALL_INSN, but we need to check that all caller clobbered
3530     regs are dead.  */
3531  if (GET_CODE (insn) == CALL_INSN)
3532    return false;
3533  /* In future we will handle hoisting of libcall sequences, but
3534     give up for now.  */
3535  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3536    return false;
3537  switch (GET_CODE (pat))
3538    {
3539    case SET:
3540      if (!hoist_test_store (SET_DEST (pat), val, live))
3541	return false;
3542      break;
3543    case USE:
3544      /* USES do have sick semantics, so do not move them.  */
3545      return false;
3546      break;
3547    case CLOBBER:
3548      if (!hoist_test_store (XEXP (pat, 0), val, live))
3549	return false;
3550      break;
3551    case PARALLEL:
3552      for (i = 0; i < XVECLEN (pat, 0); i++)
3553	{
3554	  rtx x = XVECEXP (pat, 0, i);
3555	  switch (GET_CODE (x))
3556	    {
3557	    case SET:
3558	      if (!hoist_test_store (SET_DEST (x), val, live))
3559		return false;
3560	      break;
3561	    case USE:
3562	      /* We need to fix callers to really ensure availability
3563	         of all values insn uses, but for now it is safe to prohibit
3564		 hoisting of any insn having such a hidden uses.  */
3565	      return false;
3566	      break;
3567	    case CLOBBER:
3568	      if (!hoist_test_store (SET_DEST (x), val, live))
3569		return false;
3570	      break;
3571	    default:
3572	      break;
3573	    }
3574	}
3575      break;
3576    default:
3577      abort ();
3578    }
3579  return true;
3580}
3581
3582/* Update store after hoisting - replace all stores to pseudo registers
3583   by new ones to avoid clobbering of values except for store to VAL that will
3584   be updated to NEW.  */
3585
3586static void
3587hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3588{
3589  rtx x = *xp;
3590
3591  if (GET_CODE (x) == SCRATCH)
3592    return;
3593
3594  if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3595    validate_change (insn, xp,
3596		     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3597					  SUBREG_BYTE (x)), 1);
3598  if (rtx_equal_p (x, val))
3599    {
3600      validate_change (insn, xp, new, 1);
3601      return;
3602    }
3603  if (GET_CODE (x) == SUBREG)
3604    {
3605      xp = &SUBREG_REG (x);
3606      x = *xp;
3607    }
3608
3609  if (!REG_P (x))
3610    abort ();
3611
3612  /* We've verified that hard registers are dead, so we may keep the side
3613     effect.  Otherwise replace it by new pseudo.  */
3614  if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3615    validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3616  REG_NOTES (insn)
3617    = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3618}
3619
3620/* Create a copy of INSN after AFTER replacing store of VAL to NEW
3621   and each other side effect to pseudo register by new pseudo register.  */
3622
3623rtx
3624hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3625{
3626  rtx pat;
3627  int i;
3628  rtx note;
3629
3630  insn = emit_copy_of_insn_after (insn, after);
3631  pat = PATTERN (insn);
3632
3633  /* Remove REG_UNUSED notes as we will re-emit them.  */
3634  while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3635    remove_note (insn, note);
3636
3637  /* To get this working callers must ensure to move everything referenced
3638     by REG_EQUAL/REG_EQUIV notes too.  Lets remove them, it is probably
3639     easier.  */
3640  while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3641    remove_note (insn, note);
3642  while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3643    remove_note (insn, note);
3644
3645  /* Remove REG_DEAD notes as they might not be valid anymore in case
3646     we create redundancy.  */
3647  while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3648    remove_note (insn, note);
3649  switch (GET_CODE (pat))
3650    {
3651    case SET:
3652      hoist_update_store (insn, &SET_DEST (pat), val, new);
3653      break;
3654    case USE:
3655      break;
3656    case CLOBBER:
3657      hoist_update_store (insn, &XEXP (pat, 0), val, new);
3658      break;
3659    case PARALLEL:
3660      for (i = 0; i < XVECLEN (pat, 0); i++)
3661	{
3662	  rtx x = XVECEXP (pat, 0, i);
3663	  switch (GET_CODE (x))
3664	    {
3665	    case SET:
3666	      hoist_update_store (insn, &SET_DEST (x), val, new);
3667	      break;
3668	    case USE:
3669	      break;
3670	    case CLOBBER:
3671	      hoist_update_store (insn, &SET_DEST (x), val, new);
3672	      break;
3673	    default:
3674	      break;
3675	    }
3676	}
3677      break;
3678    default:
3679      abort ();
3680    }
3681  if (!apply_change_group ())
3682    abort ();
3683
3684  return insn;
3685}
3686
3687rtx
3688hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3689{
3690  rtx new_insn;
3691
3692  /* We cannot insert instructions on an abnormal critical edge.
3693     It will be easier to find the culprit if we die now.  */
3694  if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3695    abort ();
3696
3697  /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3698     stuff.  We also emit CALL_INSNS and firends.  */
3699  if (e->insns == NULL_RTX)
3700    {
3701      start_sequence ();
3702      emit_note (NOTE_INSN_DELETED);
3703    }
3704  else
3705    push_to_sequence (e->insns);
3706
3707  new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3708
3709  e->insns = get_insns ();
3710  end_sequence ();
3711  return new_insn;
3712}
3713
3714/* Return true if LABEL is a target of JUMP_INSN.  This applies only
3715   to non-complex jumps.  That is, direct unconditional, conditional,
3716   and tablejumps, but not computed jumps or returns.  It also does
3717   not apply to the fallthru case of a conditional jump.  */
3718
3719bool
3720label_is_jump_target_p (rtx label, rtx jump_insn)
3721{
3722  rtx tmp = JUMP_LABEL (jump_insn);
3723
3724  if (label == tmp)
3725    return true;
3726
3727  if (tablejump_p (jump_insn, NULL, &tmp))
3728    {
3729      rtvec vec = XVEC (PATTERN (tmp),
3730			GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3731      int i, veclen = GET_NUM_ELEM (vec);
3732
3733      for (i = 0; i < veclen; ++i)
3734	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3735	  return true;
3736    }
3737
3738  return false;
3739}
3740
3741