/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the vestigial remains of the once-famous jump-optimization
   pass of the compiler.  It now contains essentially a set of utility
   functions for operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

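/* As an illustrative sketch (insn and label numbers are made up), a
   conditional branch and its target are tied together like this:

       (jump_insn 10 ... (set (pc)
			      (if_then_else (eq (reg:CC 17) (const_int 0))
					    (label_ref 23)
					    (pc))))
       ...
       (code_label 23 ...)

   Here JUMP_LABEL (insn 10) is code_label 23, and insn 10 contributes
   one to LABEL_NUSES (label 23).  Deleting insn 10 drops the use
   count, and a label whose count reaches zero can itself be
   deleted.  */
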
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "rtl-iter.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *);
static int invert_exp_1 (rtx, rtx_insn *);

/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    {
      rtx_insn *insn;
      unsigned int i;
      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
	if (LABEL_P (insn))
	  LABEL_NUSES (insn)++;
    }
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
	{
	  rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
	  if (!prev)
	    continue;

	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    {
	      basic_block bb = BLOCK_FOR_INSN (prev);
	      rtx_insn *end = PREV_INSN (insn);
	      reorder_insns_nobb (insn, insn, prev);
	      if (bb)
		{
		  /* If the backend called compute_bb_for_insn in machine
		     reorg and didn't call free_bb_for_insn again, preserve
		     basic block boundaries.  Move the end of the basic block
		     to PREV since it is followed by a barrier now, and clear
		     BLOCK_FOR_INSN on the following notes.
		     ???  Maybe the proper solution for the targets that have
		     a cfg around after machine reorg is not to run the
		     cleanup_barriers pass at all.  */
		  BB_END (bb) = prev;
		  do
		    {
		      prev = NEXT_INSN (prev);
		      if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
			BLOCK_FOR_INSN (prev) = NULL;
		    }
		  while (prev != end);
		}
	    }
	}
    }
  return 0;
}

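/* An illustrative sketch of what the pass above does: an input chain of

       insn A - note - barrier - barrier

   is rewritten to

       insn A - barrier - note

   i.e. the barrier is moved directly after the last real insn and the
   duplicate barrier is deleted.  */
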
namespace {

const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}


/* Initialize LABEL_NUSES and JUMP_LABEL fields; add REG_LABEL_TARGET
   notes for the remaining targets of JUMP_P insns.  Delete any
   REG_LABEL_OPERAND notes whose labels no longer occur in the insn.  */

static void
init_label_info (rtx_insn *f)
{
  rtx_insn *insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0)
		      == label_ref_label (SET_SRC (label_set)));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}

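/* For example (an illustrative sketch; insn and label numbers are made
   up), given the pair

       (insn 8 ... (set (reg:SI 100) (label_ref 23)))
		;; with a REG_LABEL_OPERAND note for label 23
       (jump_insn 9 ... (set (pc) (reg:SI 100)))

   maybe_propagate_label_ref records label 23 as JUMP_LABEL (insn 9),
   since the indirect jump can only ever transfer to that label.  */
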
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! insn->deleted ());
	      if (NONDEBUG_INSN_P (insn))
		mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	}
    }
  else
    {
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (insn->deleted ())
	    ;
	  else if (LABEL_P (insn))
	    prev_nonjump_insn = NULL;
	  else if (JUMP_TABLE_DATA_P (insn))
	    mark_jump_label (PATTERN (insn), insn, 0);
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}

/* Given a comparison (CODE ARG0 ARG1), inside an insn INSN, return the
   code of the reversed comparison if it is possible to do so.  Otherwise
   return UNKNOWN.  UNKNOWN may be returned when we have a CC_MODE compare
   and don't know whether its source is a floating-point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const rtx_insn *insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else so
     that the machine description can do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    return REVERSE_CONDITION (code, mode);

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure we
	 are dealing with floating point, so we don't need any more
	 tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CASTs are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
	         the i387 model of comparisons, which first moves flag
	         registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we can't
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

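/* A minimal usage sketch (hypothetical caller): reverse a branch
   condition only when the reversal is known to be safe:

       rtx cond = XEXP (SET_SRC (pc_set (jump)), 0);
       enum rtx_code rev = reversed_comparison_code (cond, jump);
       if (rev != UNKNOWN)
	 ... rewrite the branch using REV ...

   For an integer comparison such as (eq (reg 105) (reg 106)) this
   yields NE, while a NaN-honoring floating-point (gt ...) yields
   UNKNOWN, since its plain reversal would mishandle unordered
   operands.  */
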
/* A wrapper around the previous function that takes COMPARISON as an
   rtx expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}

/* Return the comparison EXP with its code reversed.
   Return NULL_RTX if we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

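/* Illustrative examples: reverse_condition (GT) is LE and
   reverse_condition (LTU) is GEU, i.e. !(a > b) becomes a <= b.  This
   identity does not hold for IEEE floating point when a comparison can
   be unordered; there !(a > b) is "a <= b or unordered", which is why
   callers must use reversed_comparison_code in the presence of NaNs.  */
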
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to check
   that the target supports them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

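/* Illustrative example: reverse_condition_maybe_unordered (GT) is UNLE,
   since for IEEE floating point !(a > b) means "a <= b or the operands
   are unordered", which is exactly what UNLE expresses.  */
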
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

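/* Illustrative example: (lt a b) and (gt b a) test the same thing, so
   swap_condition (LT) is GT; symmetric codes such as EQ, NE, ORDERED
   and UNORDERED are returned unchanged.  */
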
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

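/* Illustrative examples: unsigned_condition (GT) is GTU and
   signed_condition (LTU) is LT, while equality codes pass through
   unchanged, since EQ and NE do not depend on signedness.  */
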
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to
     reverse comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}

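/* Illustrative examples: comparison_dominates_p (EQ, LE) is 1, since
   a == b implies a <= b, while comparison_dominates_p (LE, LT) is 0,
   since a <= b does not imply a < b.  */
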
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || ANY_RETURN_P (XEXP (x, 1))))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| ANY_RETURN_P (XEXP (x, 2))))));
}

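/* An illustrative sketch of the two pattern shapes condjump_p accepts
   (label and register numbers are made up):

       (set (pc) (label_ref 23))			;; unconditional
       (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
			       (label_ref 23)
			       (pc)))			;; conditional

   with either arm of the IF_THEN_ELSE allowed to be the label (or a
   return) and the other the fall-through (pc).  */
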
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

/* Return the SET that sets PC in INSN, otherwise NULL.  */

rtx
pc_set (const rtx_insn *insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const rtx_insn *insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	{
	  const_rtx x = *iter;
	  switch (GET_CODE (x))
	    {
	    case RETURN:
	    case SIMPLE_RETURN:
	    case EH_RETURN:
	      return true;

	    case SET:
	      if (SET_IS_RETURN_P (x))
		return true;
	      break;

	    default:
	      break;
	    }
	}
    }
  return false;
}

/* Return true if INSN is an exception-handling return jump, i.e. its
   pattern contains an EH_RETURN.  */

int
eh_returnjump_p (rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	if (GET_CODE (*iter) == EH_RETURN)
	  return true;
    }
  return false;
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const rtx_insn *insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

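/* An illustrative sketch of the tablejump case described above (insn
   and label numbers are made up): for

       (jump_insn 11 ... (set (pc) (mem (plus (reg 100)
					      (label_ref 30)))))
       (code_label 30)
       (jump_table_data 12 ... (addr_vec [(label_ref 23) (label_ref 24)]))

   JUMP_LABEL (insn 11) becomes label 30, the label of the ADDR_VEC,
   while labels 23 and 24 only have their LABEL_NUSES counts bumped.  */
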
/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CLOBBER_HIGH:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	for (i = 0; i < seq->len (); i++)
	  mark_jump_label (PATTERN (seq->insn (i)),
			   seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx_insn *label = label_ref_label (x);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	set_label_ref_label (x, label);
	if (! insn || ! insn->deleted ())
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx_insn *insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}

/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
	  delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      while (next && next->deleted ())
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && next->deleted ())
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}

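/* An illustrative sketch of the cascade above: deleting the only jump
   to label L takes LABEL_NUSES (L) to zero, so L is deleted in turn;
   if a BARRIER then precedes L's position, the now-unreachable insns
   that followed L are deleted as well, until the next used label.  */
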
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn = from;

  while (1)
    {
      rtx_insn *next = NEXT_INSN (insn);
      rtx_insn *prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  insn->set_deleted();

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    SET_NEXT_INSN (prev) = next;

	  if (next)
	    SET_PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx_insn *insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && label_ref_label (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
	x = gen_rtx_SET (pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && label_ref_label (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx_insn *jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = jump->jump_label ();

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 Before the epilogue is emitted, return/simple_return cannot be
	 created, so we return 0 immediately.  After the epilogue is
	 emitted, we always expect a label, either a non-null label or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

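/* A minimal usage sketch (hypothetical caller): to retarget JUMP at
   NEW_LABEL tentatively, accruing changes so they can be cancelled:

       if (!redirect_jump_1 (jump, new_label) || !apply_change_group ())
	 ... the redirection was not possible ...

   or simply call redirect_jump (jump, new_label, 0), which performs
   both steps and fixes up label use counts via redirect_jump_2.  */
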
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced
   with NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if
   its reference count has dropped to zero.  */
void
redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* A negative DELETE_UNUSED used to signal how to handle moving the
     FUNCTION_END note.  Just sanity check that no caller still relies
     on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx_insn *insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}

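/* An illustrative sketch: inverting a branch such as

       (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
			       (label_ref 23) (pc)))

   with invert_jump (jump, new_label, 0) rewrites the condition to NE
   (or, failing that, swaps the IF_THEN_ELSE arms) and retargets the
   taken edge at NEW_LABEL, updating branch probabilities as well.  */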

/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      poly_int64 byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && known_eq (byte_x, byte_y);
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return label_ref_label (x) == label_ref_label (y);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      else
	{
	  rtx_insn *xi = next_nonnote_nondebug_insn (label_ref_label (x));
	  rtx_insn *yi = next_nonnote_nondebug_insn (label_ref_label (y));
	  while (xi && LABEL_P (xi))
	    xi = next_nonnote_nondebug_insn (xi);
	  while (yi && LABEL_P (yi))
	    yi = next_nonnote_nondebug_insn (yi);
	  return xi == yi;
	}

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

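/* An illustrative sketch (register numbers are made up): once
   reg_renumber assigns both pseudo 100 and pseudo 101 to hard register
   3, rtx_renumbered_equal_p considers (reg:SI 100) equal to
   (reg:SI 101), and it likewise accepts (plus:SI a b) against
   (plus:SI b a) because PLUS is commutative.  */
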
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (lra_in_progress
			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}

/* Return the regno of the register REG, handling subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}