/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file is the vestigial remainder of the once-famous
   jump-optimization pass of the compiler.  It now contains essentially
   a set of utility functions for operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
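
/* For illustration only (the operands and label number are hypothetical):
   on a typical target a conditional branch is a jump_insn whose pattern
   looks roughly like

     (set (pc) (if_then_else (eq (reg:CC ...) (const_int 0))
                             (label_ref 23)
                             (pc)))

   Its JUMP_LABEL field points at (code_label 23 ...), and that label's
   LABEL_NUSES count includes this reference.  */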

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "rtl-iter.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *);
static int invert_exp_1 (rtx, rtx_insn *);

/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    {
      rtx_insn *insn;
      unsigned int i;
      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
        if (LABEL_P (insn))
          LABEL_NUSES (insn)++;
    }
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
        {
          rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
          if (!prev)
            continue;

          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            {
              basic_block bb = BLOCK_FOR_INSN (prev);
              rtx_insn *end = PREV_INSN (insn);
              reorder_insns_nobb (insn, insn, prev);
              if (bb)
                {
                  /* If the backend called compute_bb_for_insn in machine
                     reorg and didn't call free_bb_for_insn again, preserve
                     basic block boundaries.  Move the end of the basic
                     block to PREV, since it is now followed by a barrier,
                     and clear BLOCK_FOR_INSN on the following notes.
                     ???  Maybe the proper solution for targets that keep
                     a cfg around after machine reorg is not to run the
                     cleanup_barriers pass at all.  */
                  BB_END (bb) = prev;
                  do
                    {
                      prev = NEXT_INSN (prev);
                      if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
                        BLOCK_FOR_INSN (prev) = NULL;
                    }
                  while (prev != end);
                }
            }
        }
    }
  return 0;
}
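
/* For example (an illustrative insn stream, not target RTL):

     jump_insn -> note -> barrier -> barrier

   becomes

     jump_insn -> barrier -> note

   the first barrier is moved next to the jump and the now-duplicate
   second barrier is deleted.  */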

namespace {

const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}


/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx_insn *f)
{
  rtx_insn *insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
         sticky and not reset here; that way we won't lose association
         with a label when e.g. the source for a target register
         disappears out of reach for targets that may use jump-target
         registers.  Jump transformations are supposed to transform
         any REG_LABEL_TARGET notes.  The target label reference in a
         branch may disappear from the branch (and from the
         instruction before it) for other reasons, like register
         allocation.  */

      if (INSN_P (insn))
        {
          rtx note, next;

          for (note = REG_NOTES (insn); note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                remove_note (insn, note);
            }
        }
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
          /* The source must be the direct LABEL_REF, not a
             PLUS, UNSPEC, IF_THEN_ELSE etc.  */
          && GET_CODE (SET_SRC (label_set)) == LABEL_REF
          && (rtx_equal_p (label_dest, pc_src)
              || (GET_CODE (pc_src) == IF_THEN_ELSE
                  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
                      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
        {
          /* The CODE_LABEL referred to in the note must be the
             CODE_LABEL in the LABEL_REF of the "set".  We can
             conveniently use it for the marker function, which
             requires a LABEL_REF wrapping.  */
          gcc_assert (XEXP (label_note, 0) == label_ref_label (SET_SRC (label_set)));

          mark_jump_label_1 (label_set, jump_insn, false, true);

          gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
        }
    }
}
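
/* For example (hypothetical pseudo 100 and label 23), given the pair

     (set (reg 100) (label_ref 23))    <- prev_nonjump_insn
     (set (pc) (reg 100))              <- jump_insn

   the jump acquires JUMP_LABEL = label 23 even though its own pattern
   mentions no label directly.  */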

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
        {
          /* In cfglayout mode, we don't bother with trivial next-insn
             propagation of LABEL_REFs into JUMP_LABEL.  This will be
             handled by other optimizers using better algorithms.  */
          FOR_BB_INSNS (bb, insn)
            {
              gcc_assert (! insn->deleted ());
              if (NONDEBUG_INSN_P (insn))
                mark_jump_label (PATTERN (insn), insn, 0);
            }

          /* In cfglayout mode, there may be non-insns between the
             basic blocks.  If those non-insns represent tablejump data,
             they contain label references that we must record.  */
          for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
            if (JUMP_TABLE_DATA_P (insn))
              mark_jump_label (PATTERN (insn), insn, 0);
          for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
            if (JUMP_TABLE_DATA_P (insn))
              mark_jump_label (PATTERN (insn), insn, 0);
        }
    }
  else
    {
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
        {
          if (insn->deleted ())
            ;
          else if (LABEL_P (insn))
            prev_nonjump_insn = NULL;
          else if (JUMP_TABLE_DATA_P (insn))
            mark_jump_label (PATTERN (insn), insn, 0);
          else if (NONDEBUG_INSN_P (insn))
            {
              mark_jump_label (PATTERN (insn), insn, 0);
              if (JUMP_P (insn))
                {
                  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
                    maybe_propagate_label_ref (insn, prev_nonjump_insn);
                }
              else
                prev_nonjump_insn = insn;
            }
        }
    }
}
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may be returned in case we have a CC_MODE comparison
   and we don't know whether its source is a floating point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const rtx_insn *insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else to
     allow the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    return REVERSE_CONDITION (code, mode);

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure we
         are dealing with floating point, so we don't need any more
         tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
         returns its argument and we assign it to a const_rtx
         variable.  */
      for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
	         the model of i387 comparisons that first move flag
	         registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we don't
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
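
/* For example, reversing (lt (reg:SI a) (reg:SI b)) yields GE, since an
   integer comparison always has an exact reverse.  Reversing a
   floating-point LT where NaNs must be honored has no IEEE-safe
   counterpart among the plain codes, so UNKNOWN is returned.  (The
   operands a and b are placeholders.)  */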

/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}

/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
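
/* Example: under IEEE semantics, !(a > b) does not imply a <= b, because
   a and b may be unordered.  reverse_condition_maybe_unordered therefore
   maps GT to UNLE, which is true exactly when (gt a b) is false, NaN
   operands included.  */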

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}
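
/* For instance, comparison_dominates_p (EQ, GE) is 1: whenever x == y
   holds, x >= y holds too.  By contrast, comparison_dominates_p (LE, LT)
   is 0, since x <= y does not imply x < y.  */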

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || ANY_RETURN_P (XEXP (x, 1))))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| ANY_RETURN_P (XEXP (x, 2))))));
}
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}
/* Return the set of PC in INSN, or NULL if it has none.  */

rtx
pc_set (const rtx_insn *insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
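
/* Thus for an illustrative pattern like

     (parallel [(set (pc) (if_then_else ...))
                (clobber (reg ...))])

   pc_set returns the inner (set (pc) ...) expression.  */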

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const rtx_insn *insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	{
	  const_rtx x = *iter;
	  switch (GET_CODE (x))
	    {
	    case RETURN:
	    case SIMPLE_RETURN:
	    case EH_RETURN:
	      return true;

	    case SET:
	      if (SET_IS_RETURN_P (x))
		return true;
	      break;

	    default:
	      break;
	    }
	}
    }
  return false;
}
/* Return true if INSN is an EH return jump, i.e. a (possibly conditional)
   jump whose pattern contains an EH_RETURN.  */

int
eh_returnjump_p (rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	if (GET_CODE (*iter) == EH_RETURN)
	  return true;
    }
  return false;
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const rtx_insn *insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
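
/* Illustrative patterns: for (set (cc0) (compare x y)) sets_cc0_p
   returns 1; for (parallel [(set (cc0) ...) (set (reg) ...)]) it
   returns -1, because something besides the condition codes is set;
   for a pattern that sets no cc0 at all it returns 0.  */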

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is finished.  */

void
mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	for (i = 0; i < seq->len (); i++)
	  mark_jump_label (PATTERN (seq->insn (i)),
			   seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx_insn *label = label_ref_label (x);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	set_label_ref_label (x, label);
	if (! insn || ! insn->deleted ())
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx_insn *insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}

/* Delete insn INSN from the chain of insns, update label ref counts,
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
	  delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      while (next && next->deleted ())
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && next->deleted ())
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn = from;

  while (1)
    {
      rtx_insn *next = NEXT_INSN (insn);
      rtx_insn *prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  insn->set_deleted ();

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    SET_NEXT_INSN (prev) = next;

	  if (next)
	    SET_PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx_insn *insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && label_ref_label (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
	x = gen_rtx_SET (pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && label_ref_label (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx_insn *jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = jump->jump_label ();

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 When before the epilogue is emitted, return/simple_return cannot be
	 created so we return 0 immediately.  After the epilogue is emitted,
	 we always expect a label, either a non-null label, or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
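
/* A typical caller (hypothetical) looks like

     if (!redirect_jump (as_a <rtx_jump_insn *> (insn), new_label, 1))
       ...give up on the transformation...

   Callers that need to validate several changes together instead use
   redirect_jump_1 to accumulate the edits and commit them all at once
   with apply_change_group.  */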

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
   ref count has dropped to zero.  */
void
redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to signal special behavior when moving
     the FUNCTION_END note.  Just sanity check that no user still
     worries about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx_insn *insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
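
/* For example (hypothetical labels A and B), inverting

     (set (pc) (if_then_else (eq ...) (label_ref A) (pc)))

   with NLABEL = B produces

     (set (pc) (if_then_else (ne ...) (label_ref B) (pc)))

   and the branch probability notes are inverted to match.  */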


/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      poly_int64 byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && known_eq (byte_x, byte_y);
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return label_ref_label (x) == label_ref_label (y);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      else
	{
	  rtx_insn *xi = next_nonnote_nondebug_insn (label_ref_label (x));
	  rtx_insn *yi = next_nonnote_nondebug_insn (label_ref_label (y));
	  while (xi && LABEL_P (xi))
	    xi = next_nonnote_nondebug_insn (xi);
	  while (yi && LABEL_P (yi))
	    yi = next_nonnote_nondebug_insn (yi);
	  return xi == yi;
	}

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTX's match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (lra_in_progress
			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}
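
/* For example (hypothetical numbers): if pseudo 100 was assigned hard
   register 3, true_regnum of (reg 100) is 3, and true_regnum of a
   representable (subreg (reg 100) ...) is 3 plus the subword offset.
   If the pseudo was given no hard register, its own number, 100, is
   returned.  */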

/* Return the regno of the register REG, handling subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}