/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
36
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
54
55/* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57/* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
61
62static int init_label_info PARAMS ((rtx));
63static void mark_all_labels PARAMS ((rtx));
64static int duplicate_loop_exit_test PARAMS ((rtx));
65static void delete_computation PARAMS ((rtx));
66static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67static int redirect_exp PARAMS ((rtx, rtx, rtx));
68static void invert_exp_1 PARAMS ((rtx));
69static int invert_exp PARAMS ((rtx));
70static int returnjump_p_1 PARAMS ((rtx *, void *));
71static void delete_prior_computation PARAMS ((rtx, rtx));
72
73/* Alternate entry into the jump optimizer. This entry point only rebuilds
74 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
75 instructions. */
76void
77rebuild_jump_labels (f)
78 rtx f;
79{
80 rtx insn;
81 int max_uid = 0;
82
83 max_uid = init_label_info (f) + 1;
84
85 mark_all_labels (f);
86
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
90
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
93 LABEL_NUSES (XEXP (insn, 0))++;
94}
95
96/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
97 non-fallthru insn. This is not generally true, as multiple barriers
98 may have crept in, or the BARRIER may be separated from the last
99 real insn by one or more NOTEs.
100
101 This simple pass moves barriers and removes duplicates so that the
102 old code is happy.
103 */
104void
105cleanup_barriers ()
106{
107 rtx insn, next, prev;
108 for (insn = get_insns (); insn; insn = next)
109 {
110 next = NEXT_INSN (insn);
111 if (GET_CODE (insn) == BARRIER)
112 {
113 prev = prev_nonnote_insn (insn);
114 if (GET_CODE (prev) == BARRIER)
115 delete_barrier (insn);
116 else if (prev != PREV_INSN (insn))
117 reorder_insns (insn, insn, prev);
118 }
119 }
120}
121
122void
123copy_loop_headers (f)
124 rtx f;
125{
126 rtx insn, next;
127 /* Now iterate optimizing jumps until nothing changes over one pass. */
128 for (insn = f; insn; insn = next)
129 {
130 rtx temp, temp1;
131
132 next = NEXT_INSN (insn);
133
134 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
135 jump. Try to optimize by duplicating the loop exit test if so.
136 This is only safe immediately after regscan, because it uses
137 the values of regno_first_uid and regno_last_uid. */
138 if (GET_CODE (insn) == NOTE
139 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
140 && (temp1 = next_nonnote_insn (insn)) != 0
141 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
142 {
143 temp = PREV_INSN (insn);
144 if (duplicate_loop_exit_test (insn))
145 {
146 next = NEXT_INSN (temp);
147 }
148 }
149 }
150}
151
152void
153purge_line_number_notes (f)
154 rtx f;
155{
156 rtx last_note = 0;
157 rtx insn;
158 /* Delete extraneous line number notes.
159 Note that two consecutive notes for different lines are not really
160 extraneous. There should be some indication where that line belonged,
161 even if it became empty. */
162
163 for (insn = f; insn; insn = NEXT_INSN (insn))
164 if (GET_CODE (insn) == NOTE)
165 {
166 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
167 /* Any previous line note was for the prologue; gdb wants a new
168 note after the prologue even if it is for the same line. */
169 last_note = NULL_RTX;
170 else if (NOTE_LINE_NUMBER (insn) >= 0)
171 {
172 /* Delete this note if it is identical to previous note. */
173 if (last_note
174 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
175 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
176 {
177 delete_related_insns (insn);
178 continue;
179 }
180
181 last_note = insn;
182 }
183 }
184}
185
186/* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
187 notes whose labels don't occur in the insn any more. Returns the
188 largest INSN_UID found. */
189static int
190init_label_info (f)
191 rtx f;
192{
193 int largest_uid = 0;
194 rtx insn;
195
196 for (insn = f; insn; insn = NEXT_INSN (insn))
197 {
198 if (GET_CODE (insn) == CODE_LABEL)
199 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
200 else if (GET_CODE (insn) == JUMP_INSN)
201 JUMP_LABEL (insn) = 0;
202 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
203 {
204 rtx note, next;
205
206 for (note = REG_NOTES (insn); note; note = next)
207 {
208 next = XEXP (note, 1);
209 if (REG_NOTE_KIND (note) == REG_LABEL
210 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
211 remove_note (insn, note);
212 }
213 }
214 if (INSN_UID (insn) > largest_uid)
215 largest_uid = INSN_UID (insn);
216 }
217
218 return largest_uid;
219}
220
221/* Mark the label each jump jumps to.
222 Combine consecutive labels, and count uses of labels. */
223
224static void
225mark_all_labels (f)
226 rtx f;
227{
228 rtx insn;
229
230 for (insn = f; insn; insn = NEXT_INSN (insn))
231 if (INSN_P (insn))
232 {
233 if (GET_CODE (insn) == CALL_INSN
234 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
235 {
236 mark_all_labels (XEXP (PATTERN (insn), 0));
237 mark_all_labels (XEXP (PATTERN (insn), 1));
238 mark_all_labels (XEXP (PATTERN (insn), 2));
239
240 /* Canonicalize the tail recursion label attached to the
241 CALL_PLACEHOLDER insn. */
242 if (XEXP (PATTERN (insn), 3))
243 {
244 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
245 XEXP (PATTERN (insn), 3));
246 mark_jump_label (label_ref, insn, 0);
247 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
248 }
249
250 continue;
251 }
252
253 mark_jump_label (PATTERN (insn), insn, 0);
254 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
255 {
256 /* When we know the LABEL_REF contained in a REG used in
257 an indirect jump, we'll have a REG_LABEL note so that
258 flow can tell where it's going. */
259 if (JUMP_LABEL (insn) == 0)
260 {
261 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
262 if (label_note)
263 {
264 /* But a LABEL_REF around the REG_LABEL note, so
265 that we can canonicalize it. */
266 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
267 XEXP (label_note, 0));
268
269 mark_jump_label (label_ref, insn, 0);
270 XEXP (label_note, 0) = XEXP (label_ref, 0);
271 JUMP_LABEL (insn) = XEXP (label_note, 0);
272 }
273 }
274 }
275 }
276}
277
278/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
279 jump. Assume that this unconditional jump is to the exit test code. If
280 the code is sufficiently simple, make a copy of it before INSN,
281 followed by a jump to the exit of the loop. Then delete the unconditional
282 jump after INSN.
283
284 Return 1 if we made the change, else 0.
285
286 This is only safe immediately after a regscan pass because it uses the
287 values of regno_first_uid and regno_last_uid. */
288
289static int
290duplicate_loop_exit_test (loop_start)
291 rtx loop_start;
292{
293 rtx insn, set, reg, p, link;
294 rtx copy = 0, first_copy = 0;
295 int num_insns = 0;
296 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
297 rtx lastexit;
298 int max_reg = max_reg_num ();
299 rtx *reg_map = 0;
300 rtx loop_pre_header_label;
301
302 /* Scan the exit code. We do not perform this optimization if any insn:
303
304 is a CALL_INSN
305 is a CODE_LABEL
306 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
307 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
308 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
309 is not valid.
310
311 We also do not do this if we find an insn with ASM_OPERANDS. While
312 this restriction should not be necessary, copying an insn with
313 ASM_OPERANDS can confuse asm_noperands in some cases.
314
315 Also, don't do this if the exit code is more than 20 insns. */
316
317 for (insn = exitcode;
318 insn
319 && ! (GET_CODE (insn) == NOTE
320 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
321 insn = NEXT_INSN (insn))
322 {
323 switch (GET_CODE (insn))
324 {
325 case CODE_LABEL:
326 case CALL_INSN:
327 return 0;
328 case NOTE:
329 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
330 a jump immediately after the loop start that branches outside
331 the loop but within an outer loop, near the exit test.
332 If we copied this exit test and created a phony
333 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
334 before the exit test look like these could be safely moved
335 out of the loop even if they actually may be never executed.
336 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
337
338 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
339 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
340 return 0;
341
342 if (optimize < 2
343 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
344 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
345 /* If we were to duplicate this code, we would not move
346 the BLOCK notes, and so debugging the moved code would
347 be difficult. Thus, we only move the code with -O2 or
348 higher. */
349 return 0;
350
351 break;
352 case JUMP_INSN:
353 case INSN:
354 /* The code below would grossly mishandle REG_WAS_0 notes,
355 so get rid of them here. */
356 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
357 remove_note (insn, p);
358 if (++num_insns > 20
359 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
360 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
361 return 0;
362 break;
363 default:
364 break;
365 }
366 }
367
368 /* Unless INSN is zero, we can do the optimization. */
369 if (insn == 0)
370 return 0;
371
372 lastexit = insn;
373
374 /* See if any insn sets a register only used in the loop exit code and
375 not a user variable. If so, replace it with a new register. */
376 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
377 if (GET_CODE (insn) == INSN
378 && (set = single_set (insn)) != 0
379 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
380 || (GET_CODE (reg) == SUBREG
381 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
382 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
383 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
384 {
385 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
386 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
387 break;
388
389 if (p != lastexit)
390 {
391 /* We can do the replacement. Allocate reg_map if this is the
392 first replacement we found. */
393 if (reg_map == 0)
394 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
395
396 REG_LOOP_TEST_P (reg) = 1;
397
398 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
399 }
400 }
401 loop_pre_header_label = gen_label_rtx ();
402
403 /* Now copy each insn. */
404 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
405 {
406 switch (GET_CODE (insn))
407 {
408 case BARRIER:
409 copy = emit_barrier_before (loop_start);
410 break;
411 case NOTE:
412 /* Only copy line-number notes. */
413 if (NOTE_LINE_NUMBER (insn) >= 0)
414 {
415 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
416 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
417 }
418 break;
419
420 case INSN:
421 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
422 if (reg_map)
423 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
424
425 mark_jump_label (PATTERN (copy), copy, 0);
426
427 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
428 make them. */
429 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
430 if (REG_NOTE_KIND (link) != REG_LABEL)
431 {
432 if (GET_CODE (link) == EXPR_LIST)
433 REG_NOTES (copy)
434 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
435 XEXP (link, 0),
436 REG_NOTES (copy)));
437 else
438 REG_NOTES (copy)
439 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
440 XEXP (link, 0),
441 REG_NOTES (copy)));
442 }
443
444 if (reg_map && REG_NOTES (copy))
445 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
446 break;
447
448 case JUMP_INSN:
449 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
450 loop_start);
451 if (reg_map)
452 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
453 mark_jump_label (PATTERN (copy), copy, 0);
454 if (REG_NOTES (insn))
455 {
456 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
457 if (reg_map)
458 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
459 }
460
461 /* Predict conditional jump that do make loop looping as taken.
462 Other jumps are probably exit conditions, so predict
463 them as untaken. */
464 if (any_condjump_p (copy))
465 {
466 rtx label = JUMP_LABEL (copy);
467 if (label)
468 {
469 /* The jump_insn after loop_start should be followed
470 by barrier and loopback label. */
471 if (prev_nonnote_insn (label)
472 && (prev_nonnote_insn (prev_nonnote_insn (label))
473 == next_nonnote_insn (loop_start)))
474 {
475 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
476 /* To keep pre-header, we need to redirect all loop
477 entrances before the LOOP_BEG note. */
478 redirect_jump (copy, loop_pre_header_label, 0);
479 }
480 else
481 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
482 }
483 }
484 break;
485
486 default:
487 abort ();
488 }
489
490 /* Record the first insn we copied. We need it so that we can
491 scan the copied insns for new pseudo registers. */
492 if (! first_copy)
493 first_copy = copy;
494 }
495
496 /* Now clean up by emitting a jump to the end label and deleting the jump
497 at the start of the loop. */
498 if (! copy || GET_CODE (copy) != BARRIER)
499 {
500 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
501 loop_start);
502
503 /* Record the first insn we copied. We need it so that we can
504 scan the copied insns for new pseudo registers. This may not
505 be strictly necessary since we should have copied at least one
506 insn above. But I am going to be safe. */
507 if (! first_copy)
508 first_copy = copy;
509
510 mark_jump_label (PATTERN (copy), copy, 0);
511 emit_barrier_before (loop_start);
512 }
513
514 emit_label_before (loop_pre_header_label, loop_start);
515
516 /* Now scan from the first insn we copied to the last insn we copied
517 (copy) for new pseudo registers. Do this after the code to jump to
518 the end label since that might create a new pseudo too. */
519 reg_scan_update (first_copy, copy, max_reg);
520
521 /* Mark the exit code as the virtual top of the converted loop. */
522 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
523
524 delete_related_insns (next_nonnote_insn (loop_start));
525
526 /* Clean up. */
527 if (reg_map)
528 free (reg_map);
529
530 return 1;
531}
532
533/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
534 notes between START and END out before START. START and END may be such
535 notes. Returns the values of the new starting and ending insns, which
536 may be different if the original ones were such notes.
537 Return true if there were only such notes and no real instructions. */
538
539bool
540squeeze_notes (startp, endp)
541 rtx* startp;
542 rtx* endp;
543{
544 rtx start = *startp;
545 rtx end = *endp;
546
547 rtx insn;
548 rtx next;
549 rtx last = NULL;
550 rtx past_end = NEXT_INSN (end);
551
552 for (insn = start; insn != past_end; insn = next)
553 {
554 next = NEXT_INSN (insn);
555 if (GET_CODE (insn) == NOTE
556 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
557 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
558 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
559 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
560 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
561 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
562 {
563 if (insn == start)
564 start = next;
565 else
566 {
567 rtx prev = PREV_INSN (insn);
568 PREV_INSN (insn) = PREV_INSN (start);
569 NEXT_INSN (insn) = start;
570 NEXT_INSN (PREV_INSN (insn)) = insn;
571 PREV_INSN (NEXT_INSN (insn)) = insn;
572 NEXT_INSN (prev) = next;
573 PREV_INSN (next) = prev;
574 }
575 }
576 else
577 last = insn;
578 }
579
580 /* There were no real instructions. */
581 if (start == past_end)
582 return true;
583
584 end = last;
585
586 *startp = start;
587 *endp = end;
588 return false;
589}
590
591/* Return the label before INSN, or put a new label there. */
592
593rtx
594get_label_before (insn)
595 rtx insn;
596{
597 rtx label;
598
599 /* Find an existing label at this point
600 or make a new one if there is none. */
601 label = prev_nonnote_insn (insn);
602
603 if (label == 0 || GET_CODE (label) != CODE_LABEL)
604 {
605 rtx prev = PREV_INSN (insn);
606
607 label = gen_label_rtx ();
608 emit_label_after (label, prev);
609 LABEL_NUSES (label) = 0;
610 }
611 return label;
612}
613
614/* Return the label after INSN, or put a new label there. */
615
616rtx
617get_label_after (insn)
618 rtx insn;
619{
620 rtx label;
621
622 /* Find an existing label at this point
623 or make a new one if there is none. */
624 label = next_nonnote_insn (insn);
625
626 if (label == 0 || GET_CODE (label) != CODE_LABEL)
627 {
628 label = gen_label_rtx ();
629 emit_label_after (label, insn);
630 LABEL_NUSES (label) = 0;
631 }
632 return label;
633}
634
635/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
636 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
637 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
638 know whether it's source is floating point or integer comparison. Machine
639 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
640 to help this function avoid overhead in these cases. */
641enum rtx_code
642reversed_comparison_code_parts (code, arg0, arg1, insn)
643 rtx insn, arg0, arg1;
644 enum rtx_code code;
645{
646 enum machine_mode mode;
647
648 /* If this is not actually a comparison, we can't reverse it. */
649 if (GET_RTX_CLASS (code) != '<')
650 return UNKNOWN;
651
652 mode = GET_MODE (arg0);
653 if (mode == VOIDmode)
654 mode = GET_MODE (arg1);
655
656 /* First see if machine description supply us way to reverse the comparison.
657 Give it priority over everything else to allow machine description to do
658 tricks. */
659#ifdef REVERSIBLE_CC_MODE
660 if (GET_MODE_CLASS (mode) == MODE_CC
661 && REVERSIBLE_CC_MODE (mode))
662 {
663#ifdef REVERSE_CONDITION
664 return REVERSE_CONDITION (code, mode);
665#endif
666 return reverse_condition (code);
667 }
668#endif
669
670 /* Try a few special cases based on the comparison code. */
671 switch (code)
672 {
673 case GEU:
674 case GTU:
675 case LEU:
676 case LTU:
677 case NE:
678 case EQ:
679 /* It is always safe to reverse EQ and NE, even for the floating
680 point. Similary the unsigned comparisons are never used for
681 floating point so we can reverse them in the default way. */
682 return reverse_condition (code);
683 case ORDERED:
684 case UNORDERED:
685 case LTGT:
686 case UNEQ:
687 /* In case we already see unordered comparison, we can be sure to
688 be dealing with floating point so we don't need any more tests. */
689 return reverse_condition_maybe_unordered (code);
690 case UNLT:
691 case UNLE:
692 case UNGT:
693 case UNGE:
694 /* We don't have safe way to reverse these yet. */
695 return UNKNOWN;
696 default:
697 break;
698 }
699
700 /* In case we give up IEEE compatibility, all comparisons are reversible. */
701 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
702 || flag_unsafe_math_optimizations)
703 return reverse_condition (code);
704
705 if (GET_MODE_CLASS (mode) == MODE_CC
706#ifdef HAVE_cc0
707 || arg0 == cc0_rtx
708#endif
709 )
710 {
711 rtx prev;
712 /* Try to search for the comparison to determine the real mode.
713 This code is expensive, but with sane machine description it
714 will be never used, since REVERSIBLE_CC_MODE will return true
715 in all cases. */
716 if (! insn)
717 return UNKNOWN;
718
719 for (prev = prev_nonnote_insn (insn);
720 prev != 0 && GET_CODE (prev) != CODE_LABEL;
721 prev = prev_nonnote_insn (prev))
722 {
723 rtx set = set_of (arg0, prev);
724 if (set && GET_CODE (set) == SET
725 && rtx_equal_p (SET_DEST (set), arg0))
726 {
727 rtx src = SET_SRC (set);
728
729 if (GET_CODE (src) == COMPARE)
730 {
731 rtx comparison = src;
732 arg0 = XEXP (src, 0);
733 mode = GET_MODE (arg0);
734 if (mode == VOIDmode)
735 mode = GET_MODE (XEXP (comparison, 1));
736 break;
737 }
738 /* We can get past reg-reg moves. This may be useful for model
739 of i387 comparisons that first move flag registers around. */
740 if (REG_P (src))
741 {
742 arg0 = src;
743 continue;
744 }
745 }
746 /* If register is clobbered in some ununderstandable way,
747 give up. */
748 if (set)
749 return UNKNOWN;
750 }
751 }
752
753 /* An integer condition. */
754 if (GET_CODE (arg0) == CONST_INT
755 || (GET_MODE (arg0) != VOIDmode
756 && GET_MODE_CLASS (mode) != MODE_CC
757 && ! FLOAT_MODE_P (mode)))
758 return reverse_condition (code);
759
760 return UNKNOWN;
761}
762
763/* An wrapper around the previous function to take COMPARISON as rtx
764 expression. This simplifies many callers. */
765enum rtx_code
766reversed_comparison_code (comparison, insn)
767 rtx comparison, insn;
768{
769 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
770 return UNKNOWN;
771 return reversed_comparison_code_parts (GET_CODE (comparison),
772 XEXP (comparison, 0),
773 XEXP (comparison, 1), insn);
774}
775
776/* Given an rtx-code for a comparison, return the code for the negated
777 comparison. If no such code exists, return UNKNOWN.
778
779 WATCH OUT! reverse_condition is not safe to use on a jump that might
780 be acting on the results of an IEEE floating point comparison, because
781 of the special treatment of non-signaling nans in comparisons.
782 Use reversed_comparison_code instead. */
783
784enum rtx_code
785reverse_condition (code)
786 enum rtx_code code;
787{
788 switch (code)
789 {
790 case EQ:
791 return NE;
792 case NE:
793 return EQ;
794 case GT:
795 return LE;
796 case GE:
797 return LT;
798 case LT:
799 return GE;
800 case LE:
801 return GT;
802 case GTU:
803 return LEU;
804 case GEU:
805 return LTU;
806 case LTU:
807 return GEU;
808 case LEU:
809 return GTU;
810 case UNORDERED:
811 return ORDERED;
812 case ORDERED:
813 return UNORDERED;
814
815 case UNLT:
816 case UNLE:
817 case UNGT:
818 case UNGE:
819 case UNEQ:
820 case LTGT:
821 return UNKNOWN;
822
823 default:
824 abort ();
825 }
826}
827
828/* Similar, but we're allowed to generate unordered comparisons, which
829 makes it safe for IEEE floating-point. Of course, we have to recognize
830 that the target will support them too... */
831
832enum rtx_code
833reverse_condition_maybe_unordered (code)
834 enum rtx_code code;
835{
836 /* Non-IEEE formats don't have unordered conditions. */
837 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
838 return reverse_condition (code);
839
840 switch (code)
841 {
842 case EQ:
843 return NE;
844 case NE:
845 return EQ;
846 case GT:
847 return UNLE;
848 case GE:
849 return UNLT;
850 case LT:
851 return UNGE;
852 case LE:
853 return UNGT;
854 case LTGT:
855 return UNEQ;
856 case UNORDERED:
857 return ORDERED;
858 case ORDERED:
859 return UNORDERED;
860 case UNLT:
861 return GE;
862 case UNLE:
863 return GT;
864 case UNGT:
865 return LE;
866 case UNGE:
867 return LT;
868 case UNEQ:
869 return LTGT;
870
871 default:
872 abort ();
873 }
874}
875
876/* Similar, but return the code when two operands of a comparison are swapped.
877 This IS safe for IEEE floating-point. */
878
879enum rtx_code
880swap_condition (code)
881 enum rtx_code code;
882{
883 switch (code)
884 {
885 case EQ:
886 case NE:
887 case UNORDERED:
888 case ORDERED:
889 case UNEQ:
890 case LTGT:
891 return code;
892
893 case GT:
894 return LT;
895 case GE:
896 return LE;
897 case LT:
898 return GT;
899 case LE:
900 return GE;
901 case GTU:
902 return LTU;
903 case GEU:
904 return LEU;
905 case LTU:
906 return GTU;
907 case LEU:
908 return GEU;
909 case UNLT:
910 return UNGT;
911 case UNLE:
912 return UNGE;
913 case UNGT:
914 return UNLT;
915 case UNGE:
916 return UNLE;
917
918 default:
919 abort ();
920 }
921}
922
923/* Given a comparison CODE, return the corresponding unsigned comparison.
924 If CODE is an equality comparison or already an unsigned comparison,
925 CODE is returned. */
926
927enum rtx_code
928unsigned_condition (code)
929 enum rtx_code code;
930{
931 switch (code)
932 {
933 case EQ:
934 case NE:
935 case GTU:
936 case GEU:
937 case LTU:
938 case LEU:
939 return code;
940
941 case GT:
942 return GTU;
943 case GE:
944 return GEU;
945 case LT:
946 return LTU;
947 case LE:
948 return LEU;
949
950 default:
951 abort ();
952 }
953}
954
955/* Similarly, return the signed version of a comparison. */
956
957enum rtx_code
958signed_condition (code)
959 enum rtx_code code;
960{
961 switch (code)
962 {
963 case EQ:
964 case NE:
965 case GT:
966 case GE:
967 case LT:
968 case LE:
969 return code;
970
971 case GTU:
972 return GT;
973 case GEU:
974 return GE;
975 case LTU:
976 return LT;
977 case LEU:
978 return LE;
979
980 default:
981 abort ();
982 }
983}
984
985/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
986 truth of CODE1 implies the truth of CODE2. */
987
988int
989comparison_dominates_p (code1, code2)
990 enum rtx_code code1, code2;
991{
992 /* UNKNOWN comparison codes can happen as a result of trying to revert
993 comparison codes.
994 They can't match anything, so we have to reject them here. */
995 if (code1 == UNKNOWN || code2 == UNKNOWN)
996 return 0;
997
998 if (code1 == code2)
999 return 1;
1000
1001 switch (code1)
1002 {
1003 case UNEQ:
1004 if (code2 == UNLE || code2 == UNGE)
1005 return 1;
1006 break;
1007
1008 case EQ:
1009 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1010 || code2 == ORDERED)
1011 return 1;
1012 break;
1013
1014 case UNLT:
1015 if (code2 == UNLE || code2 == NE)
1016 return 1;
1017 break;
1018
1019 case LT:
1020 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1021 return 1;
1022 break;
1023
1024 case UNGT:
1025 if (code2 == UNGE || code2 == NE)
1026 return 1;
1027 break;
1028
1029 case GT:
1030 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1031 return 1;
1032 break;
1033
1034 case GE:
1035 case LE:
1036 if (code2 == ORDERED)
1037 return 1;
1038 break;
1039
1040 case LTGT:
1041 if (code2 == NE || code2 == ORDERED)
1042 return 1;
1043 break;
1044
1045 case LTU:
1046 if (code2 == LEU || code2 == NE)
1047 return 1;
1048 break;
1049
1050 case GTU:
1051 if (code2 == GEU || code2 == NE)
1052 return 1;
1053 break;
1054
1055 case UNORDERED:
1056 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1057 || code2 == UNGE || code2 == UNGT)
1058 return 1;
1059 break;
1060
1061 default:
1062 break;
1063 }
1064
1065 return 0;
1066}
1067
1068/* Return 1 if INSN is an unconditional jump and nothing else. */
1069
1070int
1071simplejump_p (insn)
1072 rtx insn;
1073{
1074 return (GET_CODE (insn) == JUMP_INSN
1075 && GET_CODE (PATTERN (insn)) == SET
1076 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1077 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1078}
1079
1080/* Return nonzero if INSN is a (possibly) conditional jump
1081 and nothing more.
1082
1083 Use this function is deprecated, since we need to support combined
1084 branch and compare insns. Use any_condjump_p instead whenever possible. */
1085
1086int
1087condjump_p (insn)
1088 rtx insn;
1089{
1090 rtx x = PATTERN (insn);
1091
1092 if (GET_CODE (x) != SET
1093 || GET_CODE (SET_DEST (x)) != PC)
1094 return 0;
1095
1096 x = SET_SRC (x);
1097 if (GET_CODE (x) == LABEL_REF)
1098 return 1;
1099 else
1100 return (GET_CODE (x) == IF_THEN_ELSE
1101 && ((GET_CODE (XEXP (x, 2)) == PC
1102 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1103 || GET_CODE (XEXP (x, 1)) == RETURN))
1104 || (GET_CODE (XEXP (x, 1)) == PC
1105 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1106 || GET_CODE (XEXP (x, 2)) == RETURN))));
1107
1108 return 0;
1109}
1110
1111/* Return nonzero if INSN is a (possibly) conditional jump inside a
1112 PARALLEL.
1113
1114 Use this function is deprecated, since we need to support combined
1115 branch and compare insns. Use any_condjump_p instead whenever possible. */
1116
1117int
1118condjump_in_parallel_p (insn)
1119 rtx insn;
1120{
1121 rtx x = PATTERN (insn);
1122
1123 if (GET_CODE (x) != PARALLEL)
1124 return 0;
1125 else
1126 x = XVECEXP (x, 0, 0);
1127
1128 if (GET_CODE (x) != SET)
1129 return 0;
1130 if (GET_CODE (SET_DEST (x)) != PC)
1131 return 0;
1132 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1133 return 1;
1134 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1135 return 0;
1136 if (XEXP (SET_SRC (x), 2) == pc_rtx
1137 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1138 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1139 return 1;
1140 if (XEXP (SET_SRC (x), 1) == pc_rtx
1141 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1142 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1143 return 1;
1144 return 0;
1145}
1146
1147/* Return set of PC, otherwise NULL. */
1148
1149rtx
1150pc_set (insn)
1151 rtx insn;
1152{
1153 rtx pat;
1154 if (GET_CODE (insn) != JUMP_INSN)
1155 return NULL_RTX;
1156 pat = PATTERN (insn);
1157
1158 /* The set is allowed to appear either as the insn pattern or
1159 the first set in a PARALLEL. */
1160 if (GET_CODE (pat) == PARALLEL)
1161 pat = XVECEXP (pat, 0, 0);
1162 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1163 return pat;
1164
1165 return NULL_RTX;
1166}
1167
1168/* Return true when insn is an unconditional direct jump,
1169 possibly bundled inside a PARALLEL. */
1170
1171int
1172any_uncondjump_p (insn)
1173 rtx insn;
1174{
1175 rtx x = pc_set (insn);
1176 if (!x)
1177 return 0;
1178 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1179 return 0;
1180 return 1;
1181}
1182
1183/* Return true when insn is a conditional jump. This function works for
1184 instructions containing PC sets in PARALLELs. The instruction may have
1185 various other effects so before removing the jump you must verify
1186 onlyjump_p.
1187
1188 Note that unlike condjump_p it returns false for unconditional jumps. */
1189
1190int
1191any_condjump_p (insn)
1192 rtx insn;
1193{
1194 rtx x = pc_set (insn);
1195 enum rtx_code a, b;
1196
1197 if (!x)
1198 return 0;
1199 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1200 return 0;
1201
1202 a = GET_CODE (XEXP (SET_SRC (x), 1));
1203 b = GET_CODE (XEXP (SET_SRC (x), 2));
1204
1205 return ((b == PC && (a == LABEL_REF || a == RETURN))
1206 || (a == PC && (b == LABEL_REF || b == RETURN)));
1207}
1208
1209/* Return the label of a conditional jump. */
1210
1211rtx
1212condjump_label (insn)
1213 rtx insn;
1214{
1215 rtx x = pc_set (insn);
1216
1217 if (!x)
1218 return NULL_RTX;
1219 x = SET_SRC (x);
1220 if (GET_CODE (x) == LABEL_REF)
1221 return x;
1222 if (GET_CODE (x) != IF_THEN_ELSE)
1223 return NULL_RTX;
1224 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1225 return XEXP (x, 1);
1226 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1227 return XEXP (x, 2);
1228 return NULL_RTX;
1229}
1230
1231/* Return true if INSN is a (possibly conditional) return insn. */
1232
1233static int
1234returnjump_p_1 (loc, data)
1235 rtx *loc;
1236 void *data ATTRIBUTE_UNUSED;
1237{
1238 rtx x = *loc;
1239
1240 return x && (GET_CODE (x) == RETURN
1241 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1242}
1243
1244int
1245returnjump_p (insn)
1246 rtx insn;
1247{
1248 if (GET_CODE (insn) != JUMP_INSN)
1249 return 0;
1250 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1251}
1252
1253/* Return true if INSN is a jump that only transfers control and
1254 nothing more. */
1255
1256int
1257onlyjump_p (insn)
1258 rtx insn;
1259{
1260 rtx set;
1261
1262 if (GET_CODE (insn) != JUMP_INSN)
1263 return 0;
1264
1265 set = single_set (insn);
1266 if (set == NULL)
1267 return 0;
1268 if (GET_CODE (SET_DEST (set)) != PC)
1269 return 0;
1270 if (side_effects_p (SET_SRC (set)))
1271 return 0;
1272
1273 return 1;
1274}
1275
1276#ifdef HAVE_cc0
1277
1278/* Return non-zero if X is an RTX that only sets the condition codes
1279 and has no side effects. */
1280
1281int
1282only_sets_cc0_p (x)
1283 rtx x;
1284{
1285
1286 if (! x)
1287 return 0;
1288
1289 if (INSN_P (x))
1290 x = PATTERN (x);
1291
1292 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1293}
1294
1295/* Return 1 if X is an RTX that does nothing but set the condition codes
1296 and CLOBBER or USE registers.
1297 Return -1 if X does explicitly set the condition codes,
1298 but also does other things. */
1299
1300int
1301sets_cc0_p (x)
1302 rtx x;
1303{
1304
1305 if (! x)
1306 return 0;
1307
1308 if (INSN_P (x))
1309 x = PATTERN (x);
1310
1311 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1312 return 1;
1313 if (GET_CODE (x) == PARALLEL)
1314 {
1315 int i;
1316 int sets_cc0 = 0;
1317 int other_things = 0;
1318 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1319 {
1320 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1321 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1322 sets_cc0 = 1;
1323 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1324 other_things = 1;
1325 }
1326 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1327 }
1328 return 0;
1329}
1330#endif
1331
1332/* Follow any unconditional jump at LABEL;
1333 return the ultimate label reached by any such chain of jumps.
1334 If LABEL is not followed by a jump, return LABEL.
1335 If the chain loops or we can't find end, return LABEL,
1336 since that tells caller to avoid changing the insn.
1337
1338 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1339 a USE or CLOBBER. */
1340
rtx
follow_jumps (label)
     rtx label;
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* Each iteration requires the first active insn after VALUE to be a
     simple unconditional jump (or a RETURN) followed by a BARRIER.
     The depth bound of 10 guards against very long or looping chains.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Stop if following this jump would land on a dispatch table.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* If we bailed out on depth, the chain may loop; returning LABEL
     tells the caller to leave the insn alone.  */
  if (depth == 10)
    return label;
  return value;
}
1390
1391
1392/* Find all CODE_LABELs referred to in X, and increment their use counts.
1393 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1394 in INSN, then store one of them in JUMP_LABEL (INSN).
1395 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1396 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1397 Also, when there are consecutive labels, canonicalize on the last of them.
1398
1399 Note that two labels separated by a loop-beginning note
1400 must be kept distinct if we have not yet done loop-optimization,
1401 because the gap between them is where loop-optimize
1402 will want to move invariant code to. CROSS_JUMP tells us
1403 that loop-optimization is done with. */
1404
void
mark_jump_label (x, insn, in_mem)
     rtx x;
     rtx insn;
     int in_mem;
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* None of these codes can contain a label reference worth
       recording; cut the recursion short.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Remember we are inside a MEM, so SYMBOL_REFs below get
	 examined for constant-pool labels.  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (GET_CODE (label) == NOTE
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* Count the use, unless the referencing insn has itself been
	   deleted.  */
	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  /* Recurse into every subexpression not handled by the cases above.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
1509
1510/* If all INSN does is set the pc, delete it,
1511 and delete the insn that set the condition codes for it
1512 if that's what the previous thing was. */
1513
1514void
1515delete_jump (insn)
1516 rtx insn;
1517{
1518 rtx set = single_set (insn);
1519
1520 if (set && GET_CODE (SET_DEST (set)) == PC)
1521 delete_computation (insn);
1522}
1523
1524/* Verify INSN is a BARRIER and delete it. */
1525
1526void
1527delete_barrier (insn)
1528 rtx insn;
1529{
1530 if (GET_CODE (insn) != BARRIER)
1531 abort ();
1532
1533 delete_insn (insn);
1534}
1535
1536/* Recursively delete prior insns that compute the value (used only by INSN
1537 which the caller is deleting) stored in the register mentioned by NOTE
1538 which is a REG_DEAD note associated with INSN. */
1539
static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  /* Scan backwards over ordinary insns and calls; stop at anything
     else (labels, jumps, start of function).  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      /* The loop ran to completion, so every SET targets REG;
		 the whole insn only computed the dead value.  */
	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (dest_regno,
					  GET_MODE (SET_DEST (pat))) : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));

	      /* The destination lies entirely within the dead REG, so
		 the insn computed nothing that is still needed.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  /* Only delete the setter once every word it sets is
		     covered by a REG_UNUSED note.  */
		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
1652
1653/* Delete INSN and recursively delete insns that compute values used only
1654 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1655 If we are running before flow.c, we need do nothing since flow.c will
1656 delete dead code. We also can't know if the registers being used are
1657 dead or not at this point.
1658
1659 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1660 nothing other than set a register that dies in this insn, we can delete
1661 that insn as well.
1662
1663 On machines with CC0, if CC0 is used in this insn, we may be able to
1664 delete the insn that set it. */
1665
static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means the insn sets cc0 and nothing else.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* Each REG_DEAD note marks a register that dies in INSN; try to
     delete the earlier insn that computed its value.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  /* Finally delete INSN itself and any code made unreachable by it.  */
  delete_related_insns (insn);
}
1710
1711/* Delete insn INSN from the chain of insns and update label ref counts
1712 and delete insns now unreachable.
1713
1714 Returns the first insn after INSN that was not deleted.
1715
1716 Usage of this instruction is deprecated. Use delete_insn instead and
1717 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1718
rtx
delete_related_insns (insn)
     rtx insn;
{
  int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Find the first non-deleted insn after INSN to return.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete each table target label whose use count has dropped
	 to zero.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Back up over deleted insns and notes to the live predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  /* Notes survive, except that we stop skipping at the
	     function-end note.  */
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	}
    }

  return next;
}
1845
1846/* Advance from INSN till reaching something not deleted
1847 then return that. May return INSN itself. */
1848
1849rtx
1850next_nondeleted_insn (insn)
1851 rtx insn;
1852{
1853 while (INSN_DELETED_P (insn))
1854 insn = NEXT_INSN (insn);
1855 return insn;
1856}
1857
1858/* Delete a range of insns from FROM to TO, inclusive.
1859 This is for the sake of peephole optimization, so assume
1860 that whatever these insns do will still be done by a new
1861 peephole insn that will replace them. */
1862
1863void
1864delete_for_peephole (from, to)
1865 rtx from, to;
1866{
1867 rtx insn = from;
1868
1869 while (1)
1870 {
1871 rtx next = NEXT_INSN (insn);
1872 rtx prev = PREV_INSN (insn);
1873
1874 if (GET_CODE (insn) != NOTE)
1875 {
1876 INSN_DELETED_P (insn) = 1;
1877
1878 /* Patch this insn out of the chain. */
1879 /* We don't do this all at once, because we
1880 must preserve all NOTEs. */
1881 if (prev)
1882 NEXT_INSN (prev) = next;
1883
1884 if (next)
1885 PREV_INSN (next) = prev;
1886 }
1887
1888 if (insn == to)
1889 break;
1890 insn = next;
1891 }
1892
1893 /* Note that if TO is an unconditional jump
1894 we *do not* delete the BARRIER that follows,
1895 since the peephole that replaces this sequence
1896 is also an unconditional jump in that case. */
1897}
1898
1899/* We have determined that INSN is never reached, and are about to
1900 delete it. Print a warning if the user asked for one.
1901
1902 To try to make this warning more useful, this should only be called
1903 once per basic block not reached, and it only warns when the basic
1904 block contains more than one line from the current function, and
1905 contains at least one operation. CSE and inlining can duplicate insns,
1906 so it's possible to get spurious warnings from this. */
1907
void
never_reached_warning (avoided_insn, finish)
     rtx avoided_insn, finish;
{
  rtx insn;
  rtx a_line_note = NULL;
  int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;

  if (! warn_notreached)
    return;

  /* Scan forwards, looking at LINE_NUMBER notes, until
     we hit a LABEL or we run out of insns.  */

  for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
    {
      /* With no explicit FINISH, a label ends the dead region.  */
      if (finish == NULL && GET_CODE (insn) == CODE_LABEL)
	break;

      if (GET_CODE (insn) == NOTE		/* A line number note?  */
	  && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    /* Record whether the region spans more than one source
	       line; the warning only fires in that case.  */
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (INSN_P (insn))
	{
	  /* A real insn after FINISH means we are past the region.  */
	  if (reached_end)
	    break;
	  contains_insn = 1;
	}

      if (insn == finish)
	reached_end = 1;
    }
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
1951
1952/* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1953 NLABEL as a return. Accrue modifications into the change group. */
1954
static void
redirect_exp_1 (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  /* A reference to OLABEL becomes a reference to NLABEL, or a RETURN
     when NLABEL is null.  */
  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  /* Conversely, a RETURN becomes a reference to NLABEL when OLABEL is
     null; at the top level it must be wrapped in a SET of PC.  */
  else if (code == RETURN && olabel == 0)
    {
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* An unconditional jump to OLABEL being turned into a return is
     replaced wholesale by a RETURN pattern.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse into all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
2010
2011/* Similar, but apply the change group and report success or failure. */
2012
2013static int
2014redirect_exp (olabel, nlabel, insn)
2015 rtx olabel, nlabel;
2016 rtx insn;
2017{
2018 rtx *loc;
2019
2020 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2021 loc = &XVECEXP (PATTERN (insn), 0, 0);
2022 else
2023 loc = &PATTERN (insn);
2024
2025 redirect_exp_1 (loc, olabel, nlabel, insn);
2026 if (num_validated_changes () == 0)
2027 return 0;
2028
2029 return apply_change_group ();
2030}
2031
2032/* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2033 the modifications into the change group. Return false if we did
2034 not see how to do that. */
2035
2036int
2037redirect_jump_1 (jump, nlabel)
2038 rtx jump, nlabel;
2039{
2040 int ochanges = num_validated_changes ();
2041 rtx *loc;
2042
2043 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2044 loc = &XVECEXP (PATTERN (jump), 0, 0);
2045 else
2046 loc = &PATTERN (jump);
2047
2048 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2049 return num_validated_changes () > ochanges;
2050}
2051
2052/* Make JUMP go to NLABEL instead of where it jumps now. If the old
2053 jump target label is unused as a result, it and the code following
2054 it may be deleted.
2055
2056 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2057 RETURN insn.
2058
2059 The return value will be 1 if the change was made, 0 if it wasn't
2060 (this can only occur for NLABEL == 0). */
2061
int
redirect_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  rtx olabel = JUMP_LABEL (jump);

  /* Redirecting to the current target is trivially successful.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  /* Keep JUMP_LABEL and the labels' use counts up to date.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  /* Drop the old label (and any code it alone kept alive) if it has
     become unused and the caller asked for that.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  return 1;
}
2094
2095/* Invert the jump condition of rtx X contained in jump insn, INSN.
2096 Accrue the modifications into the change group. */
2097
static void
invert_exp_1 (insn)
     rtx insn;
{
  RTX_CODE code;
  rtx x = pc_set (insn);

  /* Callers guarantee INSN has a PC set.  */
  if (!x)
    abort ();
  x = SET_SRC (x);

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return;
	}

      /* Couldn't reverse the condition; queue a swap of the two arms
	 instead.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
    }
  else
    abort ();
}
2141
2142/* Invert the jump condition of conditional jump insn, INSN.
2143
2144 Return 1 if we can do so, 0 if we cannot find a way to do so that
2145 matches a pattern. */
2146
static int
invert_exp (insn)
     rtx insn;
{
  /* Queue the inversion; if nothing was queued, it found nothing
     to do.  */
  invert_exp_1 (insn);
  if (num_validated_changes () == 0)
    return 0;

  /* Commit the queued changes, verifying the insn still matches.  */
  return apply_change_group ();
}
2157
2158/* Invert the condition of the jump JUMP, and make it jump to label
2159 NLABEL instead of where it jumps now. Accrue changes into the
2160 change group. Return false if we didn't see how to perform the
2161 inversion and redirection. */
2162
2163int
2164invert_jump_1 (jump, nlabel)
2165 rtx jump, nlabel;
2166{
2167 int ochanges;
2168
2169 ochanges = num_validated_changes ();
2170 invert_exp_1 (jump);
2171 if (num_validated_changes () == ochanges)
2172 return 0;
2173
2174 return redirect_jump_1 (jump, nlabel);
2175}
2176
2177/* Invert the condition of the jump JUMP, and make it jump to label
2178 NLABEL instead of where it jumps now. Return true if successful. */
2179
2180int
2181invert_jump (jump, nlabel, delete_unused)
2182 rtx jump, nlabel;
2183 int delete_unused;
2184{
2185 /* We have to either invert the condition and change the label or
2186 do neither. Either operation could fail. We first try to invert
2187 the jump. If that succeeds, we try changing the label. If that fails,
2188 we invert the jump back to what it was. */
2189
2190 if (! invert_exp (jump))
2191 return 0;
2192
2193 if (redirect_jump (jump, nlabel, delete_unused))
2194 {
2195 invert_br_probabilities (jump);
2196
2197 return 1;
2198 }
2199
2200 if (! invert_exp (jump))
2201 /* This should just be putting it back the way it was. */
2202 abort ();
2203
2204 return 0;
2205}
2206
2207
2208/* Like rtx_equal_p except that it considers two REGs as equal
2209 if they renumber to the same value and considers two commutative
2210 operations to be the same if the order of the operands has been
2211 reversed.
2212
2213 ??? Addition is not commutative on the PA due to the weird implicit
2214 space register selection rules for memory addresses. Therefore, we
2215 don't consider a + b == b + a.
2216
2217 We could/should make this test a little tighter. Possibly only
2218 disabling it on the PA via some backend macro or only disabling this
2219 case when the PLUS is inside a MEM. */
2220
int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* REGs and SUBREGs of REGs: apply reg_renumber to both sides and
     compare the resulting register numbers and subreg offsets.  */
  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      /* Fold the subreg byte offset into the renumbered
		 hard register number.  */
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.

     ??? Don't consider PLUS a commutative operator; see comments above.  */
  if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
      && code != PLUS)
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* 'u' is a pointer field; compare by identity.  */
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
2405
2406/* If X is a hard register or equivalent to one or a subregister of one,
2407 return the hard register number. If X is a pseudo register that was not
2408 assigned a hard register, return the pseudo register number. Otherwise,
2409 return -1. Any rtx is valid for X. */
2410
2411int
2412true_regnum (x)
2413 rtx x;
2414{
2415 if (GET_CODE (x) == REG)
2416 {
2417 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2418 return reg_renumber[REGNO (x)];
2419 return REGNO (x);
2420 }
2421 if (GET_CODE (x) == SUBREG)
2422 {
2423 int base = true_regnum (SUBREG_REG (x));
2424 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2425 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2426 GET_MODE (SUBREG_REG (x)),
2427 SUBREG_BYTE (x), GET_MODE (x));
2428 }
2429 return -1;
2430}