1/* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "tm_p.h"
27#include "insn-config.h"
28#include "insn-attr.h"
29#include "hard-reg-set.h"
30#include "recog.h"
31#include "regs.h"
32#include "expr.h"
33#include "function.h"
34#include "flags.h"
35#include "real.h"
36#include "toplev.h"
37#include "basic-block.h"
38#include "output.h"
39#include "reload.h"
40
41#ifndef STACK_PUSH_CODE
42#ifdef STACK_GROWS_DOWNWARD
43#define STACK_PUSH_CODE PRE_DEC
44#else
45#define STACK_PUSH_CODE PRE_INC
46#endif
47#endif
48
49#ifndef STACK_POP_CODE
50#ifdef STACK_GROWS_DOWNWARD
51#define STACK_POP_CODE POST_INC
52#else
53#define STACK_POP_CODE POST_DEC
54#endif
55#endif
56
57static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
58static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
59static void validate_replace_src_1 PARAMS ((rtx *, void *));
60static rtx split_insn PARAMS ((rtx));
61
62/* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
67
68 init_recog and init_recog_no_volatile are responsible for setting this. */
69
70int volatile_ok;
71
72struct recog_data recog_data;
73
74/* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77
78/* On return from `constrain_operands', indicate which alternative
79 was satisfied. */
80
81int which_alternative;
82
83/* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
86
87int reload_completed;
88
89/* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
92
93void
94init_recog_no_volatile ()
95{
96 volatile_ok = 0;
97}
98
99void
100init_recog ()
101{
102 volatile_ok = 1;
103}
104
105/* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
109
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
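/* Illustrative usage sketch (not part of GCC proper): passes normally
   reach this function through the recog_memoized macro in recog.h,
   which only calls here on a cache miss:

     if (recog_memoized (insn) < 0)
       ... INSN matched no define_insn; it may be an asm or barrier ...

   Repeated queries are cheap because the result is cached in
   INSN_CODE.  */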
113
114int
115recog_memoized_1 (insn)
116 rtx insn;
117{
118 if (INSN_CODE (insn) < 0)
119 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
120 return INSN_CODE (insn);
121}
122
123/* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
125
126int
127check_asm_operands (x)
128 rtx x;
129{
130 int noperands;
131 rtx *operands;
132 const char **constraints;
133 int i;
134
135 /* Post-reload, be more strict with things. */
136 if (reload_completed)
137 {
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x));
140 constrain_operands (1);
141 return which_alternative >= 0;
142 }
143
144 noperands = asm_noperands (x);
145 if (noperands < 0)
146 return 0;
147 if (noperands == 0)
148 return 1;
149
150 operands = (rtx *) alloca (noperands * sizeof (rtx));
151 constraints = (const char **) alloca (noperands * sizeof (char *));
152
153 decode_asm_operands (x, operands, NULL, constraints, NULL);
154
155 for (i = 0; i < noperands; i++)
156 {
157 const char *c = constraints[i];
158 if (c[0] == '%')
159 c++;
160 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
161 c = constraints[c[0] - '0'];
162
163 if (! asm_operand_ok (operands[i], c))
164 return 0;
165 }
166
167 return 1;
168}
169
170/* Static data for the next two routines. */
171
172typedef struct change_t
173{
174 rtx object;
175 int old_code;
176 rtx *loc;
177 rtx old;
178} change_t;
179
180static change_t *changes;
181static int changes_allocated;
182
183static int num_changes = 0;
184
185/* Validate a proposed change to OBJECT. LOC is the location in the rtl
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
188
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
192 the change in place.
193
194 IN_GROUP is nonzero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
197
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
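/* Usage sketch (illustrative only; NEW0 and NEW1 are hypothetical
   replacement rtxes): two replacements that must succeed or fail
   together are queued with IN_GROUP nonzero and validated at once:

     validate_change (insn, &XEXP (x, 0), new0, 1);
     validate_change (insn, &XEXP (x, 1), new1, 1);
     if (! apply_change_group ())
       ... both changes have already been backed out ...  */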
202
203int
204validate_change (object, loc, new, in_group)
205 rtx object;
206 rtx *loc;
207 rtx new;
208 int in_group;
209{
210 rtx old = *loc;
211
212 if (old == new || rtx_equal_p (old, new))
213 return 1;
214
215 if (in_group == 0 && num_changes != 0)
216 abort ();
217
218 *loc = new;
219
220 /* Save the information describing this change. */
221 if (num_changes >= changes_allocated)
222 {
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 else
228 changes_allocated *= 2;
229
230 changes =
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
233 }
234
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
238
239 if (object && GET_CODE (object) != MEM)
240 {
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 case invalid. */
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
245 }
246
247 num_changes++;
248
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
251
252 if (in_group)
253 return 1;
254 else
255 return apply_change_group ();
256}
257
258/* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
260
261int
262insn_invalid_p (insn)
263 rtx insn;
264{
265 rtx pat = PATTERN (insn);
266 int num_clobbers = 0;
267 /* If we are before reload and the pattern is a SET, see if we can add
268 clobbers. */
269 int icode = recog (pat, insn,
270 (GET_CODE (pat) == SET
271 && ! reload_completed && ! reload_in_progress)
272 ? &num_clobbers : 0);
273 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
274
275
 276 /* If this is an asm and the operands aren't legal, then fail. Likewise if
277 this is not an asm and the insn wasn't recognized. */
278 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
279 || (!is_asm && icode < 0))
280 return 1;
281
282 /* If we have to add CLOBBERs, fail if we have to add ones that reference
283 hard registers since our callers can't know if they are live or not.
284 Otherwise, add them. */
285 if (num_clobbers > 0)
286 {
287 rtx newpat;
288
289 if (added_clobbers_hard_reg_p (icode))
290 return 1;
291
292 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
293 XVECEXP (newpat, 0, 0) = pat;
294 add_clobbers (newpat, icode);
295 PATTERN (insn) = pat = newpat;
296 }
297
298 /* After reload, verify that all constraints are satisfied. */
299 if (reload_completed)
300 {
301 extract_insn (insn);
302
303 if (! constrain_operands (1))
304 return 1;
305 }
306
307 INSN_CODE (insn) = icode;
308 return 0;
309}
310
311/* Return number of changes made and not validated yet. */
312int
313num_changes_pending ()
314{
315 return num_changes;
316}
317
318/* Apply a group of changes previously issued with `validate_change'.
319 Return 1 if all changes are valid, zero otherwise. */
320
321int
322apply_change_group ()
323{
324 int i;
325 rtx last_validated = NULL_RTX;
326
327 /* The changes have been applied and all INSN_CODEs have been reset to force
328 rerecognition.
329
330 The changes are valid if we aren't given an object, or if we are
 331 given a MEM and it still is a valid address, or if this is an insn
332 and it is recognized. In the latter case, if reload has completed,
333 we also require that the operands meet the constraints for
334 the insn. */
335
336 for (i = 0; i < num_changes; i++)
337 {
338 rtx object = changes[i].object;
339
 340 /* If there is no object to test or if it is the same as the one we
341 already tested, ignore it. */
342 if (object == 0 || object == last_validated)
343 continue;
344
345 if (GET_CODE (object) == MEM)
346 {
347 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
348 break;
349 }
350 else if (insn_invalid_p (object))
351 {
352 rtx pat = PATTERN (object);
353
354 /* Perhaps we couldn't recognize the insn because there were
355 extra CLOBBERs at the end. If so, try to re-recognize
356 without the last CLOBBER (later iterations will cause each of
357 them to be eliminated, in turn). But don't do this if we
358 have an ASM_OPERAND. */
359 if (GET_CODE (pat) == PARALLEL
360 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
361 && asm_noperands (PATTERN (object)) < 0)
362 {
363 rtx newpat;
364
365 if (XVECLEN (pat, 0) == 2)
366 newpat = XVECEXP (pat, 0, 0);
367 else
368 {
369 int j;
370
371 newpat
372 = gen_rtx_PARALLEL (VOIDmode,
373 rtvec_alloc (XVECLEN (pat, 0) - 1));
374 for (j = 0; j < XVECLEN (newpat, 0); j++)
375 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
376 }
377
378 /* Add a new change to this group to replace the pattern
379 with this new pattern. Then consider this change
380 as having succeeded. The change we added will
381 cause the entire call to fail if things remain invalid.
382
383 Note that this can lose if a later change than the one
384 we are processing specified &XVECEXP (PATTERN (object), 0, X)
385 but this shouldn't occur. */
386
387 validate_change (object, &PATTERN (object), newpat, 1);
388 continue;
389 }
390 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
391 /* If this insn is a CLOBBER or USE, it is always valid, but is
392 never recognized. */
393 continue;
394 else
395 break;
396 }
397 last_validated = object;
398 }
399
400 if (i == num_changes)
401 {
402 basic_block bb;
403
404 for (i = 0; i < num_changes; i++)
405 if (changes[i].object
406 && INSN_P (changes[i].object)
407 && (bb = BLOCK_FOR_INSN (changes[i].object)))
408 bb->flags |= BB_DIRTY;
409
410 num_changes = 0;
411 return 1;
412 }
413 else
414 {
415 cancel_changes (0);
416 return 0;
417 }
418}
419
420/* Return the number of changes so far in the current group. */
421
422int
423num_validated_changes ()
424{
425 return num_changes;
426}
427
428/* Retract the changes numbered NUM and up. */
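/* Sketch of a common pattern (illustrative; the extra test is
   hypothetical): record the current group size, queue a tentative
   change, and retract only that tail if the extra condition fails:

     int n = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (! extra_condition_holds)
       cancel_changes (n);  */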
429
430void
431cancel_changes (num)
432 int num;
433{
434 int i;
435
436 /* Back out all the changes. Do this in the opposite order in which
437 they were made. */
438 for (i = num_changes - 1; i >= num; i--)
439 {
440 *changes[i].loc = changes[i].old;
441 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
442 INSN_CODE (changes[i].object) = changes[i].old_code;
443 }
444 num_changes = num;
445}
446
447/* Replace every occurrence of FROM in X with TO. Mark each change with
448 validate_change passing OBJECT. */
449
450static void
451validate_replace_rtx_1 (loc, from, to, object)
452 rtx *loc;
453 rtx from, to, object;
454{
455 int i, j;
456 const char *fmt;
457 rtx x = *loc;
458 enum rtx_code code;
459 enum machine_mode op0_mode = VOIDmode;
460 int prev_changes = num_changes;
461 rtx new;
462
463 if (!x)
464 return;
465
466 code = GET_CODE (x);
467 fmt = GET_RTX_FORMAT (code);
468 if (fmt[0] == 'e')
469 op0_mode = GET_MODE (XEXP (x, 0));
470
471 /* X matches FROM if it is the same rtx or they are both referring to the
472 same register in the same mode. Avoid calling rtx_equal_p unless the
473 operands look similar. */
474
475 if (x == from
476 || (GET_CODE (x) == REG && GET_CODE (from) == REG
477 && GET_MODE (x) == GET_MODE (from)
478 && REGNO (x) == REGNO (from))
479 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
480 && rtx_equal_p (x, from)))
481 {
482 validate_change (object, loc, to, 1);
483 return;
484 }
485
486 /* Call ourself recursively to perform the replacements. */
487
488 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
489 {
490 if (fmt[i] == 'e')
491 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
492 else if (fmt[i] == 'E')
493 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
494 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
495 }
496
497 /* If we didn't substitute, there is nothing more to do. */
498 if (num_changes == prev_changes)
499 return;
500
501 /* Allow substituted expression to have different mode. This is used by
502 regmove to change mode of pseudo register. */
503 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
504 op0_mode = GET_MODE (XEXP (x, 0));
505
506 /* Do changes needed to keep rtx consistent. Don't do any other
507 simplifications, as it is not our job. */
508
509 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
510 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
511 {
512 validate_change (object, loc,
513 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
514 : swap_condition (code),
515 GET_MODE (x), XEXP (x, 1),
516 XEXP (x, 0)), 1);
517 x = *loc;
518 code = GET_CODE (x);
519 }
520
521 switch (code)
522 {
523 case PLUS:
524 /* If we have a PLUS whose second operand is now a CONST_INT, use
525 simplify_gen_binary to try to simplify it.
526 ??? We may want later to remove this, once simplification is
527 separated from this function. */
528 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
529 validate_change (object, loc,
530 simplify_gen_binary
531 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
532 break;
533 case MINUS:
534 if (GET_CODE (XEXP (x, 1)) == CONST_INT
535 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
536 validate_change (object, loc,
537 simplify_gen_binary
538 (PLUS, GET_MODE (x), XEXP (x, 0),
539 simplify_gen_unary (NEG,
540 GET_MODE (x), XEXP (x, 1),
541 GET_MODE (x))), 1);
542 break;
543 case ZERO_EXTEND:
544 case SIGN_EXTEND:
545 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
546 {
547 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
548 op0_mode);
549 /* If any of the above failed, substitute in something that
550 we know won't be recognized. */
551 if (!new)
552 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
553 validate_change (object, loc, new, 1);
554 }
555 break;
556 case SUBREG:
557 /* All subregs possible to simplify should be simplified. */
558 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
559 SUBREG_BYTE (x));
560
561 /* Subregs of VOIDmode operands are incorrect. */
562 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
563 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
564 if (new)
565 validate_change (object, loc, new, 1);
566 break;
567 case ZERO_EXTRACT:
568 case SIGN_EXTRACT:
569 /* If we are replacing a register with memory, try to change the memory
570 to be the mode required for memory in extract operations (this isn't
571 likely to be an insertion operation; if it was, nothing bad will
572 happen, we might just fail in some cases). */
573
574 if (GET_CODE (XEXP (x, 0)) == MEM
575 && GET_CODE (XEXP (x, 1)) == CONST_INT
576 && GET_CODE (XEXP (x, 2)) == CONST_INT
577 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
578 && !MEM_VOLATILE_P (XEXP (x, 0)))
579 {
580 enum machine_mode wanted_mode = VOIDmode;
581 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
582 int pos = INTVAL (XEXP (x, 2));
583
584 if (GET_CODE (x) == ZERO_EXTRACT)
585 {
586 enum machine_mode new_mode
587 = mode_for_extraction (EP_extzv, 1);
588 if (new_mode != MAX_MACHINE_MODE)
589 wanted_mode = new_mode;
590 }
591 else if (GET_CODE (x) == SIGN_EXTRACT)
592 {
593 enum machine_mode new_mode
594 = mode_for_extraction (EP_extv, 1);
595 if (new_mode != MAX_MACHINE_MODE)
596 wanted_mode = new_mode;
597 }
598
599 /* If we have a narrower mode, we can do something. */
600 if (wanted_mode != VOIDmode
601 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
602 {
603 int offset = pos / BITS_PER_UNIT;
604 rtx newmem;
605
606 /* If the bytes and bits are counted differently, we
607 must adjust the offset. */
608 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
609 offset =
610 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
611 offset);
612
613 pos %= GET_MODE_BITSIZE (wanted_mode);
614
615 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
616
617 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
618 validate_change (object, &XEXP (x, 0), newmem, 1);
619 }
620 }
621
622 break;
623
624 default:
625 break;
626 }
627}
628
629/* Try replacing every occurrence of FROM in subexpression LOC of INSN
630 with TO. After all changes have been made, validate by seeing
631 if INSN is still valid. */
632
633int
634validate_replace_rtx_subexp (from, to, insn, loc)
635 rtx from, to, insn, *loc;
636{
637 validate_replace_rtx_1 (loc, from, to, insn);
638 return apply_change_group ();
639}
640
641/* Try replacing every occurrence of FROM in INSN with TO. After all
642 changes have been made, validate by seeing if INSN is still valid. */
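/* Usage sketch (illustrative; DST_REG, SRC_REG and USE_INSN are
   hypothetical locals): copy propagation can be attempted safely,
   since a failed replacement leaves the insn untouched:

     if (validate_replace_rtx (dst_reg, src_reg, use_insn))
       ... USE_INSN now uses SRC_REG and still matches its pattern ...  */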
643
644int
645validate_replace_rtx (from, to, insn)
646 rtx from, to, insn;
647{
648 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
649 return apply_change_group ();
650}
651
652/* Try replacing every occurrence of FROM in INSN with TO. */
653
654void
655validate_replace_rtx_group (from, to, insn)
656 rtx from, to, insn;
657{
658 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
659}
660
661/* Function called by note_uses to replace used subexpressions. */
662struct validate_replace_src_data
663{
664 rtx from; /* Old RTX */
665 rtx to; /* New RTX */
666 rtx insn; /* Insn in which substitution is occurring. */
667};
668
669static void
670validate_replace_src_1 (x, data)
671 rtx *x;
672 void *data;
673{
674 struct validate_replace_src_data *d
675 = (struct validate_replace_src_data *) data;
676
677 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
678}
679
680/* Try replacing every occurrence of FROM in INSN with TO, avoiding
 681 SET_DESTs. */
682
683void
684validate_replace_src_group (from, to, insn)
685 rtx from, to, insn;
686{
687 struct validate_replace_src_data d;
688
689 d.from = from;
690 d.to = to;
691 d.insn = insn;
692 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
693}
694
 695/* Same as validate_replace_src_group, but validate by seeing if
696 INSN is still valid. */
697int
698validate_replace_src (from, to, insn)
699 rtx from, to, insn;
700{
701 validate_replace_src_group (from, to, insn);
702 return apply_change_group ();
703}
704
705#ifdef HAVE_cc0
706/* Return 1 if the insn using CC0 set by INSN does not contain
707 any ordered tests applied to the condition codes.
708 EQ and NE tests do not count. */
709
710int
711next_insn_tests_no_inequality (insn)
712 rtx insn;
713{
714 rtx next = next_cc0_user (insn);
715
716 /* If there is no next insn, we have to take the conservative choice. */
717 if (next == 0)
718 return 0;
719
720 return ((GET_CODE (next) == JUMP_INSN
721 || GET_CODE (next) == INSN
722 || GET_CODE (next) == CALL_INSN)
723 && ! inequality_comparisons_p (PATTERN (next)));
724}
725
726#if 0 /* This is useless since the insn that sets the cc's
727 must be followed immediately by the use of them. */
728/* Return 1 if the CC value set up by INSN is not used. */
729
730int
731next_insns_test_no_inequality (insn)
732 rtx insn;
733{
734 rtx next = NEXT_INSN (insn);
735
736 for (; next != 0; next = NEXT_INSN (next))
737 {
738 if (GET_CODE (next) == CODE_LABEL
739 || GET_CODE (next) == BARRIER)
740 return 1;
741 if (GET_CODE (next) == NOTE)
742 continue;
743 if (inequality_comparisons_p (PATTERN (next)))
744 return 0;
745 if (sets_cc0_p (PATTERN (next)) == 1)
746 return 1;
747 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
748 return 1;
749 }
750 return 1;
751}
752#endif
753#endif
754
755/* This is used by find_single_use to locate an rtx that contains exactly one
756 use of DEST, which is typically either a REG or CC0. It returns a
757 pointer to the innermost rtx expression containing DEST. Appearances of
758 DEST that are being used to totally replace it are not counted. */
759
760static rtx *
761find_single_use_1 (dest, loc)
762 rtx dest;
763 rtx *loc;
764{
765 rtx x = *loc;
766 enum rtx_code code = GET_CODE (x);
767 rtx *result = 0;
768 rtx *this_result;
769 int i;
770 const char *fmt;
771
772 switch (code)
773 {
774 case CONST_INT:
775 case CONST:
776 case LABEL_REF:
777 case SYMBOL_REF:
778 case CONST_DOUBLE:
779 case CONST_VECTOR:
780 case CLOBBER:
781 return 0;
782
783 case SET:
784 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
785 of a REG that occupies all of the REG, the insn uses DEST if
786 it is mentioned in the destination or the source. Otherwise, we
787 need just check the source. */
788 if (GET_CODE (SET_DEST (x)) != CC0
789 && GET_CODE (SET_DEST (x)) != PC
790 && GET_CODE (SET_DEST (x)) != REG
791 && ! (GET_CODE (SET_DEST (x)) == SUBREG
792 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
793 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
794 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
795 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
796 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
797 break;
798
799 return find_single_use_1 (dest, &SET_SRC (x));
800
801 case MEM:
802 case SUBREG:
803 return find_single_use_1 (dest, &XEXP (x, 0));
804
805 default:
806 break;
807 }
808
809 /* If it wasn't one of the common cases above, check each expression and
810 vector of this code. Look for a unique usage of DEST. */
811
812 fmt = GET_RTX_FORMAT (code);
813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
814 {
815 if (fmt[i] == 'e')
816 {
817 if (dest == XEXP (x, i)
818 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
819 && REGNO (dest) == REGNO (XEXP (x, i))))
820 this_result = loc;
821 else
822 this_result = find_single_use_1 (dest, &XEXP (x, i));
823
824 if (result == 0)
825 result = this_result;
826 else if (this_result)
827 /* Duplicate usage. */
828 return 0;
829 }
830 else if (fmt[i] == 'E')
831 {
832 int j;
833
834 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
835 {
836 if (XVECEXP (x, i, j) == dest
837 || (GET_CODE (dest) == REG
838 && GET_CODE (XVECEXP (x, i, j)) == REG
839 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
840 this_result = loc;
841 else
842 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
843
844 if (result == 0)
845 result = this_result;
846 else if (this_result)
847 return 0;
848 }
849 }
850 }
851
852 return result;
853}
854
855/* See if DEST, produced in INSN, is used only a single time in the
856 sequel. If so, return a pointer to the innermost rtx expression in which
857 it is used.
858
859 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
860
 861 This routine will usually return zero either before flow is called (because
862 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
863 note can't be trusted).
864
865 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
866 care about REG_DEAD notes or LOG_LINKS.
867
868 Otherwise, we find the single use by finding an insn that has a
869 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
870 only referenced once in that insn, we know that it must be the first
871 and last insn referencing DEST. */
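/* Usage sketch (illustrative): a combine-like transformation might do

     rtx use_insn;
     rtx *usep = find_single_use (dest, insn, &use_insn);
     if (usep)
       ... *USEP is the unique reference to DEST, inside USE_INSN ...

   and then substitute through *USEP with validate_change.  */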
872
873rtx *
874find_single_use (dest, insn, ploc)
875 rtx dest;
876 rtx insn;
877 rtx *ploc;
878{
879 rtx next;
880 rtx *result;
881 rtx link;
882
883#ifdef HAVE_cc0
884 if (dest == cc0_rtx)
885 {
886 next = NEXT_INSN (insn);
887 if (next == 0
888 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
889 return 0;
890
891 result = find_single_use_1 (dest, &PATTERN (next));
892 if (result && ploc)
893 *ploc = next;
894 return result;
895 }
896#endif
897
898 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
899 return 0;
900
901 for (next = next_nonnote_insn (insn);
902 next != 0 && GET_CODE (next) != CODE_LABEL;
903 next = next_nonnote_insn (next))
904 if (INSN_P (next) && dead_or_set_p (next, dest))
905 {
906 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
907 if (XEXP (link, 0) == insn)
908 break;
909
910 if (link)
911 {
912 result = find_single_use_1 (dest, &PATTERN (next));
913 if (ploc)
914 *ploc = next;
915 return result;
916 }
917 }
918
919 return 0;
920}
921
922/* Return 1 if OP is a valid general operand for machine mode MODE.
923 This is either a register reference, a memory reference,
924 or a constant. In the case of a memory reference, the address
925 is checked for general validity for the target machine.
926
927 Register and memory references must have mode MODE in order to be valid,
928 but some constants have no machine mode and are valid for any mode.
929
930 If MODE is VOIDmode, OP is checked for validity for whatever mode
931 it has.
932
933 The main use of this function is as a predicate in match_operand
934 expressions in the machine description.
935
936 For an explanation of this function's behavior for registers of
937 class NO_REGS, see the comment for `register_operand'. */
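/* For reference, a sketch of the typical machine-description use; the
   name, mode, constraints and template here are illustrative and
   target-specific:

     (define_insn "*movsi_example"
       [(set (match_operand:SI 0 "general_operand" "=g")
             (match_operand:SI 1 "general_operand" "g"))]
       ""
       "...")  */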
938
939int
940general_operand (op, mode)
941 rtx op;
942 enum machine_mode mode;
943{
944 enum rtx_code code = GET_CODE (op);
945
946 if (mode == VOIDmode)
947 mode = GET_MODE (op);
948
949 /* Don't accept CONST_INT or anything similar
950 if the caller wants something floating. */
951 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
952 && GET_MODE_CLASS (mode) != MODE_INT
953 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
954 return 0;
955
956 if (GET_CODE (op) == CONST_INT
957 && mode != VOIDmode
958 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
959 return 0;
960
961 if (CONSTANT_P (op))
962 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
963 || mode == VOIDmode)
964#ifdef LEGITIMATE_PIC_OPERAND_P
965 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
966#endif
967 && LEGITIMATE_CONSTANT_P (op));
968
969 /* Except for certain constants with VOIDmode, already checked for,
970 OP's mode must match MODE if MODE specifies a mode. */
971
972 if (GET_MODE (op) != mode)
973 return 0;
974
975 if (code == SUBREG)
976 {
977 rtx sub = SUBREG_REG (op);
978
979#ifdef INSN_SCHEDULING
980 /* On machines that have insn scheduling, we want all memory
 981 references to be explicit, so outlaw paradoxical SUBREGs. */
982 if (GET_CODE (sub) == MEM
983 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
984 return 0;
985#endif
986 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
 987 may result in an incorrect reference. We should simplify all valid
988 subregs of MEM anyway. But allow this after reload because we
989 might be called from cleanup_subreg_operands.
990
991 ??? This is a kludge. */
992 if (!reload_completed && SUBREG_BYTE (op) != 0
993 && GET_CODE (sub) == MEM)
994 return 0;
995
996 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
997 create such rtl, and we must reject it. */
998 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
999 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1000 return 0;
1001
1002 op = sub;
1003 code = GET_CODE (op);
1004 }
1005
1006 if (code == REG)
1007 /* A register whose class is NO_REGS is not a general operand. */
1008 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1009 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1010
1011 if (code == MEM)
1012 {
1013 rtx y = XEXP (op, 0);
1014
1015 if (! volatile_ok && MEM_VOLATILE_P (op))
1016 return 0;
1017
1018 if (GET_CODE (y) == ADDRESSOF)
1019 return 1;
1020
1021 /* Use the mem's mode, since it will be reloaded thus. */
1022 mode = GET_MODE (op);
1023 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1024 }
1025
1026 /* Pretend this is an operand for now; we'll run force_operand
1027 on its replacement in fixup_var_refs_1. */
1028 if (code == ADDRESSOF)
1029 return 1;
1030
1031 return 0;
1032
1033 win:
1034 return 1;
1035}
1036
1037/* Return 1 if OP is a valid memory address for a memory reference
1038 of mode MODE.
1039
1040 The main use of this function is as a predicate in match_operand
1041 expressions in the machine description. */
1042
1043int
1044address_operand (op, mode)
1045 rtx op;
1046 enum machine_mode mode;
1047{
1048 return memory_address_p (mode, op);
1049}
1050
1051/* Return 1 if OP is a register reference of mode MODE.
1052 If MODE is VOIDmode, accept a register in any mode.
1053
1054 The main use of this function is as a predicate in match_operand
1055 expressions in the machine description.
1056
1057 As a special exception, registers whose class is NO_REGS are
1058 not accepted by `register_operand'. The reason for this change
1059 is to allow the representation of special architecture artifacts
1060 (such as a condition code register) without extending the rtl
1061 definitions. Since registers of class NO_REGS cannot be used
1062 as registers in any case where register classes are examined,
1063 it is most consistent to keep this function from accepting them. */
1064
1065int
1066register_operand (op, mode)
1067 rtx op;
1068 enum machine_mode mode;
1069{
1070 if (GET_MODE (op) != mode && mode != VOIDmode)
1071 return 0;
1072
1073 if (GET_CODE (op) == SUBREG)
1074 {
1075 rtx sub = SUBREG_REG (op);
1076
1077 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1078 because it is guaranteed to be reloaded into one.
1079 Just make sure the MEM is valid in itself.
1080 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1081 but currently it does result from (SUBREG (REG)...) where the
1082 reg went on the stack.) */
1083 if (! reload_completed && GET_CODE (sub) == MEM)
1084 return general_operand (op, mode);
1085
1086#ifdef CANNOT_CHANGE_MODE_CLASS
1087 if (GET_CODE (sub) == REG
1088 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1089 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1090 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1091 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1092 return 0;
1093#endif
1094
1095 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1096 create such rtl, and we must reject it. */
1097 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1098 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1099 return 0;
1100
1101 op = sub;
1102 }
1103
1104 /* If we have an ADDRESSOF, consider it valid since it will be
1105 converted into something that will not be a MEM. */
1106 if (GET_CODE (op) == ADDRESSOF)
1107 return 1;
1108
1109 /* We don't consider registers whose class is NO_REGS
1110 to be a register operand. */
1111 return (GET_CODE (op) == REG
1112 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1113 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1114}
1115
1116/* Return 1 for a register in Pmode; ignore the tested mode. */
1117
1118int
1119pmode_register_operand (op, mode)
1120 rtx op;
1121 enum machine_mode mode ATTRIBUTE_UNUSED;
1122{
1123 return register_operand (op, Pmode);
1124}
1125
1126/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1127 or a hard register. */
1128
1129int
1130scratch_operand (op, mode)
1131 rtx op;
1132 enum machine_mode mode;
1133{
1134 if (GET_MODE (op) != mode && mode != VOIDmode)
1135 return 0;
1136
1137 return (GET_CODE (op) == SCRATCH
1138 || (GET_CODE (op) == REG
1139 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1140}
1141
1142/* Return 1 if OP is a valid immediate operand for mode MODE.
1143
1144 The main use of this function is as a predicate in match_operand
1145 expressions in the machine description. */
1146
1147int
1148immediate_operand (op, mode)
1149 rtx op;
1150 enum machine_mode mode;
1151{
1152 /* Don't accept CONST_INT or anything similar
1153 if the caller wants something floating. */
1154 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1155 && GET_MODE_CLASS (mode) != MODE_INT
1156 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1157 return 0;
1158
1159 if (GET_CODE (op) == CONST_INT
1160 && mode != VOIDmode
1161 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1162 return 0;
1163
1164 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1165 result in 0/1. It seems a safe assumption that this is
1166 in range for everyone. */
1167 if (GET_CODE (op) == CONSTANT_P_RTX)
1168 return 1;
1169
1170 return (CONSTANT_P (op)
1171 && (GET_MODE (op) == mode || mode == VOIDmode
1172 || GET_MODE (op) == VOIDmode)
1173#ifdef LEGITIMATE_PIC_OPERAND_P
1174 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1175#endif
1176 && LEGITIMATE_CONSTANT_P (op));
1177}
1178
1179/* Returns 1 if OP is an operand that is a CONST_INT. */
1180
1181int
1182const_int_operand (op, mode)
1183 rtx op;
1184 enum machine_mode mode;
1185{
1186 if (GET_CODE (op) != CONST_INT)
1187 return 0;
1188
1189 if (mode != VOIDmode
1190 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1191 return 0;
1192
1193 return 1;
1194}
1195
1196/* Returns 1 if OP is an operand that is a constant integer or constant
1197 floating-point number. */
1198
1199int
1200const_double_operand (op, mode)
1201 rtx op;
1202 enum machine_mode mode;
1203{
1204 /* Don't accept CONST_INT or anything similar
1205 if the caller wants something floating. */
1206 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1207 && GET_MODE_CLASS (mode) != MODE_INT
1208 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1209 return 0;
1210
1211 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1212 && (mode == VOIDmode || GET_MODE (op) == mode
1213 || GET_MODE (op) == VOIDmode));
1214}
1215
1216/* Return 1 if OP is a general operand that is not an immediate operand. */
1217
1218int
1219nonimmediate_operand (op, mode)
1220 rtx op;
1221 enum machine_mode mode;
1222{
1223 return (general_operand (op, mode) && ! CONSTANT_P (op));
1224}
1225
1226/* Return 1 if OP is a register reference or immediate value of mode MODE. */
1227
1228int
1229nonmemory_operand (op, mode)
1230 rtx op;
1231 enum machine_mode mode;
1232{
1233 if (CONSTANT_P (op))
1234 {
1235 /* Don't accept CONST_INT or anything similar
1236 if the caller wants something floating. */
1237 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1238 && GET_MODE_CLASS (mode) != MODE_INT
1239 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1240 return 0;
1241
1242 if (GET_CODE (op) == CONST_INT
1243 && mode != VOIDmode
1244 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1245 return 0;
1246
1247 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1248 || mode == VOIDmode)
1249#ifdef LEGITIMATE_PIC_OPERAND_P
1250 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1251#endif
1252 && LEGITIMATE_CONSTANT_P (op));
1253 }
1254
1255 if (GET_MODE (op) != mode && mode != VOIDmode)
1256 return 0;
1257
1258 if (GET_CODE (op) == SUBREG)
1259 {
1260 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1261 because it is guaranteed to be reloaded into one.
1262 Just make sure the MEM is valid in itself.
1263 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1264 but currently it does result from (SUBREG (REG)...) where the
1265 reg went on the stack.) */
1266 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1267 return general_operand (op, mode);
1268 op = SUBREG_REG (op);
1269 }
1270
1271 /* We don't consider registers whose class is NO_REGS
1272 to be a register operand. */
1273 return (GET_CODE (op) == REG
1274 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1275 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1276}
1277
1278/* Return 1 if OP is a valid operand that stands for pushing a
1279 value of mode MODE onto the stack.
1280
1281 The main use of this function is as a predicate in match_operand
1282 expressions in the machine description. */
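/* By way of example (a sketch assuming STACK_GROWS_DOWNWARD, so
   STACK_PUSH_CODE defaults to PRE_DEC): a plain SImode push is
   matched as

     (mem:SI (pre_dec:SI (reg:SI sp)))

   while a target whose PUSH_ROUNDING pads the size is matched through
   PRE_MODIFY, e.g. (the -8 is illustrative; the real amount is the
   negated rounded size)

     (mem:SI (pre_modify:SI (reg:SI sp)
                            (plus:SI (reg:SI sp) (const_int -8))))  */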
1283
1284int
1285push_operand (op, mode)
1286 rtx op;
1287 enum machine_mode mode;
1288{
1289 unsigned int rounded_size = GET_MODE_SIZE (mode);
1290
1291#ifdef PUSH_ROUNDING
1292 rounded_size = PUSH_ROUNDING (rounded_size);
1293#endif
1294
1295 if (GET_CODE (op) != MEM)
1296 return 0;
1297
1298 if (mode != VOIDmode && GET_MODE (op) != mode)
1299 return 0;
1300
1301 op = XEXP (op, 0);
1302
1303 if (rounded_size == GET_MODE_SIZE (mode))
1304 {
1305 if (GET_CODE (op) != STACK_PUSH_CODE)
1306 return 0;
1307 }
1308 else
1309 {
1310 if (GET_CODE (op) != PRE_MODIFY
1311 || GET_CODE (XEXP (op, 1)) != PLUS
1312 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1313 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1314#ifdef STACK_GROWS_DOWNWARD
1315 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1316#else
1317 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1318#endif
1319 )
1320 return 0;
1321 }
1322
1323 return XEXP (op, 0) == stack_pointer_rtx;
1324}
1325
1326/* Return 1 if OP is a valid operand that stands for popping a
1327 value of mode MODE off the stack.
1328
1329 The main use of this function is as a predicate in match_operand
1330 expressions in the machine description. */
1331
1332int
1333pop_operand (op, mode)
1334 rtx op;
1335 enum machine_mode mode;
1336{
1337 if (GET_CODE (op) != MEM)
1338 return 0;
1339
1340 if (mode != VOIDmode && GET_MODE (op) != mode)
1341 return 0;
1342
1343 op = XEXP (op, 0);
1344
1345 if (GET_CODE (op) != STACK_POP_CODE)
1346 return 0;
1347
1348 return XEXP (op, 0) == stack_pointer_rtx;
1349}
1350
1351/* Return 1 if ADDR is a valid memory address for mode MODE. */
1352
1353int
1354memory_address_p (mode, addr)
1355 enum machine_mode mode ATTRIBUTE_UNUSED;
1356 rtx addr;
1357{
1358 if (GET_CODE (addr) == ADDRESSOF)
1359 return 1;
1360
1361 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1362 return 0;
1363
1364 win:
1365 return 1;
1366}
1367
1368/* Return 1 if OP is a valid memory reference with mode MODE,
1369 including a valid address.
1370
1371 The main use of this function is as a predicate in match_operand
1372 expressions in the machine description. */
1373
1374int
1375memory_operand (op, mode)
1376 rtx op;
1377 enum machine_mode mode;
1378{
1379 rtx inner;
1380
1381 if (! reload_completed)
1382 /* Note that no SUBREG is a memory operand before end of reload pass,
1383 because (SUBREG (MEM...)) forces reloading into a register. */
1384 return GET_CODE (op) == MEM && general_operand (op, mode);
1385
1386 if (mode != VOIDmode && GET_MODE (op) != mode)
1387 return 0;
1388
1389 inner = op;
1390 if (GET_CODE (inner) == SUBREG)
1391 inner = SUBREG_REG (inner);
1392
1393 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1394}
1395
1396/* Return 1 if OP is a valid indirect memory reference with mode MODE;
1397 that is, a memory reference whose address is a general_operand. */
1398
1399int
1400indirect_operand (op, mode)
1401 rtx op;
1402 enum machine_mode mode;
1403{
1404 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1405 if (! reload_completed
1406 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1407 {
1408 int offset = SUBREG_BYTE (op);
1409 rtx inner = SUBREG_REG (op);
1410
1411 if (mode != VOIDmode && GET_MODE (op) != mode)
1412 return 0;
1413
1414 /* The only way that we can have a general_operand as the resulting
1415 address is if OFFSET is zero and the address already is an operand
1416 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1417 operand. */
1418
1419 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1420 || (GET_CODE (XEXP (inner, 0)) == PLUS
1421 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1422 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1423 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1424 }
1425
1426 return (GET_CODE (op) == MEM
1427 && memory_operand (op, mode)
1428 && general_operand (XEXP (op, 0), Pmode));
1429}
1430
1431/* Return 1 if this is a comparison operator. This allows the use of
1432 MATCH_OPERATOR to recognize all the branch insns. */
1433
1434int
1435comparison_operator (op, mode)
1436 rtx op;
1437 enum machine_mode mode;
1438{
1439 return ((mode == VOIDmode || GET_MODE (op) == mode)
1440 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1441}
1442
1443/* If BODY is an insn body that uses ASM_OPERANDS,
1444 return the number of operands (both input and output) in the insn.
1445 Otherwise return -1. */
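/* The body shapes recognized below, sketched as rtl templates:

     (asm_operands ...)                               ; no outputs
     (set OUTPUT (asm_operands ...))                  ; one output
     (parallel [(set OUT0 (asm_operands ...)) ...
                (clobber (reg ...)) ...])             ; outputs + clobbers
     (parallel [(asm_operands ...)
                (clobber (reg ...)) ...])             ; clobbers only  */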
1446
1447int
1448asm_noperands (body)
1449 rtx body;
1450{
1451 switch (GET_CODE (body))
1452 {
1453 case ASM_OPERANDS:
1454 /* No output operands: return number of input operands. */
1455 return ASM_OPERANDS_INPUT_LENGTH (body);
1456 case SET:
1457 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1458 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1459 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1460 else
1461 return -1;
1462 case PARALLEL:
1463 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1464 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1465 {
1466 /* Multiple output operands, or 1 output plus some clobbers:
1467 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1468 int i;
1469 int n_sets;
1470
1471 /* Count backwards through CLOBBERs to determine number of SETs. */
1472 for (i = XVECLEN (body, 0); i > 0; i--)
1473 {
1474 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1475 break;
1476 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1477 return -1;
1478 }
1479
1480 /* N_SETS is now number of output operands. */
1481 n_sets = i;
1482
1483 /* Verify that all the SETs we have
1484 came from a single original asm_operands insn
1485 (so that invalid combinations are blocked). */
1486 for (i = 0; i < n_sets; i++)
1487 {
1488 rtx elt = XVECEXP (body, 0, i);
1489 if (GET_CODE (elt) != SET)
1490 return -1;
1491 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1492 return -1;
1493 /* If these ASM_OPERANDS rtx's came from different original insns
1494 then they aren't allowed together. */
1495 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1496 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1497 return -1;
1498 }
1499 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1500 + n_sets);
1501 }
1502 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1503 {
1504 /* 0 outputs, but some clobbers:
1505 body is [(asm_operands ...) (clobber (reg ...))...]. */
1506 int i;
1507
1508 /* Make sure all the other parallel things really are clobbers. */
1509 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1510 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1511 return -1;
1512
1513 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1514 }
1515 else
1516 return -1;
1517 default:
1518 return -1;
1519 }
1520}
1521
1522/* Assuming BODY is an insn body that uses ASM_OPERANDS,
1523 copy its operands (both input and output) into the vector OPERANDS,
1524 the locations of the operands within the insn into the vector OPERAND_LOCS,
1525 and the constraints for the operands into CONSTRAINTS.
1526 Write the modes of the operands into MODES.
1527 Return the assembler-template.
1528
1529 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1530 we don't store that info. */
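/* Usage sketch (illustrative): callers normally size the arrays from
   asm_noperands first, as check_asm_operands does above (after
   checking that the result is positive):

     int n = asm_noperands (body);
     rtx *ops = (rtx *) alloca (n * sizeof (rtx));
     const char **cons = (const char **) alloca (n * sizeof (char *));
     decode_asm_operands (body, ops, NULL, cons, NULL);  */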
1531
1532const char *
1533decode_asm_operands (body, operands, operand_locs, constraints, modes)
1534 rtx body;
1535 rtx *operands;
1536 rtx **operand_locs;
1537 const char **constraints;
1538 enum machine_mode *modes;
1539{
1540 int i;
1541 int noperands;
1542 const char *template = 0;
1543
1544 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1545 {
1546 rtx asmop = SET_SRC (body);
1547 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1548
1549 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1550
1551 for (i = 1; i < noperands; i++)
1552 {
1553 if (operand_locs)
1554 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1555 if (operands)
1556 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1557 if (constraints)
1558 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1559 if (modes)
1560 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1561 }
1562
1563 /* The output is in the SET.
1564 Its constraint is in the ASM_OPERANDS itself. */
1565 if (operands)
1566 operands[0] = SET_DEST (body);
1567 if (operand_locs)
1568 operand_locs[0] = &SET_DEST (body);
1569 if (constraints)
1570 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1571 if (modes)
1572 modes[0] = GET_MODE (SET_DEST (body));
1573 template = ASM_OPERANDS_TEMPLATE (asmop);
1574 }
1575 else if (GET_CODE (body) == ASM_OPERANDS)
1576 {
1577 rtx asmop = body;
1578 /* No output operands: BODY is (asm_operands ....). */
1579
1580 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1581
1582 /* The input operands are found in the 1st element vector. */
1583 /* Constraints for inputs are in the 2nd element vector. */
1584 for (i = 0; i < noperands; i++)
1585 {
1586 if (operand_locs)
1587 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1588 if (operands)
1589 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1590 if (constraints)
1591 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1592 if (modes)
1593 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1594 }
1595 template = ASM_OPERANDS_TEMPLATE (asmop);
1596 }
1597 else if (GET_CODE (body) == PARALLEL
1598 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1599 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1600 {
1601 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1602 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1603 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1604 int nout = 0; /* Does not include CLOBBERs. */
1605
1606 /* At least one output, plus some CLOBBERs. */
1607
1608 /* The outputs are in the SETs.
1609 Their constraints are in the ASM_OPERANDS itself. */
1610 for (i = 0; i < nparallel; i++)
1611 {
1612 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1613 break; /* Past last SET */
1614
1615 if (operands)
1616 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1617 if (operand_locs)
1618 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1619 if (constraints)
1620 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1621 if (modes)
1622 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1623 nout++;
1624 }
1625
1626 for (i = 0; i < nin; i++)
1627 {
1628 if (operand_locs)
1629 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1630 if (operands)
1631 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1632 if (constraints)
1633 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1634 if (modes)
1635 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1636 }
1637
1638 template = ASM_OPERANDS_TEMPLATE (asmop);
1639 }
1640 else if (GET_CODE (body) == PARALLEL
1641 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1642 {
1643 /* No outputs, but some CLOBBERs. */
1644
1645 rtx asmop = XVECEXP (body, 0, 0);
1646 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1647
1648 for (i = 0; i < nin; i++)
1649 {
1650 if (operand_locs)
1651 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1652 if (operands)
1653 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1654 if (constraints)
1655 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1656 if (modes)
1657 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1658 }
1659
1660 template = ASM_OPERANDS_TEMPLATE (asmop);
1661 }
1662
1663 return template;
1664}
1665
1666/* Check if an asm_operand matches its constraints.
1667 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1668
1669int
1670asm_operand_ok (op, constraint)
1671 rtx op;
1672 const char *constraint;
1673{
1674 int result = 0;
1675
1676 /* Use constrain_operands after reload. */
1677 if (reload_completed)
1678 abort ();
1679
1680 while (*constraint)
1681 {
1682 char c = *constraint++;
1683 switch (c)
1684 {
1685 case '=':
1686 case '+':
1687 case '*':
1688 case '%':
1689 case '?':
1690 case '!':
1691 case '#':
1692 case '&':
1693 case ',':
1694 break;
1695
1696 case '0': case '1': case '2': case '3': case '4':
1697 case '5': case '6': case '7': case '8': case '9':
1698 /* For best results, our caller should have given us the
1699 proper matching constraint, but we can't actually fail
1700 the check if they didn't. Indicate that results are
1701 inconclusive. */
1702 while (ISDIGIT (*constraint))
1703 constraint++;
1704 result = -1;
1705 break;
1706
1707 case 'p':
1708 if (address_operand (op, VOIDmode))
1709 return 1;
1710 break;
1711
1712 case 'm':
1713 case 'V': /* non-offsettable */
1714 if (memory_operand (op, VOIDmode))
1715 return 1;
1716 break;
1717
1718 case 'o': /* offsettable */
1719 if (offsettable_nonstrict_memref_p (op))
1720 return 1;
1721 break;
1722
1723 case '<':
1724 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1725 excepting those that expand_call created. Further, on some
1726 machines which do not have generalized auto inc/dec, an inc/dec
1727 is not a memory_operand.
1728
1729 Match any memory and hope things are resolved after reload. */
1730
1731 if (GET_CODE (op) == MEM
1732 && (1
1733 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1734 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1735 return 1;
1736 break;
1737
1738 case '>':
1739 if (GET_CODE (op) == MEM
1740 && (1
1741 || GET_CODE (XEXP (op, 0)) == PRE_INC
1742 || GET_CODE (XEXP (op, 0)) == POST_INC))
1743 return 1;
1744 break;
1745
1746 case 'E':
1747 case 'F':
1748 if (GET_CODE (op) == CONST_DOUBLE
1749 || (GET_CODE (op) == CONST_VECTOR
1750 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1751 return 1;
1752 break;
1753
1754 case 'G':
1755 if (GET_CODE (op) == CONST_DOUBLE
1756 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1757 return 1;
1758 break;
1759 case 'H':
1760 if (GET_CODE (op) == CONST_DOUBLE
1761 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1762 return 1;
1763 break;
1764
1765 case 's':
1766 if (GET_CODE (op) == CONST_INT
1767 || (GET_CODE (op) == CONST_DOUBLE
1768 && GET_MODE (op) == VOIDmode))
1769 break;
1770 /* FALLTHRU */
1771
1772 case 'i':
1773 if (CONSTANT_P (op)
1774#ifdef LEGITIMATE_PIC_OPERAND_P
1775 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1776#endif
1777 )
1778 return 1;
1779 break;
1780
1781 case 'n':
1782 if (GET_CODE (op) == CONST_INT
1783 || (GET_CODE (op) == CONST_DOUBLE
1784 && GET_MODE (op) == VOIDmode))
1785 return 1;
1786 break;
1787
1788 case 'I':
1789 if (GET_CODE (op) == CONST_INT
1790 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1791 return 1;
1792 break;
1793 case 'J':
1794 if (GET_CODE (op) == CONST_INT
1795 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1796 return 1;
1797 break;
1798 case 'K':
1799 if (GET_CODE (op) == CONST_INT
1800 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1801 return 1;
1802 break;
1803 case 'L':
1804 if (GET_CODE (op) == CONST_INT
1805 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1806 return 1;
1807 break;
1808 case 'M':
1809 if (GET_CODE (op) == CONST_INT
1810 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1811 return 1;
1812 break;
1813 case 'N':
1814 if (GET_CODE (op) == CONST_INT
1815 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1816 return 1;
1817 break;
1818 case 'O':
1819 if (GET_CODE (op) == CONST_INT
1820 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1821 return 1;
1822 break;
1823 case 'P':
1824 if (GET_CODE (op) == CONST_INT
1825 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1826 return 1;
1827 break;
1828
1829 case 'X':
1830 return 1;
1831
1832 case 'g':
1833 if (general_operand (op, VOIDmode))
1834 return 1;
1835 break;
1836
1837 default:
1838 /* For all other letters, we first check for a register class,
1839 otherwise it is an EXTRA_CONSTRAINT. */
1840 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1841 {
1842 case 'r':
1843 if (GET_MODE (op) == BLKmode)
1844 break;
1845 if (register_operand (op, VOIDmode))
1846 return 1;
1847 }
1848#ifdef EXTRA_CONSTRAINT
1849 if (EXTRA_CONSTRAINT (op, c))
1850 return 1;
1851 if (EXTRA_MEMORY_CONSTRAINT (c))
1852 {
1853 /* Every memory operand can be reloaded to fit. */
1854 if (memory_operand (op, VOIDmode))
1855 return 1;
1856 }
1857 if (EXTRA_ADDRESS_CONSTRAINT (c))
1858 {
1859 /* Every address operand can be reloaded to fit. */
1860 if (address_operand (op, VOIDmode))
1861 return 1;
1862 }
1863#endif
1864 break;
1865 }
1866 }
1867
1868 return result;
1869}
1870
1871/* Given an rtx *P, if it is a sum containing an integer constant term,
1872 return the location (type rtx *) of the pointer to that constant term.
1873 Otherwise, return a null pointer. */
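/* For instance, given *P of the form (plus (reg 65) (const_int 4)),
   the result points at the (const_int 4); for (plus (reg 65) (reg 66))
   or a lone (reg 65) the result is a null pointer.  */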
1874
1875rtx *
1876find_constant_term_loc (p)
1877 rtx *p;
1878{
1879 rtx *tem;
1880 enum rtx_code code = GET_CODE (*p);
1881
1882 /* If *P IS such a constant term, P is its location. */
1883
1884 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1885 || code == CONST)
1886 return p;
1887
1888 /* Otherwise, if not a sum, it has no constant term. */
1889
1890 if (GET_CODE (*p) != PLUS)
1891 return 0;
1892
1893 /* If one of the summands is constant, return its location. */
1894
1895 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1896 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1897 return p;
1898
1899 /* Otherwise, check each summand for containing a constant term. */
1900
1901 if (XEXP (*p, 0) != 0)
1902 {
1903 tem = find_constant_term_loc (&XEXP (*p, 0));
1904 if (tem != 0)
1905 return tem;
1906 }
1907
1908 if (XEXP (*p, 1) != 0)
1909 {
1910 tem = find_constant_term_loc (&XEXP (*p, 1));
1911 if (tem != 0)
1912 return tem;
1913 }
1914
1915 return 0;
1916}
1917
1918/* Return 1 if OP is a memory reference
1919 whose address contains no side effects
1920 and remains valid after the addition
1921 of a positive integer less than the
1922 size of the object being referenced.
1923
1924 We assume that the original address is valid and do not check it.
1925
1926 This uses strict_memory_address_p as a subroutine, so
1927 don't use it before reload. */
1928
1929int
1930offsettable_memref_p (op)
1931 rtx op;
1932{
1933 return ((GET_CODE (op) == MEM)
1934 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1935}
1936
1937/* Similar, but don't require a strictly valid mem ref:
1938 consider pseudo-regs valid as index or base regs. */
1939
1940int
1941offsettable_nonstrict_memref_p (op)
1942 rtx op;
1943{
1944 return ((GET_CODE (op) == MEM)
1945 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1946}
1947
1948/* Return 1 if Y is a memory address which contains no side effects
1949 and would remain valid after the addition of a positive integer
1950 less than the size of that mode.
1951
1952 We assume that the original address is valid and do not check it.
1953 We do check that it is valid for narrower modes.
1954
1955 If STRICTP is nonzero, we require a strictly valid address,
1956 for the sake of use in reload.c. */
1957
1958int
1959offsettable_address_p (strictp, mode, y)
1960 int strictp;
1961 enum machine_mode mode;
1962 rtx y;
1963{
1964 enum rtx_code ycode = GET_CODE (y);
1965 rtx z;
1966 rtx y1 = y;
1967 rtx *y2;
1968 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1969 (strictp ? strict_memory_address_p : memory_address_p);
1970 unsigned int mode_sz = GET_MODE_SIZE (mode);
1971
1972 if (CONSTANT_ADDRESS_P (y))
1973 return 1;
1974
1975 /* Adjusting an offsettable address involves changing to a narrower mode.
1976 Make sure that's OK. */
1977
1978 if (mode_dependent_address_p (y))
1979 return 0;
1980
1981 /* ??? How much offset does an offsettable BLKmode reference need?
1982 Clearly that depends on the situation in which it's being used.
1983 However, the current situation in which we test 0xffffffff is
1984 less than ideal. Caveat user. */
1985 if (mode_sz == 0)
1986 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1987
1988 /* If the expression contains a constant term,
1989 see if it remains valid when max possible offset is added. */
1990
1991 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1992 {
1993 int good;
1994
1995 y1 = *y2;
1996 *y2 = plus_constant (*y2, mode_sz - 1);
1997 /* Use QImode because an odd displacement may be automatically invalid
1998 for any wider mode. But it should be valid for a single byte. */
1999 good = (*addressp) (QImode, y);
2000
2001 /* In any case, restore old contents of memory. */
2002 *y2 = y1;
2003 return good;
2004 }
2005
2006 if (GET_RTX_CLASS (ycode) == 'a')
2007 return 0;
2008
2009 /* The offset added here is chosen as the maximum offset that
2010 any instruction could need to add when operating on something
2011 of the specified mode. We assume that if Y and Y+c are
2012 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2013 go inside a LO_SUM here, so we do so as well. */
2014 if (GET_CODE (y) == LO_SUM
2015 && mode != BLKmode
2016 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2017 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
2018 plus_constant (XEXP (y, 1), mode_sz - 1));
2019 else
2020 z = plus_constant (y, mode_sz - 1);
2021
2022 /* Use QImode because an odd displacement may be automatically invalid
2023 for any wider mode. But it should be valid for a single byte. */
2024 return (*addressp) (QImode, z);
2025}
2026
2027/* Return 1 if ADDR is an address-expression whose effect depends
2028 on the mode of the memory reference it is used in.
2029
2030 Autoincrement addressing is a typical example of mode-dependence
2031 because the amount of the increment depends on the mode. */
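/* For instance, (post_inc (reg)) steps the register by the size of the
 mode of the enclosing MEM, so the same address rtx means different things
 in QImode and SImode; targets flag such addresses via
 GO_IF_MODE_DEPENDENT_ADDRESS. */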
2032
2033int
2034mode_dependent_address_p (addr)
2035 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2036{
2037 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2038 return 0;
2039 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2040 win: ATTRIBUTE_UNUSED_LABEL
2041 return 1;
2042}
2043
2044/* Return 1 if OP is a general operand
2045 other than a memory ref with a mode dependent address. */
2046
2047int
2048mode_independent_operand (op, mode)
2049 enum machine_mode mode;
2050 rtx op;
2051{
2052 rtx addr;
2053
2054 if (! general_operand (op, mode))
2055 return 0;
2056
2057 if (GET_CODE (op) != MEM)
2058 return 1;
2059
2060 addr = XEXP (op, 0);
2061 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2062 return 1;
2063 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2064 lose: ATTRIBUTE_UNUSED_LABEL
2065 return 0;
2066}
2067
2068/* Like extract_insn, but save the extracted insn and don't extract again
2069 when called again for the same insn, expecting that recog_data still
2070 contains valid information. This is used primarily by the gen_attr
2071 infrastructure, which often extracts the same insn again and again. */
2072void
2073extract_insn_cached (insn)
2074 rtx insn;
2075{
2076 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2077 return;
2078 extract_insn (insn);
2079 recog_data.insn = insn;
2080}
2081/* Do cached extract_insn, constrain_operands and complain about failures.
2082 Used by insn_attrtab. */
2083void
2084extract_constrain_insn_cached (insn)
2085 rtx insn;
2086{
2087 extract_insn_cached (insn);
2088 if (which_alternative == -1
2089 && !constrain_operands (reload_completed))
2090 fatal_insn_not_found (insn);
2091}
2092/* Like constrain_operands, but reuse the result of a previous call. */
2093int
2094constrain_operands_cached (strict)
2095 int strict;
2096{
2097 if (which_alternative == -1)
2098 return constrain_operands (strict);
2099 else
2100 return 1;
2101}
2102
2103/* Analyze INSN and fill in recog_data. */
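/* Specifically, this fills in the operand count and values, their
 locations and modes, the constraint strings, and the alternative and dup
 counts, for both recognized insns and asms with operands. */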
2104
2105void
2106extract_insn (insn)
2107 rtx insn;
2108{
2109 int i;
2110 int icode;
2111 int noperands;
2112 rtx body = PATTERN (insn);
2113
2114 recog_data.insn = NULL;
2115 recog_data.n_operands = 0;
2116 recog_data.n_alternatives = 0;
2117 recog_data.n_dups = 0;
2118 which_alternative = -1;
2119
2120 switch (GET_CODE (body))
2121 {
2122 case USE:
2123 case CLOBBER:
2124 case ASM_INPUT:
2125 case ADDR_VEC:
2126 case ADDR_DIFF_VEC:
2127 return;
2128
2129 case SET:
2130 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2131 goto asm_insn;
2132 else
2133 goto normal_insn;
2134 case PARALLEL:
2135 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2136 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2137 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2138 goto asm_insn;
2139 else
2140 goto normal_insn;
2141 case ASM_OPERANDS:
2142 asm_insn:
2143 recog_data.n_operands = noperands = asm_noperands (body);
2144 if (noperands >= 0)
2145 {
2146 /* This insn is an `asm' with operands. */
2147
2148 /* expand_asm_operands makes sure there aren't too many operands. */
2149 if (noperands > MAX_RECOG_OPERANDS)
2150 abort ();
2151
2152 /* Now get the operand values and constraints out of the insn. */
2153 decode_asm_operands (body, recog_data.operand,
2154 recog_data.operand_loc,
2155 recog_data.constraints,
2156 recog_data.operand_mode);
2157 if (noperands > 0)
2158 {
2159 const char *p = recog_data.constraints[0];
2160 recog_data.n_alternatives = 1;
2161 while (*p)
2162 recog_data.n_alternatives += (*p++ == ',');
2163 }
2164 break;
2165 }
2166 fatal_insn_not_found (insn);
2167
2168 default:
2169 normal_insn:
2170 /* Ordinary insn: recognize it, get the operands via insn_extract
2171 and get the constraints. */
2172
2173 icode = recog_memoized (insn);
2174 if (icode < 0)
2175 fatal_insn_not_found (insn);
2176
2177 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2178 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2179 recog_data.n_dups = insn_data[icode].n_dups;
2180
2181 insn_extract (insn);
2182
2183 for (i = 0; i < noperands; i++)
2184 {
2185 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2186 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2187 /* VOIDmode match_operands get their mode from the real operand. */
2188 if (recog_data.operand_mode[i] == VOIDmode)
2189 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2190 }
2191 }
2192 for (i = 0; i < noperands; i++)
2193 recog_data.operand_type[i]
2194 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2195 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2196 : OP_IN);
2197
2198 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2199 abort ();
2200}
2201
2202/* After calling extract_insn, you can use this function to extract some
2203 information from the constraint strings into a more usable form.
2204 The collected data is stored in recog_op_alt. */
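/* As a sketch, constraints "=r,m" for an operand yield, for alternative 0,
 class GENERAL_REGS and, for alternative 1, memory_ok set; the '=' itself
 contributes nothing here. */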
2205void
2206preprocess_constraints ()
2207{
2208 int i;
2209
2210 memset (recog_op_alt, 0, sizeof recog_op_alt);
2211 for (i = 0; i < recog_data.n_operands; i++)
2212 {
2213 int j;
2214 struct operand_alternative *op_alt;
2215 const char *p = recog_data.constraints[i];
2216
2217 op_alt = recog_op_alt[i];
2218
2219 for (j = 0; j < recog_data.n_alternatives; j++)
2220 {
2221 op_alt[j].class = NO_REGS;
2222 op_alt[j].constraint = p;
2223 op_alt[j].matches = -1;
2224 op_alt[j].matched = -1;
2225
2226 if (*p == '\0' || *p == ',')
2227 {
2228 op_alt[j].anything_ok = 1;
2229 continue;
2230 }
2231
2232 for (;;)
2233 {
2234 char c = *p++;
2235 if (c == '#')
2236 do
2237 c = *p++;
2238 while (c != ',' && c != '\0');
2239 if (c == ',' || c == '\0')
2240 break;
2241
2242 switch (c)
2243 {
2244 case '=': case '+': case '*': case '%':
2245 case 'E': case 'F': case 'G': case 'H':
2246 case 's': case 'i': case 'n':
2247 case 'I': case 'J': case 'K': case 'L':
2248 case 'M': case 'N': case 'O': case 'P':
2249 /* These don't say anything we care about. */
2250 break;
2251
2252 case '?':
2253 op_alt[j].reject += 6;
2254 break;
2255 case '!':
2256 op_alt[j].reject += 600;
2257 break;
2258 case '&':
2259 op_alt[j].earlyclobber = 1;
2260 break;
2261
2262 case '0': case '1': case '2': case '3': case '4':
2263 case '5': case '6': case '7': case '8': case '9':
2264 {
2265 char *end;
2266 op_alt[j].matches = strtoul (p - 1, &end, 10);
2267 recog_op_alt[op_alt[j].matches][j].matched = i;
2268 p = end;
2269 }
2270 break;
2271
2272 case 'm':
2273 op_alt[j].memory_ok = 1;
2274 break;
2275 case '<':
2276 op_alt[j].decmem_ok = 1;
2277 break;
2278 case '>':
2279 op_alt[j].incmem_ok = 1;
2280 break;
2281 case 'V':
2282 op_alt[j].nonoffmem_ok = 1;
2283 break;
2284 case 'o':
2285 op_alt[j].offmem_ok = 1;
2286 break;
2287 case 'X':
2288 op_alt[j].anything_ok = 1;
2289 break;
2290
2291 case 'p':
2292 op_alt[j].is_address = 1;
2293 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2294 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2295 break;
2296
2297 case 'g': case 'r':
2298 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2299 break;
2300
2301 default:
2302 if (EXTRA_MEMORY_CONSTRAINT (c))
2303 {
2304 op_alt[j].memory_ok = 1;
2305 break;
2306 }
2307 if (EXTRA_ADDRESS_CONSTRAINT (c))
2308 {
2309 op_alt[j].is_address = 1;
2310 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2311 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2312 break;
2313 }
2314
2315 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2316 break;
2317 }
2318 }
2319 }
2320 }
2321}
2322
2323/* Check the operands of an insn against the insn's operand constraints
2324 and return 1 if they are valid.
2325 The information about the insn's operands, constraints, operand modes
2326 etc. is obtained from the global variables set up by extract_insn.
2327
2328 WHICH_ALTERNATIVE is set to a number which indicates which
2329 alternative of constraints was matched: 0 for the first alternative,
2330 1 for the next, etc.
2331
2332 In addition, when two operands are required to match
2333 and it happens that the output operand is (reg) while the
2334 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2335 make the output operand look like the input.
2336 This is because the output operand is the one the template will print.
2337
2338 This is used in final, just before printing the assembler code and by
2339 the routines that determine an insn's attribute.
2340
2341 If STRICT is a positive nonzero value, it means that we have been
2342 called after reload has been completed. In that case, we must
2343 do all checks strictly. If it is zero, it means that we have been called
2344 before reload has completed. In that case, we first try to see if we can
2345 find an alternative that matches strictly. If not, we try again, this
2346 time assuming that reload will fix up the insn. This provides a "best
2347 guess" for the alternative and is used to compute attributes of insns prior
2348 to reload. A negative value of STRICT is used for this internal call. */
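/* A typical use, as in extract_constrain_insn_cached above, is to call
 constrain_operands (reload_completed) after extract_insn: strict checking
 once reload has assigned hard registers, the permissive best-guess
 variant before then. */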
2349
2350struct funny_match
2351{
2352 int this, other;
2353};
2354
2355int
2356constrain_operands (strict)
2357 int strict;
2358{
2359 const char *constraints[MAX_RECOG_OPERANDS];
2360 int matching_operands[MAX_RECOG_OPERANDS];
2361 int earlyclobber[MAX_RECOG_OPERANDS];
2362 int c;
2363
2364 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2365 int funny_match_index;
2366
2367 which_alternative = 0;
2368 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2369 return 1;
2370
2371 for (c = 0; c < recog_data.n_operands; c++)
2372 {
2373 constraints[c] = recog_data.constraints[c];
2374 matching_operands[c] = -1;
2375 }
2376
2377 do
2378 {
2379 int opno;
2380 int lose = 0;
2381 funny_match_index = 0;
2382
2383 for (opno = 0; opno < recog_data.n_operands; opno++)
2384 {
2385 rtx op = recog_data.operand[opno];
2386 enum machine_mode mode = GET_MODE (op);
2387 const char *p = constraints[opno];
2388 int offset = 0;
2389 int win = 0;
2390 int val;
2391
2392 earlyclobber[opno] = 0;
2393
2394 /* A unary operator may be accepted by the predicate, but it
2395 is irrelevant for matching constraints. */
2396 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2397 op = XEXP (op, 0);
2398
2399 if (GET_CODE (op) == SUBREG)
2400 {
2401 if (GET_CODE (SUBREG_REG (op)) == REG
2402 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2403 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2404 GET_MODE (SUBREG_REG (op)),
2405 SUBREG_BYTE (op),
2406 GET_MODE (op));
2407 op = SUBREG_REG (op);
2408 }
2409
2410 /* An empty constraint or empty alternative
2411 allows anything which matched the pattern. */
2412 if (*p == 0 || *p == ',')
2413 win = 1;
2414
2415 while (*p && (c = *p++) != ',')
2416 switch (c)
2417 {
2418 case '?': case '!': case '*': case '%':
2419 case '=': case '+':
2420 break;
2421
2422 case '#':
2423 /* Ignore rest of this alternative as far as
2424 constraint checking is concerned. */
2425 while (*p && *p != ',')
2426 p++;
2427 break;
2428
2429 case '&':
2430 earlyclobber[opno] = 1;
2431 break;
2432
2433 case '0': case '1': case '2': case '3': case '4':
2434 case '5': case '6': case '7': case '8': case '9':
2435 {
2436 /* This operand must be the same as a previous one.
2437 This kind of constraint is used for instructions such
2438 as add when they take only two operands.
2439
2440 Note that the lower-numbered operand is passed first.
2441
2442 If we are not testing strictly, assume that this
2443 constraint will be satisfied. */
2444
2445 char *end;
2446 int match;
2447
2448 match = strtoul (p - 1, &end, 10);
2449 p = end;
2450
2451 if (strict < 0)
2452 val = 1;
2453 else
2454 {
2455 rtx op1 = recog_data.operand[match];
2456 rtx op2 = recog_data.operand[opno];
2457
2458 /* A unary operator may be accepted by the predicate,
2459 but it is irrelevant for matching constraints. */
2460 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2461 op1 = XEXP (op1, 0);
2462 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2463 op2 = XEXP (op2, 0);
2464
2465 val = operands_match_p (op1, op2);
2466 }
2467
2468 matching_operands[opno] = match;
2469 matching_operands[match] = opno;
2470
2471 if (val != 0)
2472 win = 1;
2473
2474 /* If output is *x and input is *--x, arrange later
2475 to change the output to *--x as well, since the
2476 output op is the one that will be printed. */
2477 if (val == 2 && strict > 0)
2478 {
2479 funny_match[funny_match_index].this = opno;
2480 funny_match[funny_match_index++].other = match;
2481 }
2482 }
2483 break;
2484
2485 case 'p':
2486 /* p is used for address_operands. When we are called by
2487 gen_reload, no one will have checked that the address is
2488 strictly valid, i.e., that all pseudos requiring hard regs
2489 have gotten them. */
2490 if (strict <= 0
2491 || (strict_memory_address_p (recog_data.operand_mode[opno],
2492 op)))
2493 win = 1;
2494 break;
2495
2496 /* No need to check general_operand again;
2497 it was done in insn-recog.c. */
2498 case 'g':
2499 /* Anything goes unless it is a REG and really has a hard reg
2500 but the hard reg is not in the class GENERAL_REGS. */
2501 if (strict < 0
2502 || GENERAL_REGS == ALL_REGS
2503 || GET_CODE (op) != REG
2504 || (reload_in_progress
2505 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2506 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2507 win = 1;
2508 break;
2509
2510 case 'X':
2511 /* This is used for a MATCH_SCRATCH in the cases when
2512 we don't actually need anything. So anything goes
2513 any time. */
2514 win = 1;
2515 break;
2516
2517 case 'm':
2518 if (GET_CODE (op) == MEM
2519 /* Before reload, accept what reload can turn into mem. */
2520 || (strict < 0 && CONSTANT_P (op))
2521 /* During reload, accept a pseudo. */
2522 || (reload_in_progress && GET_CODE (op) == REG
2523 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2524 win = 1;
2525 break;
2526
2527 case '<':
2528 if (GET_CODE (op) == MEM
2529 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2530 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2531 win = 1;
2532 break;
2533
2534 case '>':
2535 if (GET_CODE (op) == MEM
2536 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2537 || GET_CODE (XEXP (op, 0)) == POST_INC))
2538 win = 1;
2539 break;
2540
2541 case 'E':
2542 case 'F':
2543 if (GET_CODE (op) == CONST_DOUBLE
2544 || (GET_CODE (op) == CONST_VECTOR
2545 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2546 win = 1;
2547 break;
2548
2549 case 'G':
2550 case 'H':
2551 if (GET_CODE (op) == CONST_DOUBLE
2552 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2553 win = 1;
2554 break;
2555
2556 case 's':
2557 if (GET_CODE (op) == CONST_INT
2558 || (GET_CODE (op) == CONST_DOUBLE
2559 && GET_MODE (op) == VOIDmode))
2560 break;
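 /* Fall through: 's' accepts any constant that is not an explicit
 integer constant; those were rejected just above. */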
2561 case 'i':
2562 if (CONSTANT_P (op))
2563 win = 1;
2564 break;
2565
2566 case 'n':
2567 if (GET_CODE (op) == CONST_INT
2568 || (GET_CODE (op) == CONST_DOUBLE
2569 && GET_MODE (op) == VOIDmode))
2570 win = 1;
2571 break;
2572
2573 case 'I':
2574 case 'J':
2575 case 'K':
2576 case 'L':
2577 case 'M':
2578 case 'N':
2579 case 'O':
2580 case 'P':
2581 if (GET_CODE (op) == CONST_INT
2582 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2583 win = 1;
2584 break;
2585
2586 case 'V':
2587 if (GET_CODE (op) == MEM
2588 && ((strict > 0 && ! offsettable_memref_p (op))
2589 || (strict < 0
2590 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2591 || (reload_in_progress
2592 && !(GET_CODE (op) == REG
2593 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2594 win = 1;
2595 break;
2596
2597 case 'o':
2598 if ((strict > 0 && offsettable_memref_p (op))
2599 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2600 /* Before reload, accept what reload can handle. */
2601 || (strict < 0
2602 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2603 /* During reload, accept a pseudo. */
2604 || (reload_in_progress && GET_CODE (op) == REG
2605 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2606 win = 1;
2607 break;
2608
2609 default:
2610 {
2611 enum reg_class class;
2612
2613 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2614 if (class != NO_REGS)
2615 {
2616 if (strict < 0
2617 || (strict == 0
2618 && GET_CODE (op) == REG
2619 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2620 || (strict == 0 && GET_CODE (op) == SCRATCH)
2621 || (GET_CODE (op) == REG
2622 && reg_fits_class_p (op, class, offset, mode)))
2623 win = 1;
2624 }
2625#ifdef EXTRA_CONSTRAINT
2626 else if (EXTRA_CONSTRAINT (op, c))
2627 win = 1;
2628
2629 if (EXTRA_MEMORY_CONSTRAINT (c))
2630 {
2631 /* Every memory operand can be reloaded to fit. */
2632 if (strict < 0 && GET_CODE (op) == MEM)
2633 win = 1;
2634
2635 /* Before reload, accept what reload can turn into mem. */
2636 if (strict < 0 && CONSTANT_P (op))
2637 win = 1;
2638
2639 /* During reload, accept a pseudo. */
2640 if (reload_in_progress && GET_CODE (op) == REG
2641 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2642 win = 1;
2643 }
2644 if (EXTRA_ADDRESS_CONSTRAINT (c))
2645 {
2646 /* Every address operand can be reloaded to fit. */
2647 if (strict < 0)
2648 win = 1;
2649 }
2650#endif
2651 break;
2652 }
2653 }
2654
2655 constraints[opno] = p;
2656 /* If this operand did not win somehow,
2657 this alternative loses. */
2658 if (! win)
2659 lose = 1;
2660 }
2661 /* This alternative won; the operands are ok.
2662 Change whichever operands this alternative says to change. */
2663 if (! lose)
2664 {
2665 int opno, eopno;
2666
2667 /* See if any earlyclobber operand conflicts with some other
2668 operand. */
2669
2670 if (strict > 0)
2671 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2672 /* Ignore earlyclobber operands now in memory,
2673 because we would often report failure when we have
2674 two memory operands, one of which was formerly a REG. */
2675 if (earlyclobber[eopno]
2676 && GET_CODE (recog_data.operand[eopno]) == REG)
2677 for (opno = 0; opno < recog_data.n_operands; opno++)
2678 if ((GET_CODE (recog_data.operand[opno]) == MEM
2679 || recog_data.operand_type[opno] != OP_OUT)
2680 && opno != eopno
2681 /* Ignore things like match_operator operands. */
2682 && *recog_data.constraints[opno] != 0
2683 && ! (matching_operands[opno] == eopno
2684 && operands_match_p (recog_data.operand[opno],
2685 recog_data.operand[eopno]))
2686 && ! safe_from_earlyclobber (recog_data.operand[opno],
2687 recog_data.operand[eopno]))
2688 lose = 1;
2689
2690 if (! lose)
2691 {
2692 while (--funny_match_index >= 0)
2693 {
2694 recog_data.operand[funny_match[funny_match_index].other]
2695 = recog_data.operand[funny_match[funny_match_index].this];
2696 }
2697
2698 return 1;
2699 }
2700 }
2701
2702 which_alternative++;
2703 }
2704 while (which_alternative < recog_data.n_alternatives);
2705
2706 which_alternative = -1;
2707 /* If we are about to reject this, but we are not to test strictly,
2708 try a very loose test. Only return failure if it fails also. */
2709 if (strict == 0)
2710 return constrain_operands (-1);
2711 else
2712 return 0;
2713}
2714
2715/* Return 1 iff OPERAND (assumed to be a REG rtx)
2716 is a hard reg in class CLASS when its regno is offset by OFFSET
2717 and changed to mode MODE.
2718 If REG occupies multiple hard regs, all of them must be in CLASS. */
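/* For example, on a 32-bit target where a DImode value occupies two hard
 registers, a DImode REG numbered N uses N and N + 1, and both must lie
 in CLASS for the test to succeed. */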
2719
2720int
2721reg_fits_class_p (operand, class, offset, mode)
2722 rtx operand;
2723 enum reg_class class;
2724 int offset;
2725 enum machine_mode mode;
2726{
2727 int regno = REGNO (operand);
2728 if (regno < FIRST_PSEUDO_REGISTER
2729 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2730 regno + offset))
2731 {
2732 int sr;
2733 regno += offset;
2734 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2735 sr > 0; sr--)
2736 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2737 regno + sr))
2738 break;
2739 return sr == 0;
2740 }
2741
2742 return 0;
2743}
2744
2745/* Split a single instruction. Helper function for split_all_insns.
2746 Return the last insn in the sequence if successful, or NULL if unsuccessful. */
2747static rtx
2748split_insn (insn)
2749 rtx insn;
2750{
2751 rtx set;
2752 if (!INSN_P (insn))
2753 ;
2754 /* Don't split no-op move insns. These should silently
2755 disappear later in final. Splitting such insns would
2756 break the code that handles REG_NO_CONFLICT blocks. */
2757
2758 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2759 {
2760 /* Nops get in the way while scheduling, so delete them
2761 now if register allocation has already been done. It
2762 is too risky to try to do this before register
2763 allocation, and there are unlikely to be very many
2764 nops then anyway. */
2765 if (reload_completed)
2766 delete_insn_and_edges (insn);
2767 }
2768 else
2769 {
2770 /* Split insns here to get max fine-grain parallelism. */
2771 rtx first = PREV_INSN (insn);
2772 rtx last = try_split (PATTERN (insn), insn, 1);
2773
2774 if (last != insn)
2775 {
2776 /* try_split returns the NOTE that INSN became. */
2777 PUT_CODE (insn, NOTE);
2778 NOTE_SOURCE_FILE (insn) = 0;
2779 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2780
2781 /* ??? Coddle to md files that generate subregs in post-
2782 reload splitters instead of computing the proper
2783 hard register. */
2784 if (reload_completed && first != last)
2785 {
2786 first = NEXT_INSN (first);
2787 while (1)
2788 {
2789 if (INSN_P (first))
2790 cleanup_subreg_operands (first);
2791 if (first == last)
2792 break;
2793 first = NEXT_INSN (first);
2794 }
2795 }
2796 return last;
2797 }
2798 }
2799 return NULL_RTX;
2800}
2801/* Split all insns in the function. If UPD_LIFE, update life info after. */
2802
2803void
2804split_all_insns (upd_life)
2805 int upd_life;
2806{
2807 sbitmap blocks;
2808 bool changed;
2809 basic_block bb;
2810
2811 blocks = sbitmap_alloc (last_basic_block);
2812 sbitmap_zero (blocks);
2813 changed = false;
2814
2815 FOR_EACH_BB_REVERSE (bb)
2816 {
2817 rtx insn, next;
2818 bool finish = false;
2819
2820 for (insn = bb->head; !finish ; insn = next)
2821 {
2822 rtx last;
2823
2824 /* Can't use `next_real_insn' because that might go across
2825 CODE_LABELS and short-out basic blocks. */
2826 next = NEXT_INSN (insn);
2827 finish = (insn == bb->end);
2828 last = split_insn (insn);
2829 if (last)
2830 {
2831 /* The split sequence may include a barrier, but the
2832 BB boundary we are interested in will be set to the
2833 previous one. */
2834
2835 while (GET_CODE (last) == BARRIER)
2836 last = PREV_INSN (last);
2837 SET_BIT (blocks, bb->index);
2838 changed = true;
2839 insn = last;
2840 }
2841 }
2842 }
2843
2844 if (changed)
2845 {
2846 int old_last_basic_block = last_basic_block;
2847
2848 find_many_sub_basic_blocks (blocks);
2849
2850 if (old_last_basic_block != last_basic_block && upd_life)
2851 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2852 }
2853
2854 if (changed && upd_life)
2855 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2856 PROP_DEATH_NOTES | PROP_REG_INFO);
2857
2858#ifdef ENABLE_CHECKING
2859 verify_flow_info ();
2860#endif
2861
2862 sbitmap_free (blocks);
2863}
2864
2865/* Same as split_all_insns, but do not expect CFG to be available.
2866 Used by machine dependent reorg passes. */
2867
2868void
2869split_all_insns_noflow ()
2870{
2871 rtx next, insn;
2872
2873 for (insn = get_insns (); insn; insn = next)
2874 {
2875 next = NEXT_INSN (insn);
2876 split_insn (insn);
2877 }
2878 return;
2879}
2880
2881#ifdef HAVE_peephole2
2882struct peep2_insn_data
2883{
2884 rtx insn;
2885 regset live_before;
2886};
2887
2888static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2889static int peep2_current;
2890
2891/* A non-insn marker indicating the last insn of the block.
2892 The live_before regset for this element is correct, indicating
2893 global_live_at_end for the block. */
2894#define PEEP2_EOB pc_rtx
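
/* peep2_insn_data is used as a circular buffer of MAX_INSNS_PER_PEEP2 + 1
 entries, so indexes derived from peep2_current are reduced modulo that
 length throughout the lookups below. */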
2895
2896/* Return the Nth non-note insn after `current', or return NULL_RTX if it
2897 does not exist. Used by the recognizer to find the next insn to match
2898 in a multi-insn pattern. */
2899
2900rtx
2901peep2_next_insn (n)
2902 int n;
2903{
2904 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2905 abort ();
2906
2907 n += peep2_current;
2908 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2909 n -= MAX_INSNS_PER_PEEP2 + 1;
2910
2911 if (peep2_insn_data[n].insn == PEEP2_EOB)
2912 return NULL_RTX;
2913 return peep2_insn_data[n].insn;
2914}
2915
2916/* Return true if REGNO is dead before the Nth non-note insn
2917 after `current'. */
2918
2919int
2920peep2_regno_dead_p (ofs, regno)
2921 int ofs;
2922 int regno;
2923{
2924 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2925 abort ();
2926
2927 ofs += peep2_current;
2928 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2929 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2930
2931 if (peep2_insn_data[ofs].insn == NULL_RTX)
2932 abort ();
2933
2934 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2935}
2936
2937/* Similarly for a REG. */
2938
2939int
2940peep2_reg_dead_p (ofs, reg)
2941 int ofs;
2942 rtx reg;
2943{
2944 int regno, n;
2945
2946 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2947 abort ();
2948
2949 ofs += peep2_current;
2950 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2951 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2952
2953 if (peep2_insn_data[ofs].insn == NULL_RTX)
2954 abort ();
2955
2956 regno = REGNO (reg);
2957 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2958 while (--n >= 0)
2959 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2960 return 0;
2961 return 1;
2962}
2963
2964/* Try to find a hard register of mode MODE, matching the register class in
2965 CLASS_STR, which is available from the start of the FROM'th insn of the
2966 current peephole window through the end of the TO'th insn. If TO equals
2967 FROM, the only condition is that the register must be available before
2968 that single insn.
2969 Registers that already have bits set in REG_SET will not be considered.
2970
2971 If an appropriate register is available, it will be returned and the
2972 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2973 returned. */
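/* For instance, a (match_scratch:SI 3 "r") in a define_peephole2 is
 satisfied by a generated call to this function requesting a free SImode
 register of class GENERAL_REGS over the matched window; the operand
 number is illustrative. */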
2974
2975rtx
2976peep2_find_free_register (from, to, class_str, mode, reg_set)
2977 int from, to;
2978 const char *class_str;
2979 enum machine_mode mode;
2980 HARD_REG_SET *reg_set;
2981{
2982 static int search_ofs;
2983 enum reg_class class;
2984 HARD_REG_SET live;
2985 int i;
2986
2987 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2988 abort ();
2989
2990 from += peep2_current;
2991 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2992 from -= MAX_INSNS_PER_PEEP2 + 1;
2993 to += peep2_current;
2994 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2995 to -= MAX_INSNS_PER_PEEP2 + 1;
2996
2997 if (peep2_insn_data[from].insn == NULL_RTX)
2998 abort ();
2999 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3000
3001 while (from != to)
3002 {
3003 HARD_REG_SET this_live;
3004
3005 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3006 from = 0;
3007 if (peep2_insn_data[from].insn == NULL_RTX)
3008 abort ();
3009 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3010 IOR_HARD_REG_SET (live, this_live);
3011 }
3012
3013 class = (class_str[0] == 'r' ? GENERAL_REGS
3014 : REG_CLASS_FROM_LETTER (class_str[0]));
3015
3016 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3017 {
3018 int raw_regno, regno, success, j;
3019
3020 /* Distribute the free registers as much as possible. */
3021 raw_regno = search_ofs + i;
3022 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3023 raw_regno -= FIRST_PSEUDO_REGISTER;
3024#ifdef REG_ALLOC_ORDER
3025 regno = reg_alloc_order[raw_regno];
3026#else
3027 regno = raw_regno;
3028#endif
3029
3030 /* Don't allocate fixed registers. */
3031 if (fixed_regs[regno])
3032 continue;
3033 /* Make sure the register is of the right class. */
3034 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3035 continue;
3036 /* And can support the mode we need. */
3037 if (! HARD_REGNO_MODE_OK (regno, mode))
3038 continue;
3039 /* And that we don't create an extra save/restore. */
3040 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3041 continue;
3042 /* And we don't clobber traceback for noreturn functions. */
3043 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3044 && (! reload_completed || frame_pointer_needed))
3045 continue;
3046
3047 success = 1;
3048 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3049 {
3050 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3051 || TEST_HARD_REG_BIT (live, regno + j))
3052 {
3053 success = 0;
3054 break;
3055 }
3056 }
3057 if (success)
3058 {
3059 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3060 SET_HARD_REG_BIT (*reg_set, regno + j);
3061
3062 /* Start the next search with the next register. */
3063 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3064 raw_regno = 0;
3065 search_ofs = raw_regno;
3066
3067 return gen_rtx_REG (mode, regno);
3068 }
3069 }
3070
3071 search_ofs = 0;
3072 return NULL_RTX;
3073}
3074
3075/* Perform the peephole2 optimization pass. */
3076
3077void
3078peephole2_optimize (dump_file)
3079 FILE *dump_file ATTRIBUTE_UNUSED;
3080{
3081 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3082 rtx insn, prev;
3083 regset live;
3084 int i;
3085 basic_block bb;
3086#ifdef HAVE_conditional_execution
3087 sbitmap blocks;
3088 bool changed;
3089#endif
3090 bool do_cleanup_cfg = false;
3091 bool do_rebuild_jump_labels = false;
3092
3093 /* Initialize the regsets we're going to use. */
3094 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3095 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3096 live = INITIALIZE_REG_SET (rs_heads[i]);
3097
3098#ifdef HAVE_conditional_execution
3099 blocks = sbitmap_alloc (last_basic_block);
3100 sbitmap_zero (blocks);
3101 changed = false;
3102#else
3103 count_or_remove_death_notes (NULL, 1);
3104#endif
3105
3106 FOR_EACH_BB_REVERSE (bb)
3107 {
3108 struct propagate_block_info *pbi;
3109
3110 /* Indicate that all slots except the last hold invalid data. */
3111 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3112 peep2_insn_data[i].insn = NULL_RTX;
3113
3114 /* Indicate that the last slot contains live_after data. */
3115 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3116 peep2_current = MAX_INSNS_PER_PEEP2;
3117
3118 /* Start up propagation. */
3119 COPY_REG_SET (live, bb->global_live_at_end);
3120 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3121
3122#ifdef HAVE_conditional_execution
3123 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3124#else
3125 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3126#endif
3127
3128 for (insn = bb->end; ; insn = prev)
3129 {
3130 prev = PREV_INSN (insn);
3131 if (INSN_P (insn))
3132 {
3133 rtx try, before_try, x;
3134 int match_len;
3135 rtx note;
3136 bool was_call = false;
3137
3138 /* Record this insn. */
3139 if (--peep2_current < 0)
3140 peep2_current = MAX_INSNS_PER_PEEP2;
3141 peep2_insn_data[peep2_current].insn = insn;
3142 propagate_one_insn (pbi, insn);
3143 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3144
3145 /* Match the peephole. */
3146 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3147 if (try != NULL)
3148 {
3149 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3150 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3151 cfg-related call notes. */
3152 for (i = 0; i <= match_len; ++i)
3153 {
3154 int j;
3155 rtx old_insn, new_insn, note;
3156
3157 j = i + peep2_current;
3158 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3159 j -= MAX_INSNS_PER_PEEP2 + 1;
3160 old_insn = peep2_insn_data[j].insn;
3161 if (GET_CODE (old_insn) != CALL_INSN)
3162 continue;
3163 was_call = true;
3164
3165 new_insn = try;
3166 while (new_insn != NULL_RTX)
3167 {
3168 if (GET_CODE (new_insn) == CALL_INSN)
3169 break;
3170 new_insn = NEXT_INSN (new_insn);
3171 }
3172
3173 if (new_insn == NULL_RTX)
3174 abort ();
3175
3176 CALL_INSN_FUNCTION_USAGE (new_insn)
3177 = CALL_INSN_FUNCTION_USAGE (old_insn);
3178
3179 for (note = REG_NOTES (old_insn);
3180 note;
3181 note = XEXP (note, 1))
3182 switch (REG_NOTE_KIND (note))
3183 {
3184 case REG_NORETURN:
3185 case REG_SETJMP:
3186 case REG_ALWAYS_RETURN:
3187 REG_NOTES (new_insn)
3188 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3189 XEXP (note, 0),
3190 REG_NOTES (new_insn));
3191 default:
3192 /* Discard all other reg notes. */
3193 break;
3194 }
3195
3196 /* Croak if there is another call in the sequence. */
3197 while (++i <= match_len)
3198 {
3199 j = i + peep2_current;
3200 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3201 j -= MAX_INSNS_PER_PEEP2 + 1;
3202 old_insn = peep2_insn_data[j].insn;
3203 if (GET_CODE (old_insn) == CALL_INSN)
3204 abort ();
3205 }
3206 break;
3207 }
3208
3209 i = match_len + peep2_current;
3210 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3211 i -= MAX_INSNS_PER_PEEP2 + 1;
3212
3213 note = find_reg_note (peep2_insn_data[i].insn,
3214 REG_EH_REGION, NULL_RTX);
3215
3216 /* Replace the old sequence with the new. */
3217 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3218 INSN_SCOPE (peep2_insn_data[i].insn));
3219 before_try = PREV_INSN (insn);
3220 delete_insn_chain (insn, peep2_insn_data[i].insn);
3221
3222 /* Re-insert the EH_REGION notes. */
3223 if (note || (was_call && nonlocal_goto_handler_labels))
3224 {
3225 edge eh_edge;
3226
3227 for (eh_edge = bb->succ; eh_edge
3228 ; eh_edge = eh_edge->succ_next)
3229 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3230 break;
3231
3232 for (x = try ; x != before_try ; x = PREV_INSN (x))
3233 if (GET_CODE (x) == CALL_INSN
3234 || (flag_non_call_exceptions
3235 && may_trap_p (PATTERN (x))
3236 && !find_reg_note (x, REG_EH_REGION, NULL)))
3237 {
3238 if (note)
3239 REG_NOTES (x)
3240 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3241 XEXP (note, 0),
3242 REG_NOTES (x));
3243
3244 if (x != bb->end && eh_edge)
3245 {
3246 edge nfte, nehe;
3247 int flags;
3248
3249 nfte = split_block (bb, x);
3250 flags = (eh_edge->flags
3251 & (EDGE_EH | EDGE_ABNORMAL));
3252 if (GET_CODE (x) == CALL_INSN)
3253 flags |= EDGE_ABNORMAL_CALL;
3254 nehe = make_edge (nfte->src, eh_edge->dest,
3255 flags);
3256
3257 nehe->probability = eh_edge->probability;
3258 nfte->probability
3259 = REG_BR_PROB_BASE - nehe->probability;
3260
3261 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3262#ifdef HAVE_conditional_execution
3263 SET_BIT (blocks, nfte->dest->index);
3264 changed = true;
3265#endif
3266 bb = nfte->src;
3267 eh_edge = nehe;
3268 }
3269 }
3270
3271 /* A possibly trapping insn may have been turned into a
3272 non-trapping one. Zap any dummy outgoing edges. */
3273 do_cleanup_cfg |= purge_dead_edges (bb);
3274 }
3275
3276#ifdef HAVE_conditional_execution
3277 /* With conditional execution, we cannot back up the
3278 live information so easily, since the conditional
3279 death data structures are not so self-contained.
3280 So record that we've made a modification to this
3281 block and update life information at the end. */
3282 SET_BIT (blocks, bb->index);
3283 changed = true;
3284
3285 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3286 peep2_insn_data[i].insn = NULL_RTX;
3287 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3288#else
3289 /* Back up lifetime information past the end of the
3290 newly created sequence. */
3291 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3292 i = 0;
3293 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3294
3295 /* Update life information for the new sequence. */
3296 x = try;
3297 do
3298 {
3299 if (INSN_P (x))
3300 {
3301 if (--i < 0)
3302 i = MAX_INSNS_PER_PEEP2;
3303 peep2_insn_data[i].insn = x;
3304 propagate_one_insn (pbi, x);
3305 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3306 }
3307 x = PREV_INSN (x);
3308 }
3309 while (x != prev);
3310
3311 /* ??? Should verify that LIVE now matches what we
3312 had before the new sequence. */
3313
3314 peep2_current = i;
3315#endif
3316
3317 /* If we generated a jump instruction, it won't have
3318 JUMP_LABEL set. Recompute after we're done. */
3319 for (x = try; x != before_try; x = PREV_INSN (x))
3320 if (GET_CODE (x) == JUMP_INSN)
3321 {
3322 do_rebuild_jump_labels = true;
3323 break;
3324 }
3325 }
3326 }
3327
3328 if (insn == bb->head)
3329 break;
3330 }
3331
3332 free_propagate_block_info (pbi);
3333 }
3334
3335 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3336 FREE_REG_SET (peep2_insn_data[i].live_before);
3337 FREE_REG_SET (live);
3338
3339 if (do_rebuild_jump_labels)
3340 rebuild_jump_labels (get_insns ());
3341
3342 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3343 we've changed global life since exception handlers are no longer
3344 reachable. */
3345 if (do_cleanup_cfg)
3346 {
3347 cleanup_cfg (0);
3348 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3349 }
3350#ifdef HAVE_conditional_execution
3351 else
3352 {
3353 count_or_remove_death_notes (blocks, 1);
3354 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3355 }
3356 sbitmap_free (blocks);
3357#endif
3358}
3359#endif /* HAVE_peephole2 */
3360
3361/* Common predicates for use with define_bypass. */
3362
3363/* True if the dependency between OUT_INSN and IN_INSN is on the store
3364 data, not the address operand(s), of the store. IN_INSN must be
3365 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3366 SETs inside. */
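/* Illustratively, with OUT_INSN (set (reg 3) ...) and IN_INSN
 (set (mem (reg 4)) (reg 3)), the dependency is on the stored data and we
 return true; with IN_INSN (set (mem (reg 3)) (reg 5)) it is on the
 address and we return false. Register numbers are arbitrary. */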
3367
3368int
3369store_data_bypass_p (out_insn, in_insn)
3370 rtx out_insn, in_insn;
3371{
3372 rtx out_set, in_set;
3373
3374 in_set = single_set (in_insn);
3375 if (! in_set)
3376 abort ();
3377
3378 if (GET_CODE (SET_DEST (in_set)) != MEM)
3379 return false;
3380
3381 out_set = single_set (out_insn);
3382 if (out_set)
3383 {
3384 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3385 return false;
3386 }
3387 else
3388 {
3389 rtx out_pat;
3390 int i;
3391
3392 out_pat = PATTERN (out_insn);
3393 if (GET_CODE (out_pat) != PARALLEL)
3394 abort ();
3395
3396 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3397 {
3398 rtx exp = XVECEXP (out_pat, 0, i);
3399
3400 if (GET_CODE (exp) == CLOBBER)
3401 continue;
3402
3403 if (GET_CODE (exp) != SET)
3404 abort ();
3405
3406 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3407 return false;
3408 }
3409 }
3410
3411 return true;
3412}
3413
3414/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3415 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3416 or multiple set; IN_INSN should be single_set for truth, but for convenience
3417 of insn categorization may be any JUMP or CALL insn. */
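/* Illustratively, with IN_INSN
 (set (reg 1) (if_then_else (eq (reg 2) (const_int 0)) (reg 3) (reg 4))),
 an OUT_INSN setting (reg 2) feeds only the condition and we return true,
 while one setting (reg 3) or (reg 4) feeds a chosen arm and we return
 false. Register numbers are arbitrary. */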
3418
3419int
3420if_test_bypass_p (out_insn, in_insn)
3421 rtx out_insn, in_insn;
3422{
3423 rtx out_set, in_set;
3424
3425 in_set = single_set (in_insn);
3426 if (! in_set)
3427 {
3428 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3429 return false;
3430 abort ();
3431 }
3432
3433 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3434 return false;
3435 in_set = SET_SRC (in_set);
3436
3437 out_set = single_set (out_insn);
3438 if (out_set)
3439 {
3440 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3441 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3442 return false;
3443 }
3444 else
3445 {
3446 rtx out_pat;
3447 int i;
3448
3449 out_pat = PATTERN (out_insn);
3450 if (GET_CODE (out_pat) != PARALLEL)
3451 abort ();
3452
3453 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3454 {
3455 rtx exp = XVECEXP (out_pat, 0, i);
3456
3457 if (GET_CODE (exp) == CLOBBER)
3458 continue;
3459
3460 if (GET_CODE (exp) != SET)
3461 abort ();
3462
3463 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3464 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3465 return false;
3466 }
3467 }
3468
3469 return true;
3470}