recog.c revision 90075
1/* Subroutines used by or related to instruction recognition.
2   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "tm_p.h"
27#include "insn-config.h"
28#include "insn-attr.h"
29#include "hard-reg-set.h"
30#include "recog.h"
31#include "regs.h"
32#include "expr.h"
33#include "function.h"
34#include "flags.h"
35#include "real.h"
36#include "toplev.h"
37#include "basic-block.h"
38#include "output.h"
39#include "reload.h"
40
/* If the target does not define the addressing codes used for stack
   pushes and pops, derive defaults from the direction of stack growth:
   a push adjusts the stack pointer toward growth before the store
   (PRE_DEC on a downward-growing stack, PRE_INC otherwise), and a pop
   adjusts it back after the load (POST_INC / POST_DEC).  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
56
57static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
58static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
59static void validate_replace_src_1 	PARAMS ((rtx *, void *));
60static rtx split_insn			PARAMS ((rtx));
61
62/* Nonzero means allow operands to be volatile.
63   This should be 0 if you are generating rtl, such as if you are calling
64   the functions in optabs.c and expmed.c (most of the time).
65   This should be 1 if all valid insns need to be recognized,
66   such as in regclass.c and final.c and reload.c.
67
68   init_recog and init_recog_no_volatile are responsible for setting this.  */
69
70int volatile_ok;
71
72struct recog_data recog_data;
73
74/* Contains a vector of operand_alternative structures for every operand.
75   Set up by preprocess_constraints.  */
76struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77
78/* On return from `constrain_operands', indicate which alternative
79   was satisfied.  */
80
81int which_alternative;
82
83/* Nonzero after end of reload pass.
84   Set to 1 or 0 by toplev.c.
85   Controls the significance of (SUBREG (MEM)).  */
86
87int reload_completed;
88
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  /* Reject volatile memory operands; appropriate while generating rtl.  */
  volatile_ok = 0;
}
98
void
init_recog ()
{
  /* Accept volatile memory operands; appropriate when all valid insns
     must be recognized, e.g. in regclass.c, final.c and reload.c.  */
  volatile_ok = 1;
}
104
105/* Try recognizing the instruction INSN,
106   and return the code number that results.
107   Remember the code so that repeated calls do not
108   need to spend the time for actual rerecognition.
109
110   This function is the normal interface to instruction recognition.
111   The automatically-generated function `recog' is normally called
112   through this one.  (The only exception is in combine.c.)  */
113
114int
115recog_memoized_1 (insn)
116     rtx insn;
117{
118  if (INSN_CODE (insn) < 0)
119    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
120  return INSN_CODE (insn);
121}
122
123/* Check that X is an insn-body for an `asm' with operands
124   and that the operands mentioned in it are legitimate.  */
125
126int
127check_asm_operands (x)
128     rtx x;
129{
130  int noperands;
131  rtx *operands;
132  const char **constraints;
133  int i;
134
135  /* Post-reload, be more strict with things.  */
136  if (reload_completed)
137    {
138      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
139      extract_insn (make_insn_raw (x));
140      constrain_operands (1);
141      return which_alternative >= 0;
142    }
143
144  noperands = asm_noperands (x);
145  if (noperands < 0)
146    return 0;
147  if (noperands == 0)
148    return 1;
149
150  operands = (rtx *) alloca (noperands * sizeof (rtx));
151  constraints = (const char **) alloca (noperands * sizeof (char *));
152
153  decode_asm_operands (x, operands, NULL, constraints, NULL);
154
155  for (i = 0; i < noperands; i++)
156    {
157      const char *c = constraints[i];
158      if (c[0] == '%')
159	c++;
160      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
161	c = constraints[c[0] - '0'];
162
163      if (! asm_operand_ok (operands[i], c))
164        return 0;
165    }
166
167  return 1;
168}
169
/* Static data for the next two routines.  */

/* One pending replacement recorded by validate_change, with enough
   state to undo it in cancel_changes.  */
typedef struct change_t
{
  rtx object;	/* The MEM or insn being changed, or zero.  */
  int old_code;	/* Saved INSN_CODE of OBJECT (insns only).  */
  rtx *loc;	/* Location within OBJECT where the new rtx was stored.  */
  rtx old;	/* The rtx previously at *LOC.  */
} change_t;

/* Dynamically grown array of pending changes; see validate_change.  */
static change_t *changes;
static int changes_allocated;

/* Number of entries of CHANGES currently in use.  */
static int num_changes = 0;
184
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
    rtx object;
    rtx *loc;
    rtx new;
    int in_group;
{
  rtx old = *loc;

  /* A no-op replacement always "succeeds"; check pointer identity first
     to avoid the cost of rtx_equal_p in the common case.  */
  if (old == new || rtx_equal_p (old, new))
    return 1;

  /* A lone change may not be issued while a group is still pending.  */
  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t*) xrealloc (changes,
			      sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
257
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.
   Returns nonzero if INSN is now invalid; on success, updates
   INSN_CODE (INSN) and may wrap PATTERN (INSN) in a PARALLEL to hold
   newly required CLOBBERs.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operand aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      /* Wrap the SET and the required clobbers into one PARALLEL.  */
      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
310
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  On failure, all
   pending changes are retracted (see cancel_changes).  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* if there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      /* A two-element PARALLEL degenerates to its first element
		 once the trailing CLOBBER is dropped.  */
	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  /* We validated every change iff the loop ran to completion.  */
  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
404
/* Return the number of changes so far in the current group.
   Useful as a watermark to pass to cancel_changes later.  */

int
num_validated_changes ()
{
  return num_changes;
}
412
413/* Retract the changes numbered NUM and up.  */
414
415void
416cancel_changes (num)
417     int num;
418{
419  int i;
420
421  /* Back out all the changes.  Do this in the opposite order in which
422     they were made.  */
423  for (i = num_changes - 1; i >= num; i--)
424    {
425      *changes[i].loc = changes[i].old;
426      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
427	INSN_CODE (changes[i].object) = changes[i].old_code;
428    }
429  num_changes = num;
430}
431
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  After substituting, apply the minimal
   fixups needed to keep the containing rtx well-formed (canonical operand
   order, folded constants, simplified subregs/extensions); no other
   simplification is attempted.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  /* Watermark of the change group on entry; used below to detect whether
     any substitution happened beneath this node.  */
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  /* Re-canonicalize commutative and comparison operands if the
     substitution disturbed their canonical order.  */
  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         plus_constant to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      /* Canonicalize (minus x const) as (plus x (neg const)).  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
613
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  Returns nonzero on success; on failure all
   changes are undone.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}
625
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.
   Returns nonzero on success; on failure all changes are undone.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
636
/* Try replacing every occurrence of FROM in INSN with TO.  The changes
   are left pending in the current group; the caller must finish with
   apply_change_group or cancel_changes.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
645
/* Function called by note_uses to replace used subexpressions.  */
/* Closure passed through note_uses' void *data parameter.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};
653
654static void
655validate_replace_src_1 (x, data)
656     rtx *x;
657     void *data;
658{
659  struct validate_replace_src_data *d
660    = (struct validate_replace_src_data *) data;
661
662  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
663}
664
665/* Try replacing every occurrence of FROM in INSN with TO, avoiding
666   SET_DESTs.  After all changes have been made, validate by seeing if
667   INSN is still valid.  */
668
669int
670validate_replace_src (from, to, insn)
671     rtx from, to, insn;
672{
673  struct validate_replace_src_data d;
674
675  d.from = from;
676  d.to = to;
677  d.insn = insn;
678  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
679  return apply_change_group ();
680}
681
682#ifdef HAVE_cc0
683/* Return 1 if the insn using CC0 set by INSN does not contain
684   any ordered tests applied to the condition codes.
685   EQ and NE tests do not count.  */
686
687int
688next_insn_tests_no_inequality (insn)
689     rtx insn;
690{
691  rtx next = next_cc0_user (insn);
692
693  /* If there is no next insn, we have to take the conservative choice.  */
694  if (next == 0)
695    return 0;
696
697  return ((GET_CODE (next) == JUMP_INSN
698	   || GET_CODE (next) == INSN
699	   || GET_CODE (next) == CALL_INSN)
700	  && ! inequality_comparisons_p (PATTERN (next)));
701}
702
703#if 0  /* This is useless since the insn that sets the cc's
704	  must be followed immediately by the use of them.  */
705/* Return 1 if the CC value set up by INSN is not used.  */
706
707int
708next_insns_test_no_inequality (insn)
709     rtx insn;
710{
711  rtx next = NEXT_INSN (insn);
712
713  for (; next != 0; next = NEXT_INSN (next))
714    {
715      if (GET_CODE (next) == CODE_LABEL
716	  || GET_CODE (next) == BARRIER)
717	return 1;
718      if (GET_CODE (next) == NOTE)
719	continue;
720      if (inequality_comparisons_p (PATTERN (next)))
721	return 0;
722      if (sets_cc0_p (PATTERN (next)) == 1)
723	return 1;
724      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
725	return 1;
726    }
727  return 1;
728}
729#endif
730#endif
731
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.
   Returns zero when DEST is unused or used more than once below *LOC.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    /* Leaf rtxs and CLOBBERs cannot contain a counted use.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* A direct appearance of DEST (or of the same hard/pseudo reg)
	     makes LOC itself the innermost containing expression.  */
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		/* Duplicate usage.  */
		return 0;
	    }
	}
    }

  return result;
}
830
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  /* cc0 is only live until the next insn, so only that insn need be
     examined.  */
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  /* Scan forward to the insn where DEST dies or is set, stopping at a
     label (beyond which liveness can't be tracked this simply).  */
  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	/* Require a LOG_LINKS entry back to INSN, proving NEXT's use of
	   DEST is the value INSN computed.  */
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
897
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Reject a CONST_INT whose value is not already sign-extended to the
     width of MODE; such constants are not canonical.  */
  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && GET_CODE (SUBREG_REG (op)) == MEM)
        return 0;

      /* Judge the inner object by the same rules below.  */
      op = SUBREG_REG (op);
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      /* GO_IF_LEGITIMATE_ADDRESS branches to `win' below on success.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
1003
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
1017
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      /* Reject a subreg of a hard register when the mode change would
	 alter the register's contents and the class forbids it, except
	 for complex modes whose parts are accessed via subregs.  */
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (SUBREG_REG (op))))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1077
1078/* Return 1 for a register in Pmode; ignore the tested mode.  */
1079
1080int
1081pmode_register_operand (op, mode)
1082     rtx op;
1083     enum machine_mode mode ATTRIBUTE_UNUSED;
1084{
1085  return register_operand (op, Pmode);
1086}
1087
1088/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1089   or a hard register.  */
1090
1091int
1092scratch_operand (op, mode)
1093     rtx op;
1094     enum machine_mode mode;
1095{
1096  if (GET_MODE (op) != mode && mode != VOIDmode)
1097    return 0;
1098
1099  return (GET_CODE (op) == SCRATCH
1100	  || (GET_CODE (op) == REG
1101	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
1102}
1103
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Reject an integer constant that does not fit in MODE, i.e. one
     whose value changes when truncated to MODE's width.  */
  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* Otherwise accept any constant whose mode agrees with MODE and
     that the target allows (including under PIC, if configured).  */
  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
1139
1140/* Returns 1 if OP is an operand that is a CONST_INT.  */
1141
1142int
1143const_int_operand (op, mode)
1144     rtx op;
1145     enum machine_mode mode;
1146{
1147  if (GET_CODE (op) != CONST_INT)
1148    return 0;
1149
1150  if (mode != VOIDmode
1151      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1152    return 0;
1153
1154  return 1;
1155}
1156
1157/* Returns 1 if OP is an operand that is a constant integer or constant
1158   floating-point number.  */
1159
1160int
1161const_double_operand (op, mode)
1162     rtx op;
1163     enum machine_mode mode;
1164{
1165  /* Don't accept CONST_INT or anything similar
1166     if the caller wants something floating.  */
1167  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1168      && GET_MODE_CLASS (mode) != MODE_INT
1169      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1170    return 0;
1171
1172  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1173	  && (mode == VOIDmode || GET_MODE (op) == mode
1174	      || GET_MODE (op) == VOIDmode));
1175}
1176
1177/* Return 1 if OP is a general operand that is not an immediate operand.  */
1178
1179int
1180nonimmediate_operand (op, mode)
1181     rtx op;
1182     enum machine_mode mode;
1183{
1184  return (general_operand (op, mode) && ! CONSTANT_P (op));
1185}
1186
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      /* Reject an integer constant whose value does not fit in MODE.  */
      if (GET_CODE (op) == CONST_INT
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      /* Accept a constant whose mode agrees with MODE and that the
	 target allows (including under PIC, if configured).  */
      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  /* Non-constant: require a mode match unless MODE is VOIDmode.  */
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1237
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  /* The target may round the amount actually pushed up to an
     alignment boundary.  */
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  /* Examine the MEM's address from here on.  */
  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      /* No rounding: the address must be a plain push
	 (PRE_DEC or PRE_INC, depending on stack direction).  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      /* Rounding happened: the address must be
	 (pre_modify sp (plus sp (const_int +/-rounded_size))).  */
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
	  )
	return 0;
    }

  /* Finally, the register being adjusted must be the stack pointer.  */
  return XEXP (op, 0) == stack_pointer_rtx;
}
1285
1286/* Return 1 if OP is a valid operand that stands for popping a
1287   value of mode MODE off the stack.
1288
1289   The main use of this function is as a predicate in match_operand
1290   expressions in the machine description.  */
1291
1292int
1293pop_operand (op, mode)
1294     rtx op;
1295     enum machine_mode mode;
1296{
1297  if (GET_CODE (op) != MEM)
1298    return 0;
1299
1300  if (mode != VOIDmode && GET_MODE (op) != mode)
1301    return 0;
1302
1303  op = XEXP (op, 0);
1304
1305  if (GET_CODE (op) != STACK_POP_CODE)
1306    return 0;
1307
1308  return XEXP (op, 0) == stack_pointer_rtx;
1309}
1310
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  /* An ADDRESSOF is always acceptable; it is eliminated later and
     never reaches final address validation.  */
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  /* The target macro jumps to `win' if ADDR is legitimate for MODE;
     otherwise it falls through to the failure return.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
1327
1328/* Return 1 if OP is a valid memory reference with mode MODE,
1329   including a valid address.
1330
1331   The main use of this function is as a predicate in match_operand
1332   expressions in the machine description.  */
1333
1334int
1335memory_operand (op, mode)
1336     rtx op;
1337     enum machine_mode mode;
1338{
1339  rtx inner;
1340
1341  if (! reload_completed)
1342    /* Note that no SUBREG is a memory operand before end of reload pass,
1343       because (SUBREG (MEM...)) forces reloading into a register.  */
1344    return GET_CODE (op) == MEM && general_operand (op, mode);
1345
1346  if (mode != VOIDmode && GET_MODE (op) != mode)
1347    return 0;
1348
1349  inner = op;
1350  if (GET_CODE (inner) == SUBREG)
1351    inner = SUBREG_REG (inner);
1352
1353  return (GET_CODE (inner) == MEM && general_operand (op, mode));
1354}
1355
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      /* Byte offset the SUBREG selects within the inner MEM.  */
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* Ordinary case: a valid MEM whose address is itself a general
     operand in Pmode.  */
  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1390
1391/* Return 1 if this is a comparison operator.  This allows the use of
1392   MATCH_OPERATOR to recognize all the branch insns.  */
1393
1394int
1395comparison_operator (op, mode)
1396    rtx op;
1397    enum machine_mode mode;
1398{
1399  return ((mode == VOIDmode || GET_MODE (op) == mode)
1400	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
1401}
1402
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      /* Anything other than SET or CLOBBER means this is not
		 a recognizable asm body.  */
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  /* Total operand count: inputs plus one output per SET.  */
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      /* Any other rtx code is not an asm with operands.  */
      return -1;
    }
}
1481
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  /* The four accepted shapes of BODY mirror those in asm_noperands:
     a single SET of an ASM_OPERANDS, a bare ASM_OPERANDS, a PARALLEL
     of SETs plus CLOBBERs, and a PARALLEL of an ASM_OPERANDS plus
     CLOBBERs.  Output operands always come before inputs in the
     result vectors.  */
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      /* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      /* Inputs follow the outputs in the result vectors.  */
      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  /* Remains 0 if BODY matched none of the shapes above.  */
  return template;
}
1625
/* Check if an asm_operand matches it's constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  /* Walk the constraint string one letter at a time; any single
     alternative letter that accepts OP makes the whole constraint
     succeed.  */
  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	  /* Modifier and separator characters carry no matching
	     information of their own.  */
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  while (ISDIGIT (*constraint))
	    constraint++;
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  /* NOTE: the `(1 || ...)' deliberately short-circuits the
	     PRE_DEC/POST_DEC tests, per the comment above.  */
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  /* Same deliberate short-circuit as for '<' above.  */
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	    break;
#endif
	  /* FALLTHRU */

	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  /* 's' means a constant that is NOT a known integer; known
	     integers fall through to the 'i' test only for other
	     constants.  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	  /* 'I' through 'P' are target-defined integer ranges.  */
	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  /* 'X' matches anything.  */
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  /* NOTE: the `case 'r':' label below is intentionally nested
	     inside the if body so that 'r' shares this register test
	     while skipping the REG_CLASS_FROM_LETTER check.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
1826
1827/* Given an rtx *P, if it is a sum containing an integer constant term,
1828   return the location (type rtx *) of the pointer to that constant term.
1829   Otherwise, return a null pointer.  */
1830
1831rtx *
1832find_constant_term_loc (p)
1833     rtx *p;
1834{
1835  rtx *tem;
1836  enum rtx_code code = GET_CODE (*p);
1837
1838  /* If *P IS such a constant term, P is its location.  */
1839
1840  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1841      || code == CONST)
1842    return p;
1843
1844  /* Otherwise, if not a sum, it has no constant term.  */
1845
1846  if (GET_CODE (*p) != PLUS)
1847    return 0;
1848
1849  /* If one of the summands is constant, return its location.  */
1850
1851  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1852      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1853    return p;
1854
1855  /* Otherwise, check each summand for containing a constant term.  */
1856
1857  if (XEXP (*p, 0) != 0)
1858    {
1859      tem = find_constant_term_loc (&XEXP (*p, 0));
1860      if (tem != 0)
1861	return tem;
1862    }
1863
1864  if (XEXP (*p, 1) != 0)
1865    {
1866      tem = find_constant_term_loc (&XEXP (*p, 1));
1867      if (tem != 0)
1868	return tem;
1869    }
1870
1871  return 0;
1872}
1873
1874/* Return 1 if OP is a memory reference
1875   whose address contains no side effects
1876   and remains valid after the addition
1877   of a positive integer less than the
1878   size of the object being referenced.
1879
1880   We assume that the original address is valid and do not check it.
1881
1882   This uses strict_memory_address_p as a subroutine, so
1883   don't use it before reload.  */
1884
1885int
1886offsettable_memref_p (op)
1887     rtx op;
1888{
1889  return ((GET_CODE (op) == MEM)
1890	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1891}
1892
1893/* Similar, but don't require a strictly valid mem ref:
1894   consider pseudo-regs valid as index or base regs.  */
1895
1896int
1897offsettable_nonstrict_memref_p (op)
1898     rtx op;
1899{
1900  return ((GET_CODE (op) == MEM)
1901	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1902}
1903
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     rtx y;
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Choose the strict or non-strict address validator up front.  */
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  /* A constant address is offsettable by construction.  */
  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily bump the constant term in place, test validity,
	 then restore the original rtx contents.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Addresses with side effects (class 'a': autoincrement etc.)
     cannot be offset.  */
  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
1979}
1980
1981/* Return 1 if ADDR is an address-expression whose effect depends
1982   on the mode of the memory reference it is used in.
1983
1984   Autoincrement addressing is a typical example of mode-dependence
1985   because the amount of the increment depends on the mode.  */
1986
1987int
1988mode_dependent_address_p (addr)
1989  rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
1990{
1991  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1992  return 0;
1993  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
1994 win: ATTRIBUTE_UNUSED_LABEL
1995  return 1;
1996}
1997
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  /* Must be a general operand to begin with.  */
  if (! general_operand (op, mode))
    return 0;

  /* Non-memory general operands are trivially mode-independent.  */
  if (GET_CODE (op) != MEM)
    return 1;

  /* For a MEM, the target macro jumps to `lose' when the address is
     mode-dependent.  */
  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
2021
2022/* Like extract_insn, but save insn extracted and don't extract again, when
2023   called again for the same insn expecting that recog_data still contain the
2024   valid information.  This is used primary by gen_attr infrastructure that
2025   often does extract insn again and again.  */
2026void
2027extract_insn_cached (insn)
2028     rtx insn;
2029{
2030  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2031    return;
2032  extract_insn (insn);
2033  recog_data.insn = insn;
2034}
2035/* Do cached extract_insn, constrain_operand and complain about failures.
2036   Used by insn_attrtab.  */
2037void
2038extract_constrain_insn_cached (insn)
2039     rtx insn;
2040{
2041  extract_insn_cached (insn);
2042  if (which_alternative == -1
2043      && !constrain_operands (reload_completed))
2044    fatal_insn_not_found (insn);
2045}
2046/* Do cached constrain_operand and complain about failures.  */
2047int
2048constrain_operands_cached (strict)
2049	int strict;
2050{
2051  if (which_alternative == -1)
2052    return constrain_operands (strict);
2053  else
2054    return 1;
2055}
2056
2057/* Analyze INSN and fill in recog_data.  */
2058
void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  /* Invalidate any previously cached data and reset the globals that
     describe the insn being analyzed.  */
  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    /* These patterns have no operands to extract.  */
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      /* A PARALLEL is an asm if its first element is an asm
	 (with or without an enclosing SET).  */
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      /* The alternative count is one more than the number of
		 commas in the first operand's constraint string.  */
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand from the first letter of its constraint:
     `=' is a pure output, `+' is read-write, anything else an input.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
2155
2156/* After calling extract_insn, you can use this function to extract some
2157   information from the constraint strings into a more usable form.
2158   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints ()
{
  int i;

  memset (recog_op_alt, 0, sizeof recog_op_alt);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      /* P walks operand I's constraint string; each comma-separated
	 segment describes one alternative.  */
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].class = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  /* An empty segment places no restriction on the operand.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p++;
	      /* `#' disables the rest of this alternative.  */
	      if (c == '#')
		do
		  c = *p++;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		break;

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    /* A digit constraint matches an earlier operand;
		       record the link in both directions.  */
		    char *end;
		    op_alt[j].matches = strtoul (p - 1, &end, 10);
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
		  break;

		case 'm':
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
		    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
		  break;

		case 'g': case 'r':
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
		  break;

		default:
		  /* Any other letter names a machine-specific register
		     class; merge it into the accumulated class.  */
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
		  break;
		}
	    }
	}
    }
}
2263
2264/* Check the operands of an insn against the insn's operand constraints
2265   and return 1 if they are valid.
2266   The information about the insn's operands, constraints, operand modes
2267   etc. is obtained from the global variables set up by extract_insn.
2268
2269   WHICH_ALTERNATIVE is set to a number which indicates which
2270   alternative of constraints was matched: 0 for the first alternative,
2271   1 for the next, etc.
2272
2273   In addition, when two operands are match
2274   and it happens that the output operand is (reg) while the
2275   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2276   make the output operand look like the input.
2277   This is because the output operand is the one the template will print.
2278
2279   This is used in final, just before printing the assembler code and by
2280   the routines that determine an insn's attribute.
2281
2282   If STRICT is a positive non-zero value, it means that we have been
2283   called after reload has been completed.  In that case, we must
2284   do all checks strictly.  If it is zero, it means that we have been called
2285   before reload has completed.  In that case, we first try to see if we can
2286   find an alternative that matches strictly.  If not, we try again, this
2287   time assuming that reload will fix up the insn.  This provides a "best
2288   guess" for the alternative and is used to compute attributes of insns prior
2289   to reload.  A negative value of STRICT is used for this internal call.  */
2290
/* One recorded operand pair for the matched-operand fixup below:
   after a successful strict match, operand OTHER is replaced by
   operand THIS (the input form is what the template must print).  */
struct funny_match
{
  int this, other;
};
2295
int
constrain_operands (strict)
     int strict;
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  /* No operands or no alternatives means nothing can fail to match.  */
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  /* Try each alternative in turn; WHICH_ALTERNATIVE tracks the one
     currently being tested.  */
  do
    {
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (GET_CODE (SUBREG_REG (op)) == REG
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  /* Scan this alternative's constraint letters; any one letter
	     accepting the operand sets WIN.  */
	  while (*p && (c = *p++) != ',')
	    switch (c)
	      {
	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		while (*p && *p != ',')
		  p++;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		break;

	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p - 1, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
			op1 = XEXP (op1, 0);
		      if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (strict < 0
		    || GENERAL_REGS == ALL_REGS
		    || GET_CODE (op) != REG
		    || (reload_in_progress
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		if (GET_CODE (op) == MEM
		    /* Before reload, accept what reload can turn into mem.  */
		    || (strict < 0 && CONSTANT_P (op))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      case '<':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
#ifndef REAL_ARITHMETIC
		/* Match any CONST_DOUBLE, but only if
		   we can examine the bits of it reliably.  */
		if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
		     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
		    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
		  break;
#endif
		if (GET_CODE (op) == CONST_DOUBLE)
		  win = 1;
		break;

	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE)
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
		  win = 1;
		break;

	      case 's':
		/* `s' rejects integer constants (the break leaves WIN
		   clear); other constants fall through to the `i' test.  */
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
		/* FALLTHRU */
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
		  win = 1;
		break;

	      case 'V':
		if (GET_CODE (op) == MEM
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
			|| (reload_in_progress
			    && !(GET_CODE (op) == REG
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  /* Any remaining letter is a register-class constraint
		     (or a machine-specific EXTRA_CONSTRAINT).  */
		  enum reg_class class;

		  class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
		  if (class != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && GET_CODE (op) == REG
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (GET_CODE (op) == REG
			      && reg_fits_class_p (op, class, offset, mode)))
		        win = 1;
		    }
#ifdef EXTRA_CONSTRAINT
		  else if (EXTRA_CONSTRAINT (op, c))
		    win = 1;
#endif
		  break;
		}
	      }

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0)
	    for (eopno = 0; eopno < recog_data.n_operands; eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && GET_CODE (recog_data.operand[eopno]) == REG)
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((GET_CODE (recog_data.operand[opno]) == MEM
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      /* Apply the recorded matched-operand substitutions; see
		 struct funny_match above.  */
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
2643
2644/* Return 1 iff OPERAND (assumed to be a REG rtx)
2645   is a hard reg in class CLASS when its regno is offset by OFFSET
2646   and changed to mode MODE.
2647   If REG occupies multiple hard regs, all of them must be in CLASS.  */
2648
2649int
2650reg_fits_class_p (operand, class, offset, mode)
2651     rtx operand;
2652     enum reg_class class;
2653     int offset;
2654     enum machine_mode mode;
2655{
2656  int regno = REGNO (operand);
2657  if (regno < FIRST_PSEUDO_REGISTER
2658      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2659			    regno + offset))
2660    {
2661      int sr;
2662      regno += offset;
2663      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2664	   sr > 0; sr--)
2665	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2666				 regno + sr))
2667	  break;
2668      return sr == 0;
2669    }
2670
2671  return 0;
2672}
2673
2674/* Split single instruction.  Helper function for split_all_insns.
2675   Return last insn in the sequence if successful, or NULL if unsuccessful.  */
static rtx
split_insn (insn)
     rtx insn;
{
  rtx set;
  if (!INSN_P (insn))
    ;
  /* Don't split no-op move insns.  These should silently
     disappear later in final.  Splitting such insns would
     break the code that handles REG_NO_CONFLICT blocks.  */

  else if ((set = single_set (insn)) != NULL && set_noop_p (set))
    {
      /* Nops get in the way while scheduling, so delete them
         now if register allocation has already been done.  It
         is too risky to try to do this before register
         allocation, and there are unlikely to be very many
         nops then anyways.  */
      if (reload_completed)
	{
	  /* Turn the insn into a deleted note in place.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (insn) = 0;
	}
    }
  else
    {
      /* Split insns here to get max fine-grain parallelism.  */
      rtx first = PREV_INSN (insn);
      rtx last = try_split (PATTERN (insn), insn, 1);

      /* try_split returns INSN itself when no split happened.  */
      if (last != insn)
	{
	  /* try_split returns the NOTE that INSN became.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_SOURCE_FILE (insn) = 0;
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

	  /* ??? Coddle to md files that generate subregs in post-
	     reload splitters instead of computing the proper
	     hard register.  */
	  if (reload_completed && first != last)
	    {
	      /* Walk the newly emitted insns (those between FIRST
		 and LAST) and fix up their subreg operands.  */
	      first = NEXT_INSN (first);
	      while (1)
		{
		  if (INSN_P (first))
		    cleanup_subreg_operands (first);
		  if (first == last)
		    break;
		  first = NEXT_INSN (first);
		}
	    }
	  return last;
	}
    }
  return NULL_RTX;
}
2734/* Split all insns in the function.  If UPD_LIFE, update life info after.  */
2735
void
split_all_insns (upd_life)
     int upd_life;
{
  sbitmap blocks;
  int changed;
  int i;

  /* BLOCKS records which basic blocks were modified by splitting.  */
  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  changed = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, next;

      for (insn = bb->head; insn ; insn = next)
	{
	  rtx last;

	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  last = split_insn (insn);
	  if (last)
	    {
	      /* The split sequence may include barrier, but the
		 BB boundary we are interested in will be set to previous
		 one.  */

	      while (GET_CODE (last) == BARRIER)
		last = PREV_INSN (last);
	      SET_BIT (blocks, i);
	      changed = 1;
	      /* Resume scanning after the end of the split sequence.  */
	      insn = last;
	    }

	  if (insn == bb->end)
	    break;
	}

      /* Falling off the insn chain before reaching bb->end means the
	 block structure is corrupt.  */
      if (insn == NULL)
	abort ();
    }

  if (changed)
    {
      /* Splitting may have introduced new block boundaries.  */
      find_many_sub_basic_blocks (blocks);
    }

  if (changed && upd_life)
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
2798
2799/* Same as split_all_insns, but do not expect CFG to be available.
2800   Used by machine depedent reorg passes.  */
2801
2802void
2803split_all_insns_noflow ()
2804{
2805  rtx next, insn;
2806
2807  for (insn = get_insns (); insn; insn = next)
2808    {
2809      next = NEXT_INSN (insn);
2810      split_insn (insn);
2811    }
2812  return;
2813}
2814
2815#ifdef HAVE_peephole2
/* One slot of the peephole2 window: an insn and the set of registers
   live just before it.  */
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

/* Circular buffer of the insns visible to the peephole2 matcher,
   indexed via PEEP2_CURRENT with wraparound.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
2824
2825/* A non-insn marker indicating the last insn of the block.
2826   The live_before regset for this element is correct, indicating
2827   global_live_at_end for the block.  */
2828#define PEEP2_EOB	pc_rtx
2829
2830/* Return the Nth non-note insn after `current', or return NULL_RTX if it
2831   does not exist.  Used by the recognizer to find the next insn to match
2832   in a multi-insn pattern.  */
2833
2834rtx
2835peep2_next_insn (n)
2836     int n;
2837{
2838  if (n >= MAX_INSNS_PER_PEEP2 + 1)
2839    abort ();
2840
2841  n += peep2_current;
2842  if (n >= MAX_INSNS_PER_PEEP2 + 1)
2843    n -= MAX_INSNS_PER_PEEP2 + 1;
2844
2845  if (peep2_insn_data[n].insn == PEEP2_EOB)
2846    return NULL_RTX;
2847  return peep2_insn_data[n].insn;
2848}
2849
2850/* Return true if REGNO is dead before the Nth non-note insn
2851   after `current'.  */
2852
2853int
2854peep2_regno_dead_p (ofs, regno)
2855     int ofs;
2856     int regno;
2857{
2858  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2859    abort ();
2860
2861  ofs += peep2_current;
2862  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2863    ofs -= MAX_INSNS_PER_PEEP2 + 1;
2864
2865  if (peep2_insn_data[ofs].insn == NULL_RTX)
2866    abort ();
2867
2868  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2869}
2870
2871/* Similarly for a REG.  */
2872
2873int
2874peep2_reg_dead_p (ofs, reg)
2875     int ofs;
2876     rtx reg;
2877{
2878  int regno, n;
2879
2880  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2881    abort ();
2882
2883  ofs += peep2_current;
2884  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2885    ofs -= MAX_INSNS_PER_PEEP2 + 1;
2886
2887  if (peep2_insn_data[ofs].insn == NULL_RTX)
2888    abort ();
2889
2890  regno = REGNO (reg);
2891  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2892  while (--n >= 0)
2893    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2894      return 0;
2895  return 1;
2896}
2897
2898/* Try to find a hard register of mode MODE, matching the register class in
2899   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2900   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
2901   in which case the only condition is that the register must be available
2902   before CURRENT_INSN.
2903   Registers that already have bits set in REG_SET will not be considered.
2904
2905   If an appropriate register is available, it will be returned and the
2906   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2907   returned.  */
2908
rtx
peep2_find_free_register (from, to, class_str, mode, reg_set)
     int from, to;
     const char *class_str;
     enum machine_mode mode;
     HARD_REG_SET *reg_set;
{
  /* Round-robin start point, persists across calls so free registers
     get distributed (see comment in the loop below).  */
  static int search_ofs;
  enum reg_class class;
  HARD_REG_SET live;
  int i;

  if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  /* Translate FROM and TO into circular-buffer slot indices.  */
  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[from].insn == NULL_RTX)
    abort ();
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  /* Accumulate the registers live anywhere in [FROM, TO].  */
  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      if (peep2_insn_data[from].insn == NULL_RTX)
	abort ();
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  /* Only the first letter of CLASS_STR is consulted.  */
  class = (class_str[0] == 'r' ? GENERAL_REGS
	   : REG_CLASS_FROM_LETTER (class_str[0]));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      /* Check that all hard registers the candidate occupies in MODE
	 are free, both of prior picks (REG_SET) and of live values.  */
      success = 1;
      for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  /* Claim the register(s) in REG_SET for the caller.  */
	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
3008
3009/* Perform the peephole2 optimization pass.  */
3010
3011void
3012peephole2_optimize (dump_file)
3013     FILE *dump_file ATTRIBUTE_UNUSED;
3014{
3015  regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3016  rtx insn, prev;
3017  regset live;
3018  int i, b;
3019#ifdef HAVE_conditional_execution
3020  sbitmap blocks;
3021  int changed;
3022#endif
3023
3024  /* Initialize the regsets we're going to use.  */
3025  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3026    peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3027  live = INITIALIZE_REG_SET (rs_heads[i]);
3028
3029#ifdef HAVE_conditional_execution
3030  blocks = sbitmap_alloc (n_basic_blocks);
3031  sbitmap_zero (blocks);
3032  changed = 0;
3033#else
3034  count_or_remove_death_notes (NULL, 1);
3035#endif
3036
3037  for (b = n_basic_blocks - 1; b >= 0; --b)
3038    {
3039      basic_block bb = BASIC_BLOCK (b);
3040      struct propagate_block_info *pbi;
3041
3042      /* Indicate that all slots except the last holds invalid data.  */
3043      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3044	peep2_insn_data[i].insn = NULL_RTX;
3045
3046      /* Indicate that the last slot contains live_after data.  */
3047      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3048      peep2_current = MAX_INSNS_PER_PEEP2;
3049
3050      /* Start up propagation.  */
3051      COPY_REG_SET (live, bb->global_live_at_end);
3052      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3053
3054#ifdef HAVE_conditional_execution
3055      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3056#else
3057      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3058#endif
3059
3060      for (insn = bb->end; ; insn = prev)
3061	{
3062	  prev = PREV_INSN (insn);
3063	  if (INSN_P (insn))
3064	    {
3065	      rtx try;
3066	      int match_len;
3067
3068	      /* Record this insn.  */
3069	      if (--peep2_current < 0)
3070		peep2_current = MAX_INSNS_PER_PEEP2;
3071	      peep2_insn_data[peep2_current].insn = insn;
3072	      propagate_one_insn (pbi, insn);
	      /* Snapshot the current live-register set as the "live before"
		 data for INSN's slot in the insn window.  NOTE(review): LIVE
		 is maintained by the enclosing loop, which is above this
		 view -- presumably it was just updated for INSN; confirm.  */
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      /* Match the peephole.  */
	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j, k;
		      rtx old_insn, new_insn, note;

		      /* peep2_insn_data is used as a circular buffer of
			 MAX_INSNS_PER_PEEP2 + 1 slots; wrap the index by
			 hand rather than with a modulus.  */
		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (GET_CODE (old_insn) != CALL_INSN)
			continue;

		      /* Find the call in the replacement, which is either
			 a SEQUENCE of insns or a single insn.  */
		      new_insn = NULL_RTX;
		      if (GET_CODE (try) == SEQUENCE)
			for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
			  {
			    rtx x = XVECEXP (try, 0, k);
			    if (GET_CODE (x) == CALL_INSN)
			      {
				new_insn = x;
				break;
			      }
			  }
		      else if (GET_CODE (try) == CALL_INSN)
			new_insn = try;
		      /* A peephole that matched a call must emit a call.  */
		      if (! new_insn)
			abort ();

		      /* The usage list is transferred by reference, not
			 copied; the matched old insns are deleted below.  */
		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      /* Carry over only the cfg-related notes.  */
		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_EH_REGION:
			  case REG_NORETURN:
			  case REG_SETJMP:
			  case REG_ALWAYS_RETURN:
			    REG_NOTES (new_insn)
			      = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
						   XEXP (note, 0),
						   REG_NOTES (new_insn));
			    /* FALLTHRU -- lands on the break below, so
			       behavior is the same as an explicit break.  */
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  if (GET_CODE (old_insn) == CALL_INSN)
			    abort ();
			}
		      break;
		    }

		  /* I becomes the window slot of the last matched insn
		     (same hand-rolled wraparound as above).  */
		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after (try, peep2_insn_data[i].insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn);

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, b);
		  changed = 1;

		  /* Invalidate the entire insn window; only the
		     end-of-block sentinel remains meaningful.  */
		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence by
		     walking it backwards (via PREV_INSN) and refilling
		     window slots as if scanning these insns afresh.  */
		  do
		    {
		      if (INSN_P (try))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  peep2_insn_data[i].insn = try;
			  propagate_one_insn (pbi, try);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      try = PREV_INSN (try);
		    }
		  while (try != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif
		}
	    }

	  /* The backward walk over the block ends at its head.  */
	  if (insn == bb->head)
	    break;
	}

      free_propagate_block_info (pbi);
    }

  /* Release the per-slot live sets and the scratch live set.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

#ifdef HAVE_conditional_execution
  /* Life updates were deferred above; rebuild death notes and local
     life information for every block recorded in BLOCKS.  */
  count_or_remove_death_notes (blocks, 1);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
#endif
}
3211#endif /* HAVE_peephole2 */
3212