resource.c revision 132718
/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

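/* Number of buckets in the liveness hash table; entries are indexed by
   INSN_UID modulo this prime (see mark_target_live_regs).  */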
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
				       rtx*, int, struct resources,
				       struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = subreg_regno (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  basic_block bb;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && GET_CODE (insn) != BARRIER && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      FOR_EACH_BB (bb)
	if (insn == BB_HEAD (bb))
	  return bb->index;
    }

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if ((GET_CODE (insn) == JUMP_INSN
	   || GET_CODE (insn) == CALL_INSN
	   || GET_CODE (insn) == INSN)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx next = NEXT_INSN (insn);
	  enum rtx_code code = GET_CODE (next);

	  while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	      code = GET_CODE (next);
	    }
	}

      insn = NEXT_INSN (insn);
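      /* If the following insn is the placeholder for a delay-slot
	 SEQUENCE, step into the sequence and return its first element.  */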
      if (insn && GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   int include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  if (last_regno > FIRST_PSEUDO_REGISTER)
	    abort ();
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
	{
	  unsigned int regno = REGNO (x);
	  unsigned int last_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  if (last_regno > FIRST_PSEUDO_REGISTER)
	    abort ();
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT
	  || GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

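	  /* Mark each register given in a USE in CALL_INSN_FUNCTION_USAGE
	     as referenced, unless it is set by one of the insns in the
	     delay-slot SEQUENCE (a parameter load moved into the slot).  */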
	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, 0);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }

	default:
	  break;
	}

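      /* If we found a jump (INSN itself or one inside its delay-slot
	 SEQUENCE), follow it, but only within a small jump budget so the
	 search stays bounded.  */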
      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, 1);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

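      /* Accumulate what this insn references in NEEDED and what it sets
	 in SET; any register that has been set but never needed is no
	 longer live at TARGET, so remove it from *RES.  */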
      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn. If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx link;

	  res->cc = res->memory = 1;
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (call_used_regs[r] || global_regs[r])
	      SET_HARD_REG_BIT (res->regs, r);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
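      /* For a delay-slot SEQUENCE, mark each element, but skip slots
	 filled from the target of an annulled branch: those insns are
	 not executed on the fall-through path.  */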
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= RTX_UNCHANGING_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	      if (last_regno > FIRST_PSEUDO_REGISTER)
		abort ();
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  unsigned int regno = REGNO (x);
	  unsigned int last_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  if (last_regno > FIRST_PSEUDO_REGISTER)
	    abort ();
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = xmalloc (sizeof (struct target_info));
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
      unsigned int j;
      unsigned int regno;
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block -- this is the real hard regs
	 marked live, plus live pseudo regs that have been renumbered to
	 hard regs.  */

      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      EXECUTE_IF_SET_IN_REG_SET
	(regs_live, FIRST_PSEUDO_REGISTER, i,
	 {
	   if (reg_renumber[i] >= 0)
	     {
	       regno = reg_renumber[i];
	       for (j = regno;
		    j < regno + HARD_REGNO_NREGS (regno,
						  PSEUDO_REGNO_MODE (i));
		    j++)
		 SET_HARD_REG_BIT (current_live_regs, j);
	     }
	 });

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	      real_insn = XEXP (PATTERN (insn), 0);

	  if (GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* CALL clobbers all call-used regs that aren't fixed except
		 sp, ap, and fp.  Do this before setting the result of the
		 call live.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs,
				      regs_invalidated_by_call);

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((GET_CODE (real_insn) == INSN
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || GET_CODE (real_insn) == JUMP_INSN
	      || GET_CODE (real_insn) == CALL_INSN)
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int first_regno = REGNO (XEXP (link, 0));
		    unsigned int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      SET_HARD_REG_BIT (pending_dead_regs, i);
		  }

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int first_regno = REGNO (XEXP (link, 0));
		    unsigned int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      CLEAR_HARD_REG_BIT (current_live_regs, i);
		  }
	    }

	  else if (GET_CODE (real_insn) == CODE_LABEL)
	    {
	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (GET_CODE (real_insn) == NOTE
		   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

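  /* Refine the estimate: registers that are set after TARGET before ever
     being used are not live at TARGET.  */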
  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (insns, next_active_insn (jump_target),
			     &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, 1);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

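  /* Cache the result so that later queries for the same TARGET can
     reuse it (until the containing block's tick count changes).  */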
  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is nonzero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
      if (! EXIT_IGNORE_STACK
	  || current_function_sp_is_unchanging)
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

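  /* Everything set by the epilogue insns themselves is also required at
     the very end of the RTL chain.  */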
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			MARK_SRC_DEST_CALL);

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
  bb_ticks = xcalloc (last_basic_block, sizeof (int));
}

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

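      /* Marking the block as unknown forces mark_target_live_regs to
	 recompute the liveness information the next time it is asked
	 about this insn.  */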
      if (tinfo)
	tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, int include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}
