flow.c revision 70635
1/* Data flow analysis for GNU compiler.
2   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA.  */
20
21
22/* This file contains the data flow analysis pass of the compiler.  It
23   computes data flow information which tells combine_instructions
24   which insns to consider combining and controls register allocation.
25
26   Additional data flow information that is too bulky to record is
27   generated during the analysis, and is used at that time to create
28   autoincrement and autodecrement addressing.
29
30   The first step is dividing the function into basic blocks.
31   find_basic_blocks does this.  Then life_analysis determines
32   where each register is live and where it is dead.
33
34   ** find_basic_blocks **
35
36   find_basic_blocks divides the current function's rtl into basic
37   blocks and constructs the CFG.  The blocks are recorded in the
38   basic_block_info array; the CFG exists in the edge structures
39   referenced by the blocks.
40
41   find_basic_blocks also finds any unreachable loops and deletes them.
42
43   ** life_analysis **
44
45   life_analysis is called immediately after find_basic_blocks.
46   It uses the basic block information to determine where each
47   hard or pseudo register is live.
48
49   ** live-register info **
50
51   The information about where each register is live is in two parts:
52   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54   basic_block->global_live_at_start has an element for each basic
55   block, and the element is a bit-vector with a bit for each hard or
56   pseudo register.  The bit is 1 if the register is live at the
57   beginning of the basic block.
58
59   Two types of elements can be added to an insn's REG_NOTES.
60   A REG_DEAD note is added to an insn's REG_NOTES for any register
61   that meets both of two conditions:  The value in the register is not
62   needed in subsequent insns and the insn does not replace the value in
63   the register (in the case of multi-word hard registers, the value in
64   each register must be replaced by the insn to avoid a REG_DEAD note).
65
66   In the vast majority of cases, an object in a REG_DEAD note will be
67   used somewhere in the insn.  The (rare) exception to this is if an
68   insn uses a multi-word hard register and only some of the registers are
69   needed in subsequent insns.  In that case, REG_DEAD notes will be
70   provided for those hard registers that are not subsequently needed.
71   Partial REG_DEAD notes of this type do not occur when an insn sets
72   only some of the hard registers used in such a multi-word operand;
73   omitting REG_DEAD notes for objects stored in an insn is optional and
74   the desire to do so does not justify the complexity of the partial
75   REG_DEAD notes.
76
77   REG_UNUSED notes are added for each register that is set by the insn
78   but is unused subsequently (if every register set by the insn is unused
79   and the insn does not reference memory or have some other side-effect,
80   the insn is deleted instead).  If only part of a multi-word hard
81   register is used in a subsequent insn, REG_UNUSED notes are made for
82   the parts that will not be used.
83
84   To determine which registers are live after any insn, one can
85   start from the beginning of the basic block and scan insns, noting
86   which registers are set by each insn and which die there.
87
88   ** Other actions of life_analysis **
89
90   life_analysis sets up the LOG_LINKS fields of insns because the
91   information needed to do so is readily available.
92
93   life_analysis deletes insns whose only effect is to store a value
94   that is never used.
95
96   life_analysis notices cases where a reference to a register as
97   a memory address can be combined with a preceding or following
98   incrementation or decrementation of the register.  The separate
99   instruction to increment or decrement is deleted and the address
100   is changed to a POST_INC or similar rtx.
101
102   Each time an incrementing or decrementing address is created,
103   a REG_INC element is added to the insn's REG_NOTES list.
104
105   life_analysis fills in certain vectors containing information about
106   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107   reg_n_calls_crosses and reg_basic_block.
108
109   life_analysis sets current_function_sp_is_unchanging if the function
110   doesn't modify the stack pointer.  */
111
112/* TODO:
113
114   Split out from life_analysis:
115	- local property discovery (bb->local_live, bb->local_set)
116	- global property computation
117	- log links creation
118	- pre/post modify transformation
119*/
120
121#include "config.h"
122#include "system.h"
123#include "rtl.h"
124#include "basic-block.h"
125#include "insn-config.h"
126#include "regs.h"
127#include "hard-reg-set.h"
128#include "flags.h"
129#include "output.h"
130#include "except.h"
131#include "toplev.h"
132#include "recog.h"
133#include "insn-flags.h"
134
135#include "obstack.h"
136#define obstack_chunk_alloc xmalloc
137#define obstack_chunk_free free
138
139
140/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
141   the stack pointer does not matter.  The value is tested only in
142   functions that have frame pointers.
143   No definition is equivalent to always zero.  */
144#ifndef EXIT_IGNORE_STACK
145#define EXIT_IGNORE_STACK 0
146#endif
147
148
149/* The contents of the current function definition are allocated
150   in this obstack, and all are freed at the end of the function.
151   For top-level functions, this is temporary_obstack.
152   Separate obstacks are made for nested functions.  */
153
154extern struct obstack *function_obstack;
155
156/* List of labels that must never be deleted.  */
157extern rtx forced_labels;
158
159/* Number of basic blocks in the current function.  */
160
161int n_basic_blocks;
162
163/* The basic block array.  */
164
165varray_type basic_block_info;
166
167/* The special entry and exit blocks.  */
168
169struct basic_block_def entry_exit_blocks[2] =
170{
171  {
172    NULL,			/* head */
173    NULL,			/* end */
174    NULL,			/* pred */
175    NULL,			/* succ */
176    NULL,			/* local_set */
177    NULL,			/* global_live_at_start */
178    NULL,			/* global_live_at_end */
179    NULL,			/* aux */
180    ENTRY_BLOCK,		/* index */
181    0				/* loop_depth */
182  },
183  {
184    NULL,			/* head */
185    NULL,			/* end */
186    NULL,			/* pred */
187    NULL,			/* succ */
188    NULL,			/* local_set */
189    NULL,			/* global_live_at_start */
190    NULL,			/* global_live_at_end */
191    NULL,			/* aux */
192    EXIT_BLOCK,			/* index */
193    0				/* loop_depth */
194  }
195};
196
197/* Nonzero if the second flow pass has completed.  */
198int flow2_completed;
199
200/* Maximum register number used in this function, plus one.  */
201
202int max_regno;
203
204/* Indexed by n, giving various register information */
205
206varray_type reg_n_info;
207
208/* Size of the reg_n_info table.  */
209
210unsigned int reg_n_max;
211
212/* Element N is the next insn that uses (hard or pseudo) register number N
213   within the current basic block; or zero, if there is no such insn.
214   This is valid only during the final backward scan in propagate_block.  */
215
216static rtx *reg_next_use;
217
218/* Size of a regset for the current function,
219   in (1) bytes and (2) elements.  */
220
221int regset_bytes;
222int regset_size;
223
224/* Regset of regs live when calls to `setjmp'-like functions happen.  */
225/* ??? Does this exist only for the setjmp-clobbered warning message?  */
226
227regset regs_live_at_setjmp;
228
229/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
230   that have to go in the same hard reg.
231   The first two regs in the list are a pair, and the next two
232   are another pair, etc.  */
233rtx regs_may_share;
234
235/* Depth within loops of basic block being scanned for lifetime analysis,
236   plus one.  This is the weight attached to references to registers.  */
237
238static int loop_depth;
239
240/* During propagate_block, this is non-zero if the value of CC0 is live.  */
241
242static int cc0_live;
243
244/* During propagate_block, this contains a list of all the MEMs we are
245   tracking for dead store elimination.
246
247   ?!? Note we leak memory by not free-ing items on this list.  We need to
248   write some generic routines to operate on memory lists since cse, gcse,
249   loop, sched, flow and possibly other passes all need to do basically the
250   same operations on these lists.  */
251
252static rtx mem_set_list;
253
254/* Set of registers that may be eliminable.  These are handled specially
255   in updating regs_ever_live.  */
256
257static HARD_REG_SET elim_reg_set;
258
259/* The basic block structure for every insn, indexed by uid.  */
260
261varray_type basic_block_for_insn;
262
263/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
264/* ??? Should probably be using LABEL_NUSES instead.  It would take a
265   bit of surgery to be able to use or co-opt the routines in jump.  */
266
267static rtx label_value_list;
268
269/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */
270
271#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
272#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
273static bitmap uid_volatile;
274
275/* Forward declarations */
276static int count_basic_blocks		PROTO((rtx));
277static rtx find_basic_blocks_1		PROTO((rtx, rtx*));
278static void create_basic_block		PROTO((int, rtx, rtx, rtx));
279static void compute_bb_for_insn		PROTO((varray_type, int));
280static void clear_edges			PROTO((void));
281static void make_edges			PROTO((rtx, rtx*));
282static void make_edge			PROTO((basic_block, basic_block, int));
283static void make_label_edge		PROTO((basic_block, rtx, int));
284static void mark_critical_edges		PROTO((void));
285
286static void commit_one_edge_insertion	PROTO((edge));
287
288static void delete_unreachable_blocks	PROTO((void));
289static void delete_eh_regions		PROTO((void));
290static int can_delete_note_p		PROTO((rtx));
291static void delete_insn_chain		PROTO((rtx, rtx));
292static int delete_block			PROTO((basic_block));
293static void expunge_block		PROTO((basic_block));
294static rtx flow_delete_insn		PROTO((rtx));
295static int can_delete_label_p		PROTO((rtx));
296static void merge_blocks_nomove		PROTO((basic_block, basic_block));
297static int merge_blocks			PROTO((edge,basic_block,basic_block));
298static void tidy_fallthru_edge		PROTO((edge,basic_block,basic_block));
299static void calculate_loop_depth	PROTO((rtx));
300
301static int set_noop_p			PROTO((rtx));
302static int noop_move_p			PROTO((rtx));
303static void notice_stack_pointer_modification PROTO ((rtx, rtx));
304static void record_volatile_insns	PROTO((rtx));
305static void mark_regs_live_at_end	PROTO((regset));
306static void life_analysis_1		PROTO((rtx, int, int));
307static void init_regset_vector		PROTO ((regset *, int,
308						struct obstack *));
309static void propagate_block		PROTO((regset, rtx, rtx, int,
310					       regset, int, int));
311static int insn_dead_p			PROTO((rtx, regset, int, rtx));
312static int libcall_dead_p		PROTO((rtx, regset, rtx, rtx));
313static void mark_set_regs		PROTO((regset, regset, rtx,
314					       rtx, regset));
315static void mark_set_1			PROTO((regset, regset, rtx,
316					       rtx, regset));
317#ifdef AUTO_INC_DEC
318static void find_auto_inc		PROTO((regset, rtx, rtx));
319static int try_pre_increment_1		PROTO((rtx));
320static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
321#endif
322static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
323void dump_flow_info			PROTO((FILE *));
324static void dump_edge_info		PROTO((FILE *, edge, int));
325
326static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
327static int_list_ptr add_int_list_node   PROTO ((int_list_block **,
328						int_list **, int));
329
330static void add_pred_succ		PROTO ((int, int, int_list_ptr *,
331						int_list_ptr *, int *, int *));
332
333static void count_reg_sets_1		PROTO ((rtx));
334static void count_reg_sets		PROTO ((rtx));
335static void count_reg_references	PROTO ((rtx));
336static void notice_stack_pointer_modification PROTO ((rtx, rtx));
337static void invalidate_mems_from_autoinc	PROTO ((rtx));
338void verify_flow_info			PROTO ((void));
339
340/* Find basic blocks of the current function.
341   F is the first insn of the function and NREGS the number of register
342   numbers in use.  */
343
344void
345find_basic_blocks (f, nregs, file, do_cleanup)
346     rtx f;
347     int nregs ATTRIBUTE_UNUSED;
348     FILE *file ATTRIBUTE_UNUSED;
349     int do_cleanup;
350{
351  rtx *bb_eh_end;
352  int max_uid;
353
354  /* Flush out existing data.  */
355  if (basic_block_info != NULL)
356    {
357      int i;
358
359      clear_edges ();
360
361      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
362	 tag for reuse during create_basic_block, just in case some pass
363	 copies around basic block notes improperly.  */
364      for (i = 0; i < n_basic_blocks; ++i)
365	BASIC_BLOCK (i)->aux = NULL;
366
367      VARRAY_FREE (basic_block_info);
368    }
369
370  n_basic_blocks = count_basic_blocks (f);
371
372  /* Size the basic block table.  The actual structures will be allocated
373     by find_basic_blocks_1, since we want to keep the structure pointers
374     stable across calls to find_basic_blocks.  */
375  /* ??? This whole issue would be much simpler if we called find_basic_blocks
376     exactly once, and thereafter we don't have a single long chain of
377     instructions at all until close to the end of compilation when we
378     actually lay them out.  */
379
380  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
381
382  /* An array to record the active exception region at the end of each
383     basic block.  It is filled in by find_basic_blocks_1 for make_edges.  */
384  bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));
385
386  label_value_list = find_basic_blocks_1 (f, bb_eh_end);
387
388  /* Record the block to which an insn belongs.  */
389  /* ??? This should be done another way, by which (perhaps) a label is
390     tagged directly with the basic block that it starts.  It is used for
391     more than that currently, but IMO that is the only valid use.  */
392
393  max_uid = get_max_uid ();
394#ifdef AUTO_INC_DEC
395  /* Leave space for insns life_analysis makes in some cases for auto-inc.
396     These cases are rare, so we don't need too much space.  */
397  max_uid += max_uid / 10;
398#endif
399
400  VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
401  compute_bb_for_insn (basic_block_for_insn, max_uid);
402
403  /* Discover the edges of our cfg.  */
404
405  make_edges (label_value_list, bb_eh_end);
406
407  /* Delete unreachable blocks.  */
408
409  if (do_cleanup)
410    delete_unreachable_blocks ();
411
412  /* Mark critical edges.  */
413
414  mark_critical_edges ();
415
416  /* Discover the loop depth at the start of each basic block to aid
417     register allocation.  */
418  calculate_loop_depth (f);
419
420  /* Kill the data we won't maintain.  */
421  label_value_list = 0;
422
423#ifdef ENABLE_CHECKING
424  verify_flow_info ();
425#endif
426}
427
428/* Count the basic blocks of the function.  */
429
430static int
431count_basic_blocks (f)
432     rtx f;
433{
434  register rtx insn;
435  register RTX_CODE prev_code;
436  register int count = 0;
437  int eh_region = 0;
438  int call_had_abnormal_edge = 0;
439  rtx prev_call = NULL_RTX;
440
441  prev_code = JUMP_INSN;
442  for (insn = f; insn; insn = NEXT_INSN (insn))
443    {
444      register RTX_CODE code = GET_CODE (insn);
445
446      if (code == CODE_LABEL
447	  || (GET_RTX_CLASS (code) == 'i'
448	      && (prev_code == JUMP_INSN
449		  || prev_code == BARRIER
450		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
451	{
452	  count++;
453
454	  /* If the previous insn was a call that did not create an
455	     abnormal edge, we want to add a nop so that the CALL_INSN
456	     itself is not at basic_block_end.  This allows us to
457	     easily distinguish between normal calls and those which
458	     create abnormal edges in the flow graph.  */
459
460	  if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
461	    {
462	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
463	      emit_insn_after (nop, prev_call);
464	    }
465	}
466
467      /* Record whether this call created an edge.  */
468      if (code == CALL_INSN)
469	{
470	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
471	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
472	  prev_call = insn;
473	  call_had_abnormal_edge = 0;
474
475	  /* If there is a specified EH region, we have an edge.  */
476	  if (eh_region && region > 0)
477	    call_had_abnormal_edge = 1;
478	  else
479	    {
480	      /* If there is a nonlocal goto label and the specified
481		 region number isn't -1, we have an edge. (0 means
482		 no throw, but might have a nonlocal goto).  */
483	      if (nonlocal_goto_handler_labels && region >= 0)
484		call_had_abnormal_edge = 1;
485	    }
486	}
487      else if (code != NOTE)
488	prev_call = NULL_RTX;
489
490      if (code != NOTE)
491	prev_code = code;
492      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
493	++eh_region;
494      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
495	--eh_region;
496
497    }
498
499  /* The rest of the compiler works a bit smoother when we don't have to
500     check for the edge case of do-nothing functions with no basic blocks.  */
501  if (count == 0)
502    {
503      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
504      count = 1;
505    }
506
507  return count;
508}
509
510/* Find all basic blocks of the function whose first insn is F.
511   Store the correct data in the tables that describe the basic blocks,
512   set up the chains of references for each CODE_LABEL, and
513   delete any entire basic blocks that cannot be reached.
514
515   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
516   that are otherwise unreachable may be reachable with a non-local goto.
517
518   BB_EH_END is an array in which we record the list of exception regions
519   active at the end of every basic block.  */
520
521static rtx
522find_basic_blocks_1 (f, bb_eh_end)
523     rtx f;
524     rtx *bb_eh_end;
525{
526  register rtx insn, next;
527  int call_has_abnormal_edge = 0;
528  int i = 0;
529  rtx bb_note = NULL_RTX;
530  rtx eh_list = NULL_RTX;
531  rtx label_value_list = NULL_RTX;
532  rtx head = NULL_RTX;
533  rtx end = NULL_RTX;
534
535  /* We process the instructions in a slightly different way than we did
536     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
537     closed out the previous block, so that it gets attached at the proper
538     place.  Since this form should be equivalent to the previous,
539     find_basic_blocks_0 continues to use the old form as a check.  */
540
541  for (insn = f; insn; insn = next)
542    {
543      enum rtx_code code = GET_CODE (insn);
544
545      next = NEXT_INSN (insn);
546
547      if (code == CALL_INSN)
548	{
549	  /* Record whether this call created an edge.  */
550	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
551	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
552	  call_has_abnormal_edge = 0;
553
554	  /* If there is an EH region, we have an edge.  */
555	  if (eh_list && region > 0)
556	    call_has_abnormal_edge = 1;
557	  else
558	    {
559	      /* If there is a nonlocal goto label and the specified
560		 region number isn't -1, we have an edge. (0 means
561		 no throw, but might have a nonlocal goto).  */
562	      if (nonlocal_goto_handler_labels && region >= 0)
563		call_has_abnormal_edge = 1;
564	    }
565	}
566
567      switch (code)
568	{
569	case NOTE:
570	  {
571	    int kind = NOTE_LINE_NUMBER (insn);
572
573	    /* Keep a LIFO list of the currently active exception notes.  */
574	    if (kind == NOTE_INSN_EH_REGION_BEG)
575	      eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
576	    else if (kind == NOTE_INSN_EH_REGION_END)
577	      eh_list = XEXP (eh_list, 1);
578
579	    /* Look for basic block notes with which to keep the
580	       basic_block_info pointers stable.  Unthread the note now;
581	       we'll put it back at the right place in create_basic_block.
582	       Or not at all if we've already found a note in this block.  */
583	    else if (kind == NOTE_INSN_BASIC_BLOCK)
584	      {
585		if (bb_note == NULL_RTX)
586		  bb_note = insn;
587		next = flow_delete_insn (insn);
588	      }
589
590	    break;
591	  }
592
593	case CODE_LABEL:
594	  /* A basic block starts at a label.  If we've closed one off due
595	     to a barrier or some such, no need to do it again.  */
596	  if (head != NULL_RTX)
597	    {
598	      /* While we now have edge lists with which other portions of
599		 the compiler might determine a call ending a basic block
600		 does not imply an abnormal edge, it will be a bit before
601		 everything can be updated.  So continue to emit a noop at
602		 the end of such a block.  */
603	      if (GET_CODE (end) == CALL_INSN)
604		{
605		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
606		  end = emit_insn_after (nop, end);
607		}
608
609	      bb_eh_end[i] = eh_list;
610	      create_basic_block (i++, head, end, bb_note);
611	      bb_note = NULL_RTX;
612	    }
613	  head = end = insn;
614	  break;
615
616	case JUMP_INSN:
617	  /* A basic block ends at a jump.  */
618	  if (head == NULL_RTX)
619	    head = insn;
620	  else
621	    {
622	      /* ??? Make a special check for table jumps.  The way this
623		 happens is truely and amazingly gross.  We are about to
624		 create a basic block that contains just a code label and
625		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
626		 its own natural loop.
627
628		 Prevent this bit of brain damage, pasting things together
629		 correctly in make_edges.
630
631		 The correct solution involves emitting the table directly
632		 on the tablejump instruction as a note, or JUMP_LABEL.  */
633
634	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
635		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
636		{
637		  head = end = NULL;
638		  n_basic_blocks--;
639		  break;
640		}
641	    }
642	  end = insn;
643	  goto new_bb_inclusive;
644
645	case BARRIER:
646	  /* A basic block ends at a barrier.  It may be that an unconditional
647	     jump already closed the basic block -- no need to do it again.  */
648	  if (head == NULL_RTX)
649	    break;
650
651	  /* While we now have edge lists with which other portions of the
652	     compiler might determine a call ending a basic block does not
653	     imply an abnormal edge, it will be a bit before everything can
654	     be updated.  So continue to emit a noop at the end of such a
655	     block.  */
656	  if (GET_CODE (end) == CALL_INSN)
657	    {
658	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
659	      end = emit_insn_after (nop, end);
660	    }
661	  goto new_bb_exclusive;
662
663	case CALL_INSN:
664	  /* A basic block ends at a call that can either throw or
665	     do a non-local goto.  */
666	  if (call_has_abnormal_edge)
667	    {
668	    new_bb_inclusive:
669	      if (head == NULL_RTX)
670		head = insn;
671	      end = insn;
672
673	    new_bb_exclusive:
674	      bb_eh_end[i] = eh_list;
675	      create_basic_block (i++, head, end, bb_note);
676	      head = end = NULL_RTX;
677	      bb_note = NULL_RTX;
678	      break;
679	    }
680	  /* FALLTHRU */
681
682	default:
683	  if (GET_RTX_CLASS (code) == 'i')
684	    {
685	      if (head == NULL_RTX)
686		head = insn;
687	      end = insn;
688	    }
689	  break;
690	}
691
692      if (GET_RTX_CLASS (code) == 'i')
693	{
694	  rtx note;
695
696	  /* Make a list of all labels referred to other than by jumps
697	     (which just don't have the REG_LABEL notes).
698
699	     Make a special exception for labels followed by an ADDR*VEC,
700	     as this would be a part of the tablejump setup code.
701
702	     Make a special exception for the eh_return_stub_label, which
703	     we know isn't part of any otherwise visible control flow.  */
704
705	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
706	    if (REG_NOTE_KIND (note) == REG_LABEL)
707	      {
708	        rtx lab = XEXP (note, 0), next;
709
710		if (lab == eh_return_stub_label)
711		  ;
712		else if ((next = next_nonnote_insn (lab)) != NULL
713			 && GET_CODE (next) == JUMP_INSN
714			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
715			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
716		  ;
717		else
718		  label_value_list
719		    = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
720				         label_value_list);
721	      }
722	}
723    }
724
725  if (head != NULL_RTX)
726    {
727      bb_eh_end[i] = eh_list;
728      create_basic_block (i++, head, end, bb_note);
729    }
730
731  if (i != n_basic_blocks)
732    abort ();
733
734  return label_value_list;
735}
736
737/* Create a new basic block consisting of the instructions between
738   HEAD and END inclusive.  Reuses the note and basic block struct
739   in BB_NOTE, if any.  */
740
741static void
742create_basic_block (index, head, end, bb_note)
743     int index;
744     rtx head, end, bb_note;
745{
746  basic_block bb;
747
748  if (bb_note
749      && ! RTX_INTEGRATED_P (bb_note)
750      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
751      && bb->aux == NULL)
752    {
753      /* If we found an existing note, thread it back onto the chain.  */
754
755      if (GET_CODE (head) == CODE_LABEL)
756	add_insn_after (bb_note, head);
757      else
758	{
759	  add_insn_before (bb_note, head);
760	  head = bb_note;
761	}
762    }
763  else
764    {
765      /* Otherwise we must create a note and a basic block structure.
766	 Since we allow basic block structs in rtl, give the struct
767	 the same lifetime by allocating it off the function obstack
768	 rather than using malloc.  */
769
770      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
771      memset (bb, 0, sizeof (*bb));
772
773      if (GET_CODE (head) == CODE_LABEL)
774	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
775      else
776	{
777	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
778	  head = bb_note;
779	}
780      NOTE_BASIC_BLOCK (bb_note) = bb;
781    }
782
783  /* Always include the bb note in the block.  */
784  if (NEXT_INSN (end) == bb_note)
785    end = bb_note;
786
787  bb->head = head;
788  bb->end = end;
789  bb->index = index;
790  BASIC_BLOCK (index) = bb;
791
792  /* Tag the block so that we know it has been used when considering
793     other basic block notes.  */
794  bb->aux = bb;
795}
796
797/* Records the basic block struct in BB_FOR_INSN, for every instruction
798   indexed by INSN_UID.  MAX is the size of the array.  */
799
800static void
801compute_bb_for_insn (bb_for_insn, max)
802     varray_type bb_for_insn;
803     int max;
804{
805  int i;
806
807  for (i = 0; i < n_basic_blocks; ++i)
808    {
809      basic_block bb = BASIC_BLOCK (i);
810      rtx insn, end;
811
812      end = bb->end;
813      insn = bb->head;
814      while (1)
815	{
816	  int uid = INSN_UID (insn);
817	  if (uid < max)
818	    VARRAY_BB (bb_for_insn, uid) = bb;
819	  if (insn == end)
820	    break;
821	  insn = NEXT_INSN (insn);
822	}
823    }
824}
825
826/* Free the memory associated with the edge structures.  */
827
828static void
829clear_edges ()
830{
831  int i;
832  edge n, e;
833
834  for (i = 0; i < n_basic_blocks; ++i)
835    {
836      basic_block bb = BASIC_BLOCK (i);
837
838      for (e = bb->succ; e ; e = n)
839	{
840	  n = e->succ_next;
841	  free (e);
842	}
843
844      bb->succ = 0;
845      bb->pred = 0;
846    }
847
848  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
849    {
850      n = e->succ_next;
851      free (e);
852    }
853
854  ENTRY_BLOCK_PTR->succ = 0;
855  EXIT_BLOCK_PTR->pred = 0;
856}
857
858/* Identify the edges between basic blocks.
859
860   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
861   that are otherwise unreachable may be reachable with a non-local goto.
862
863   BB_EH_END is an array indexed by basic block number in which we record
864   the list of exception regions active at the end of the basic block.  */
865
866static void
867make_edges (label_value_list, bb_eh_end)
868     rtx label_value_list;
869     rtx *bb_eh_end;
870{
871  int i;
872
873  /* Assume no computed jump; revise as we create edges.  */
874  current_function_has_computed_jump = 0;
875
876  /* By nature of the way these get numbered, block 0 is always the entry.  */
877  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
878
879  for (i = 0; i < n_basic_blocks; ++i)
880    {
881      basic_block bb = BASIC_BLOCK (i);
882      rtx insn, x, eh_list;
883      enum rtx_code code;
884      int force_fallthru = 0;
885
886      /* If we have asynchronous exceptions, scan the notes for all exception
887	 regions active in the block.  In the normal case, we only need the
888	 one active at the end of the block, which is bb_eh_end[i].  */
889
890      eh_list = bb_eh_end[i];
891      if (asynchronous_exceptions)
892	{
893	  for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
894	    {
895	      if (GET_CODE (insn) == NOTE
896		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
897		eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
898	    }
899	}
900
901      /* Now examine the last instruction of the block, and discover the
902	 ways we can leave the block.  */
903
904      insn = bb->end;
905      code = GET_CODE (insn);
906
907      /* A branch.  */
908      if (code == JUMP_INSN)
909	{
910	  rtx tmp;
911
912	  /* ??? Recognize a tablejump and do the right thing.  */
913	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
914	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
915	      && GET_CODE (tmp) == JUMP_INSN
916	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
917		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
918	    {
919	      rtvec vec;
920	      int j;
921
922	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
923		vec = XVEC (PATTERN (tmp), 0);
924	      else
925		vec = XVEC (PATTERN (tmp), 1);
926
927	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
928		make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0);
929
930	      /* Some targets (eg, ARM) emit a conditional jump that also
931		 contains the out-of-range target.  Scan for these and
932		 add an edge if necessary.  */
933	      if ((tmp = single_set (insn)) != NULL
934		  && SET_DEST (tmp) == pc_rtx
935		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
936		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
937		make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
938
939#ifdef CASE_DROPS_THROUGH
940	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
941		 us naturally detecting fallthru into the next block.  */
942	      force_fallthru = 1;
943#endif
944	    }
945
946	  /* If this is a computed jump, then mark it as reaching
947	     everything on the label_value_list and forced_labels list.  */
948	  else if (computed_jump_p (insn))
949	    {
950	      current_function_has_computed_jump = 1;
951
952	      for (x = label_value_list; x; x = XEXP (x, 1))
953		make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
954
955	      for (x = forced_labels; x; x = XEXP (x, 1))
956		make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
957	    }
958
959	  /* Returns create an exit out.  */
960	  else if (returnjump_p (insn))
961	    make_edge (bb, EXIT_BLOCK_PTR, 0);
962
963	  /* Otherwise, we have a plain conditional or unconditional jump.  */
964	  else
965	    {
966	      if (! JUMP_LABEL (insn))
967		abort ();
968	      make_label_edge (bb, JUMP_LABEL (insn), 0);
969	    }
970	}
971
972      /* If this is a CALL_INSN, then mark it as reaching the active EH
973	 handler for this CALL_INSN.  If we're handling asynchronous
974	 exceptions then any insn can reach any of the active handlers.
975
976	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
977
978      if (code == CALL_INSN || asynchronous_exceptions)
979	{
980	  int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
981	  handler_info *ptr;
982
983	  /* Use REG_EH_RETHROW and REG_EH_REGION if available.  */
984	  /* ??? REG_EH_REGION is not generated presently.  Is it
985	     inteded that there be multiple notes for the regions?
986	     or is my eh_list collection redundant with handler linking?  */
987
988	  x = find_reg_note (insn, REG_EH_RETHROW, 0);
989	  if (!x)
990	    x = find_reg_note (insn, REG_EH_REGION, 0);
991	  if (x)
992	    {
993	      if (XINT (XEXP (x, 0), 0) > 0)
994		{
995		  ptr = get_first_handler (XINT (XEXP (x, 0), 0));
996		  while (ptr)
997		    {
998		      make_label_edge (bb, ptr->handler_label,
999				       EDGE_ABNORMAL | EDGE_EH | is_call);
1000		      ptr = ptr->next;
1001		    }
1002		}
1003	    }
1004	  else
1005	    {
1006	      for (x = eh_list; x; x = XEXP (x, 1))
1007		{
1008		  ptr = get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x, 0)));
1009		  while (ptr)
1010		    {
1011		      make_label_edge (bb, ptr->handler_label,
1012				       EDGE_ABNORMAL | EDGE_EH | is_call);
1013		      ptr = ptr->next;
1014		    }
1015		}
1016	    }
1017
1018	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
1019	    {
1020	      /* ??? This could be made smarter: in some cases it's possible
1021		 to tell that certain calls will not do a nonlocal goto.
1022
1023		 For example, if the nested functions that do the nonlocal
1024		 gotos do not have their addresses taken, then only calls to
1025		 those functions or to other nested functions that use them
1026		 could possibly do nonlocal gotos.  */
1027
1028	      for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1029	        make_label_edge (bb, XEXP (x, 0),
1030			         EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1031	    }
1032	}
1033
1034      /* We know something about the structure of the function __throw in
1035	 libgcc2.c.  It is the only function that ever contains eh_stub
1036	 labels.  It modifies its return address so that the last block
1037	 returns to one of the eh_stub labels within it.  So we have to
1038	 make additional edges in the flow graph.  */
1039      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1040	make_label_edge (bb, eh_return_stub_label, EDGE_EH);
1041
1042      /* Find out if we can drop through to the next block.  */
1043      insn = next_nonnote_insn (insn);
1044      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1045	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1046      else if (i + 1 < n_basic_blocks)
1047	{
1048	  rtx tmp = BLOCK_HEAD (i + 1);
1049	  if (GET_CODE (tmp) == NOTE)
1050	    tmp = next_nonnote_insn (tmp);
1051	  if (force_fallthru || insn == tmp)
1052	    make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1053	}
1054    }
1055}
1056
1057/* Create an edge between two basic blocks.  FLAGS are auxiliary information
1058   about the edge that is accumulated between calls.  */
1059
1060static void
1061make_edge (src, dst, flags)
1062     basic_block src, dst;
1063     int flags;
1064{
1065  edge e;
1066
1067  /* Make sure we don't add duplicate edges.  */
1068
1069  for (e = src->succ; e ; e = e->succ_next)
1070    if (e->dest == dst)
1071      {
1072	e->flags |= flags;
1073	return;
1074      }
1075
1076  e = (edge) xcalloc (1, sizeof (*e));
1077
1078  e->succ_next = src->succ;
1079  e->pred_next = dst->pred;
1080  e->src = src;
1081  e->dest = dst;
1082  e->flags = flags;
1083
1084  src->succ = e;
1085  dst->pred = e;
1086}
1087
1088/* Create an edge from a basic block to a label.  */
1089
1090static void
1091make_label_edge (src, label, flags)
1092     basic_block src;
1093     rtx label;
1094     int flags;
1095{
1096  if (GET_CODE (label) != CODE_LABEL)
1097    abort ();
1098
1099  /* If the label was never emitted, this insn is junk, but avoid a
1100     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
1101     as a result of a syntax error and a diagnostic has already been
1102     printed.  */
1103
1104  if (INSN_UID (label) == 0)
1105    return;
1106
1107  make_edge (src, BLOCK_FOR_INSN (label), flags);
1108}
1109
1110/* Identify critical edges and set the bits appropriately.  */
1111static void
1112mark_critical_edges ()
1113{
1114  int i, n = n_basic_blocks;
1115  basic_block bb;
1116
1117  /* We begin with the entry block.  This is not terribly important now,
1118     but could be if a front end (Fortran) implemented alternate entry
1119     points.  */
1120  bb = ENTRY_BLOCK_PTR;
1121  i = -1;
1122
1123  while (1)
1124    {
1125      edge e;
1126
1127      /* (1) Critical edges must have a source with multiple successors.  */
1128      if (bb->succ && bb->succ->succ_next)
1129	{
1130	  for (e = bb->succ; e ; e = e->succ_next)
1131	    {
1132	      /* (2) Critical edges must have a destination with multiple
1133		 predecessors.  Note that we know there is at least one
1134		 predecessor -- the edge we followed to get here.  */
1135	      if (e->dest->pred->pred_next)
1136		e->flags |= EDGE_CRITICAL;
1137	      else
1138		e->flags &= ~EDGE_CRITICAL;
1139	    }
1140	}
1141      else
1142	{
1143	  for (e = bb->succ; e ; e = e->succ_next)
1144	    e->flags &= ~EDGE_CRITICAL;
1145	}
1146
1147      if (++i >= n)
1148	break;
1149      bb = BASIC_BLOCK (i);
1150    }
1151}
1152
1153/* Split a (typically critical) edge.  Return the new block.
1154   Abort on abnormal edges.
1155
1156   ??? The code generally expects to be called on critical edges.
1157   The case of a block ending in an unconditional jump to a
1158   block with multiple predecessors is not handled optimally.  */
1159
1160basic_block
1161split_edge (edge_in)
1162     edge edge_in;
1163{
1164  basic_block old_pred, bb, old_succ;
1165  edge edge_out;
1166  rtx bb_note;
1167  int i;
1168
1169  /* Abnormal edges cannot be split.  */
1170  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1171    abort ();
1172
1173  old_pred = edge_in->src;
1174  old_succ = edge_in->dest;
1175
1176  /* Remove the existing edge from the destination's pred list.  */
1177  {
1178    edge *pp;
1179    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1180      continue;
1181    *pp = edge_in->pred_next;
1182    edge_in->pred_next = NULL;
1183  }
1184
1185  /* Create the new structures.  */
1186  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1187  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1188
1189  memset (bb, 0, sizeof (*bb));
1190  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
1191  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1192  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1193
1194  /* ??? This info is likely going to be out of date very soon.  */
1195  CLEAR_REG_SET (bb->local_set);
1196  if (old_succ->global_live_at_start)
1197    {
1198      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1199      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1200    }
1201  else
1202    {
1203      CLEAR_REG_SET (bb->global_live_at_start);
1204      CLEAR_REG_SET (bb->global_live_at_end);
1205    }
1206
1207  /* Wire them up.  */
1208  bb->pred = edge_in;
1209  bb->succ = edge_out;
1210
1211  edge_in->dest = bb;
1212  edge_in->flags &= ~EDGE_CRITICAL;
1213
1214  edge_out->pred_next = old_succ->pred;
1215  edge_out->succ_next = NULL;
1216  edge_out->src = bb;
1217  edge_out->dest = old_succ;
1218  edge_out->flags = EDGE_FALLTHRU;
1219  edge_out->probability = REG_BR_PROB_BASE;
1220
1221  old_succ->pred = edge_out;
1222
1223  /* Tricky case -- if there existed a fallthru into the successor
1224     (and we're not it) we must add a new unconditional jump around
1225     the new block we're actually interested in.
1226
1227     Further, if that edge is critical, this means a second new basic
1228     block must be created to hold it.  In order to simplify correct
1229     insn placement, do this before we touch the existing basic block
1230     ordering for the block we were really wanting.  */
1231  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1232    {
1233      edge e;
1234      for (e = edge_out->pred_next; e ; e = e->pred_next)
1235	if (e->flags & EDGE_FALLTHRU)
1236	  break;
1237
1238      if (e)
1239	{
1240	  basic_block jump_block;
1241	  rtx pos;
1242
1243	  if ((e->flags & EDGE_CRITICAL) == 0)
1244	    {
1245	      /* Non critical -- we can simply add a jump to the end
1246		 of the existing predecessor.  */
1247	      jump_block = e->src;
1248	    }
1249	  else
1250	    {
1251	      /* We need a new block to hold the jump.  The simplest
1252	         way to do the bulk of the work here is to recursively
1253	         call ourselves.  */
1254	      jump_block = split_edge (e);
1255	      e = jump_block->succ;
1256	    }
1257
1258	  /* Now add the jump insn ...  */
1259	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
1260				      jump_block->end);
1261	  jump_block->end = pos;
1262	  emit_barrier_after (pos);
1263
1264	  /* ... let jump know that label is in use, ...  */
1265	  ++LABEL_NUSES (old_succ->head);
1266
1267	  /* ... and clear fallthru on the outgoing edge.  */
1268	  e->flags &= ~EDGE_FALLTHRU;
1269
1270	  /* Continue splitting the interesting edge.  */
1271	}
1272    }
1273
1274  /* Place the new block just in front of the successor.  */
1275  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1276  for (i = n_basic_blocks - 1; i > old_succ->index; --i)
1277    {
1278      basic_block tmp = BASIC_BLOCK (i - 1);
1279      BASIC_BLOCK (i) = tmp;
1280      tmp->index = i;
1281    }
1282  BASIC_BLOCK (i) = bb;
1283  bb->index = i;
1284
1285  /* Create the basic block note.  */
1286  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1287  NOTE_BASIC_BLOCK (bb_note) = bb;
1288  bb->head = bb->end = bb_note;
1289
1290  /* Not quite simple -- for non-fallthru edges, we must adjust the
1291     predecessor's jump instruction to target our new block.  */
1292  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1293    {
1294      rtx tmp, insn = old_pred->end;
1295      rtx old_label = old_succ->head;
1296      rtx new_label = gen_label_rtx ();
1297
1298      if (GET_CODE (insn) != JUMP_INSN)
1299	abort ();
1300
1301      /* ??? Recognize a tablejump and adjust all matching cases.  */
1302      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1303	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1304	  && GET_CODE (tmp) == JUMP_INSN
1305	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1306	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1307	{
1308	  rtvec vec;
1309	  int j;
1310
1311	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1312	    vec = XVEC (PATTERN (tmp), 0);
1313	  else
1314	    vec = XVEC (PATTERN (tmp), 1);
1315
1316	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1317	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1318	      {
1319	        RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1320		--LABEL_NUSES (old_label);
1321		++LABEL_NUSES (new_label);
1322	      }
1323	}
1324      else
1325	{
1326	  /* This would have indicated an abnormal edge.  */
1327	  if (computed_jump_p (insn))
1328	    abort ();
1329
1330	  /* A return instruction can't be redirected.  */
1331	  if (returnjump_p (insn))
1332	    abort ();
1333
1334	  /* If the insn doesn't go where we think, we're confused.  */
1335	  if (JUMP_LABEL (insn) != old_label)
1336	    abort ();
1337
1338	  redirect_jump (insn, new_label);
1339	}
1340
1341      emit_label_before (new_label, bb_note);
1342      bb->head = new_label;
1343    }
1344
1345  return bb;
1346}
1347
1348/* Queue instructions for insertion on an edge between two basic blocks.
1349   The new instructions and basic blocks (if any) will not appear in the
1350   CFG until commit_edge_insertions is called.  */
1351
1352void
1353insert_insn_on_edge (pattern, e)
1354     rtx pattern;
1355     edge e;
1356{
1357  /* We cannot insert instructions on an abnormal critical edge.
1358     It will be easier to find the culprit if we die now.  */
1359  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1360      == (EDGE_ABNORMAL|EDGE_CRITICAL))
1361    abort ();
1362
1363  if (e->insns == NULL_RTX)
1364    start_sequence ();
1365  else
1366    push_to_sequence (e->insns);
1367
1368  emit_insn (pattern);
1369
1370  e->insns = get_insns ();
1371  end_sequence();
1372}
1373
1374/* Update the CFG for the instructions queued on edge E.  */
1375
1376static void
1377commit_one_edge_insertion (e)
1378     edge e;
1379{
1380  rtx before = NULL_RTX, after = NULL_RTX, tmp;
1381  basic_block bb;
1382
1383  /* Figure out where to put these things.  If the destination has
1384     one predecessor, insert there.  Except for the exit block.  */
1385  if (e->dest->pred->pred_next == NULL
1386      && e->dest != EXIT_BLOCK_PTR)
1387    {
1388      bb = e->dest;
1389
1390      /* Get the location correct wrt a code label, and "nice" wrt
1391	 a basic block note, and before everything else.  */
1392      tmp = bb->head;
1393      if (GET_CODE (tmp) == CODE_LABEL)
1394	tmp = NEXT_INSN (tmp);
1395      if (GET_CODE (tmp) == NOTE
1396	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1397	tmp = NEXT_INSN (tmp);
1398      if (tmp == bb->head)
1399	before = tmp;
1400      else
1401	after = PREV_INSN (tmp);
1402    }
1403
1404  /* If the source has one successor and the edge is not abnormal,
1405     insert there.  Except for the entry block.  */
1406  else if ((e->flags & EDGE_ABNORMAL) == 0
1407	   && e->src->succ->succ_next == NULL
1408	   && e->src != ENTRY_BLOCK_PTR)
1409    {
1410      bb = e->src;
1411      if (GET_CODE (bb->end) == JUMP_INSN)
1412	{
1413	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
1414	     a jump with delay slots already filled?  */
1415	  if (! simplejump_p (bb->end))
1416	    abort ();
1417
1418	  before = bb->end;
1419	}
1420      else
1421	{
1422	  /* We'd better be fallthru, or we've lost track of what's what.  */
1423	  if ((e->flags & EDGE_FALLTHRU) == 0)
1424	    abort ();
1425
1426	  after = bb->end;
1427	}
1428    }
1429
1430  /* Otherwise we must split the edge.  */
1431  else
1432    {
1433      bb = split_edge (e);
1434      after = bb->end;
1435    }
1436
1437  /* Now that we've found the spot, do the insertion.  */
1438  tmp = e->insns;
1439  e->insns = NULL_RTX;
1440  if (before)
1441    {
1442      emit_insns_before (tmp, before);
1443      if (before == bb->head)
1444	bb->head = before;
1445    }
1446  else
1447    {
1448      tmp = emit_insns_after (tmp, after);
1449      if (after == bb->end)
1450	bb->end = tmp;
1451    }
1452}
1453
1454/* Update the CFG for all queued instructions.  */
1455
1456void
1457commit_edge_insertions ()
1458{
1459  int i;
1460  basic_block bb;
1461
1462  i = -1;
1463  bb = ENTRY_BLOCK_PTR;
1464  while (1)
1465    {
1466      edge e, next;
1467
1468      for (e = bb->succ; e ; e = next)
1469	{
1470	  next = e->succ_next;
1471	  if (e->insns)
1472	    commit_one_edge_insertion (e);
1473	}
1474
1475      if (++i >= n_basic_blocks)
1476	break;
1477      bb = BASIC_BLOCK (i);
1478    }
1479}
1480
1481/* Delete all unreachable basic blocks.   */
1482
1483static void
1484delete_unreachable_blocks ()
1485{
1486  basic_block *worklist, *tos;
1487  int deleted_handler;
1488  edge e;
1489  int i, n;
1490
1491  n = n_basic_blocks;
1492  tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1493
1494  /* Use basic_block->aux as a marker.  Clear them all.  */
1495
1496  for (i = 0; i < n; ++i)
1497    BASIC_BLOCK (i)->aux = NULL;
1498
1499  /* Add our starting points to the worklist.  Almost always there will
1500     be only one.  It isn't inconcievable that we might one day directly
1501     support Fortran alternate entry points.  */
1502
1503  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1504    {
1505      *tos++ = e->dest;
1506
1507      /* Mark the block with a handy non-null value.  */
1508      e->dest->aux = e;
1509    }
1510
1511  /* Iterate: find everything reachable from what we've already seen.  */
1512
1513  while (tos != worklist)
1514    {
1515      basic_block b = *--tos;
1516
1517      for (e = b->succ; e ; e = e->succ_next)
1518	if (!e->dest->aux)
1519	  {
1520	    *tos++ = e->dest;
1521	    e->dest->aux = e;
1522	  }
1523    }
1524
1525  /* Delete all unreachable basic blocks.  Count down so that we don't
1526     interfere with the block renumbering that happens in delete_block.  */
1527
1528  deleted_handler = 0;
1529
1530  for (i = n - 1; i >= 0; --i)
1531    {
1532      basic_block b = BASIC_BLOCK (i);
1533
1534      if (b->aux != NULL)
1535	/* This block was found.  Tidy up the mark.  */
1536	b->aux = NULL;
1537      else
1538	deleted_handler |= delete_block (b);
1539    }
1540
1541  /* Fix up edges that now fall through, or rather should now fall through
1542     but previously required a jump around now deleted blocks.  Simplify
1543     the search by only examining blocks numerically adjacent, since this
1544     is how find_basic_blocks created them.  */
1545
1546  for (i = 1; i < n_basic_blocks; ++i)
1547    {
1548      basic_block b = BASIC_BLOCK (i - 1);
1549      basic_block c = BASIC_BLOCK (i);
1550      edge s;
1551
1552      /* We care about simple conditional or unconditional jumps with
1553	 a single successor.
1554
1555	 If we had a conditional branch to the next instruction when
1556	 find_basic_blocks was called, then there will only be one
1557	 out edge for the block which ended with the conditional
1558	 branch (since we do not create duplicate edges).
1559
1560	 Furthermore, the edge will be marked as a fallthru because we
1561	 merge the flags for the duplicate edges.  So we do not want to
1562	 check that the edge is not a FALLTHRU edge.  */
1563      if ((s = b->succ) != NULL
1564	  && s->succ_next == NULL
1565	  && s->dest == c
1566	  /* If the last insn is not a normal conditional jump
1567	     (or an unconditional jump), then we can not tidy the
1568	     fallthru edge because we can not delete the jump.  */
1569	  && GET_CODE (b->end) == JUMP_INSN
1570	  && condjump_p (b->end))
1571	tidy_fallthru_edge (s, b, c);
1572    }
1573
1574  /* Attempt to merge blocks as made possible by edge removal.  If a block
1575     has only one successor, and the successor has only one predecessor,
1576     they may be combined.  */
1577
1578  for (i = 0; i < n_basic_blocks; )
1579    {
1580      basic_block c, b = BASIC_BLOCK (i);
1581      edge s;
1582
1583      /* A loop because chains of blocks might be combineable.  */
1584      while ((s = b->succ) != NULL
1585	     && s->succ_next == NULL
1586	     && (s->flags & EDGE_EH) == 0
1587	     && (c = s->dest) != EXIT_BLOCK_PTR
1588	     && c->pred->pred_next == NULL
1589	     /* If the last insn is not a normal conditional jump
1590		(or an unconditional jump), then we can not merge
1591		the blocks because we can not delete the jump.  */
1592	     && GET_CODE (b->end) == JUMP_INSN
1593	     && condjump_p (b->end)
1594	     && merge_blocks (s, b, c))
1595	continue;
1596
1597      /* Don't get confused by the index shift caused by deleting blocks.  */
1598      i = b->index + 1;
1599    }
1600
1601  /* If we deleted an exception handler, we may have EH region begin/end
1602     blocks to remove as well. */
1603  if (deleted_handler)
1604    delete_eh_regions ();
1605}
1606
1607/* Find EH regions for which there is no longer a handler, and delete them.  */
1608
1609static void
1610delete_eh_regions ()
1611{
1612  rtx insn;
1613
1614  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1615    if (GET_CODE (insn) == NOTE)
1616      {
1617	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1618	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1619	  {
1620	    int num = CODE_LABEL_NUMBER (insn);
1621	    /* A NULL handler indicates a region is no longer needed */
1622	    if (get_first_handler (num) == NULL)
1623	      {
1624		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1625		NOTE_SOURCE_FILE (insn) = 0;
1626	      }
1627	  }
1628      }
1629}
1630
1631/* Return true if NOTE is not one of the ones that must be kept paired,
1632   so that we may simply delete them.  */
1633
1634static int
1635can_delete_note_p (note)
1636     rtx note;
1637{
1638  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1639	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1640}
1641
1642/* Unlink a chain of insns between START and FINISH, leaving notes
1643   that must be paired.  */
1644
1645static void
1646delete_insn_chain (start, finish)
1647     rtx start, finish;
1648{
1649  /* Unchain the insns one by one.  It would be quicker to delete all
1650     of these with a single unchaining, rather than one at a time, but
1651     we need to keep the NOTE's.  */
1652
1653  rtx next;
1654
1655  while (1)
1656    {
1657      next = NEXT_INSN (start);
1658      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1659	;
1660      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1661	;
1662      else
1663	next = flow_delete_insn (start);
1664
1665      if (start == finish)
1666	break;
1667      start = next;
1668    }
1669}
1670
1671/* Delete the insns in a (non-live) block.  We physically delete every
1672   non-deleted-note insn, and update the flow graph appropriately.
1673
1674   Return nonzero if we deleted an exception handler.  */
1675
1676/* ??? Preserving all such notes strikes me as wrong.  It would be nice
1677   to post-process the stream to remove empty blocks, loops, ranges, etc.  */
1678
1679static int
1680delete_block (b)
1681     basic_block b;
1682{
1683  int deleted_handler = 0;
1684  rtx insn, end, tmp;
1685
1686  /* If the head of this block is a CODE_LABEL, then it might be the
1687     label for an exception handler which can't be reached.
1688
1689     We need to remove the label from the exception_handler_label list
1690     and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
1691     notes.  */
1692
1693  insn = b->head;
1694
1695  if (GET_CODE (insn) == CODE_LABEL)
1696    {
1697      rtx x, *prev = &exception_handler_labels;
1698
1699      for (x = exception_handler_labels; x; x = XEXP (x, 1))
1700	{
1701	  if (XEXP (x, 0) == insn)
1702	    {
1703	      /* Found a match, splice this label out of the EH label list.  */
1704	      *prev = XEXP (x, 1);
1705	      XEXP (x, 1) = NULL_RTX;
1706	      XEXP (x, 0) = NULL_RTX;
1707
1708	      /* Remove the handler from all regions */
1709	      remove_handler (insn);
1710	      deleted_handler = 1;
1711	      break;
1712	    }
1713	  prev = &XEXP (x, 1);
1714	}
1715
1716      /* This label may be referenced by code solely for its value, or
1717	 referenced by static data, or something.  We have determined
1718	 that it is not reachable, but cannot delete the label itself.
1719	 Save code space and continue to delete the balance of the block,
1720	 along with properly updating the cfg.  */
1721      if (!can_delete_label_p (insn))
1722	{
1723	  /* If we've only got one of these, skip the whole deleting
1724	     insns thing.  */
1725	  if (insn == b->end)
1726	    goto no_delete_insns;
1727	  insn = NEXT_INSN (insn);
1728	}
1729    }
1730
1731  /* Include any jump table following the basic block.  */
1732  end = b->end;
1733  if (GET_CODE (end) == JUMP_INSN
1734      && (tmp = JUMP_LABEL (end)) != NULL_RTX
1735      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1736      && GET_CODE (tmp) == JUMP_INSN
1737      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1738	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1739    end = tmp;
1740
1741  /* Include any barrier that may follow the basic block.  */
1742  tmp = next_nonnote_insn (b->end);
1743  if (tmp && GET_CODE (tmp) == BARRIER)
1744    end = tmp;
1745
1746  delete_insn_chain (insn, end);
1747
1748no_delete_insns:
1749
1750  /* Remove the edges into and out of this block.  Note that there may
1751     indeed be edges in, if we are removing an unreachable loop.  */
1752  {
1753    edge e, next, *q;
1754
1755    for (e = b->pred; e ; e = next)
1756      {
1757	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1758	  continue;
1759	*q = e->succ_next;
1760	next = e->pred_next;
1761	free (e);
1762      }
1763    for (e = b->succ; e ; e = next)
1764      {
1765	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1766	  continue;
1767	*q = e->pred_next;
1768	next = e->succ_next;
1769	free (e);
1770      }
1771
1772    b->pred = NULL;
1773    b->succ = NULL;
1774  }
1775
1776  /* Remove the basic block from the array, and compact behind it.  */
1777  expunge_block (b);
1778
1779  return deleted_handler;
1780}
1781
1782/* Remove block B from the basic block array and compact behind it.  */
1783
1784static void
1785expunge_block (b)
1786     basic_block b;
1787{
1788  int i, n = n_basic_blocks;
1789
1790  for (i = b->index; i + 1 < n; ++i)
1791    {
1792      basic_block x = BASIC_BLOCK (i + 1);
1793      BASIC_BLOCK (i) = x;
1794      x->index = i;
1795    }
1796
1797  basic_block_info->num_elements--;
1798  n_basic_blocks--;
1799}
1800
1801/* Delete INSN by patching it out.  Return the next insn.  */
1802
1803static rtx
1804flow_delete_insn (insn)
1805     rtx insn;
1806{
1807  rtx prev = PREV_INSN (insn);
1808  rtx next = NEXT_INSN (insn);
1809  rtx note;
1810
1811  PREV_INSN (insn) = NULL_RTX;
1812  NEXT_INSN (insn) = NULL_RTX;
1813
1814  if (prev)
1815    NEXT_INSN (prev) = next;
1816  if (next)
1817    PREV_INSN (next) = prev;
1818  else
1819    set_last_insn (prev);
1820
1821  if (GET_CODE (insn) == CODE_LABEL)
1822    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1823
1824  /* If deleting a jump, decrement the use count of the label.  Deleting
1825     the label itself should happen in the normal course of block merging.  */
1826  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1827    LABEL_NUSES (JUMP_LABEL (insn))--;
1828
1829  /* Also if deleting an insn that references a label.  */
1830  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
1831    LABEL_NUSES (XEXP (note, 0))--;
1832
1833  return next;
1834}
1835
1836/* True if a given label can be deleted.  */
1837
1838static int
1839can_delete_label_p (label)
1840     rtx label;
1841{
1842  rtx x;
1843
1844  if (LABEL_PRESERVE_P (label))
1845    return 0;
1846
1847  for (x = forced_labels; x ; x = XEXP (x, 1))
1848    if (label == XEXP (x, 0))
1849      return 0;
1850  for (x = label_value_list; x ; x = XEXP (x, 1))
1851    if (label == XEXP (x, 0))
1852      return 0;
1853  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1854    if (label == XEXP (x, 0))
1855      return 0;
1856
1857  /* User declared labels must be preserved.  */
1858  if (LABEL_NAME (label) != 0)
1859    return 0;
1860
1861  return 1;
1862}
1863
1864/* Blocks A and B are to be merged into a single block.  The insns
1865   are already contiguous, hence `nomove'.  */
1866
1867static void
1868merge_blocks_nomove (a, b)
1869     basic_block a, b;
1870{
1871  edge e;
1872  rtx b_head, b_end, a_end;
1873  int b_empty = 0;
1874
1875  /* If there was a CODE_LABEL beginning B, delete it.  */
1876  b_head = b->head;
1877  b_end = b->end;
1878  if (GET_CODE (b_head) == CODE_LABEL)
1879    {
1880      /* Detect basic blocks with nothing but a label.  This can happen
1881	 in particular at the end of a function.  */
1882      if (b_head == b_end)
1883	b_empty = 1;
1884      b_head = flow_delete_insn (b_head);
1885    }
1886
1887  /* Delete the basic block note.  */
1888  if (GET_CODE (b_head) == NOTE
1889      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
1890    {
1891      if (b_head == b_end)
1892	b_empty = 1;
1893      b_head = flow_delete_insn (b_head);
1894    }
1895
1896  /* If there was a jump out of A, delete it.  */
1897  a_end = a->end;
1898  if (GET_CODE (a_end) == JUMP_INSN)
1899    {
1900      rtx prev;
1901
1902      prev = prev_nonnote_insn (a_end);
1903      if (!prev)
1904	prev = a->head;
1905
1906#ifdef HAVE_cc0
1907      /* If this was a conditional jump, we need to also delete
1908	 the insn that set cc0.  */
1909
1910      if (prev && sets_cc0_p (prev))
1911	{
1912          rtx tmp = prev;
1913	  prev = prev_nonnote_insn (prev);
1914	  if (!prev)
1915	    prev = a->head;
1916	  flow_delete_insn (tmp);
1917	}
1918#endif
1919
1920      /* Note that a->head != a->end, since we should have at least a
1921	 bb note plus the jump, so prev != insn.  */
1922      flow_delete_insn (a_end);
1923      a_end = prev;
1924    }
1925
1926  /* By definition, there should only be one successor of A, and that is
1927     B.  Free that edge struct.  */
1928  free (a->succ);
1929
1930  /* Adjust the edges out of B for the new owner.  */
1931  for (e = b->succ; e ; e = e->succ_next)
1932    e->src = a;
1933  a->succ = b->succ;
1934
1935  /* Reassociate the insns of B with A.  */
1936  if (!b_empty)
1937    {
1938      BLOCK_FOR_INSN (b_head) = a;
1939      while (b_head != b_end)
1940	{
1941	  b_head = NEXT_INSN (b_head);
1942	  BLOCK_FOR_INSN (b_head) = a;
1943	}
1944      a_end = b_head;
1945    }
1946  a->end = a_end;
1947
1948  /* Compact the basic block array.  */
1949  expunge_block (b);
1950}
1951
1952/* Attempt to merge basic blocks that are potentially non-adjacent.
1953   Return true iff the attempt succeeded.  */
1954
1955static int
1956merge_blocks (e, b, c)
1957     edge e;
1958     basic_block b, c;
1959{
1960  /* If B has a fallthru edge to C, no need to move anything.  */
1961  if (!(e->flags & EDGE_FALLTHRU))
1962    {
1963      /* ??? From here on out we must make sure to not munge nesting
1964	 of exception regions and lexical blocks.  Need to think about
1965	 these cases before this gets implemented.  */
1966      return 0;
1967
1968      /* If C has an outgoing fallthru, and B does not have an incoming
1969	 fallthru, move B before C.  The later clause is somewhat arbitrary,
1970	 but avoids modifying blocks other than the two we've been given.  */
1971
1972      /* Otherwise, move C after B.  If C had a fallthru, which doesn't
1973	 happen to be the physical successor to B, insert an unconditional
1974	 branch.  If C already ended with a conditional branch, the new
1975	 jump must go in a new basic block D.  */
1976    }
1977
1978  /* If a label still appears somewhere and we cannot delete the label,
1979     then we cannot merge the blocks.  The edge was tidied already.  */
1980  {
1981    rtx insn, stop = NEXT_INSN (c->head);
1982    for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
1983      if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
1984	return 0;
1985  }
1986
1987  merge_blocks_nomove (b, c);
1988  return 1;
1989}
1990
1991/* The given edge should potentially a fallthru edge.  If that is in
1992   fact true, delete the unconditional jump and barriers that are in
1993   the way.  */
1994
1995static void
1996tidy_fallthru_edge (e, b, c)
1997     edge e;
1998     basic_block b, c;
1999{
2000  rtx q;
2001
2002  /* ??? In a late-running flow pass, other folks may have deleted basic
2003     blocks by nopping out blocks, leaving multiple BARRIERs between here
2004     and the target label. They ought to be chastized and fixed.
2005
2006     We can also wind up with a sequence of undeletable labels between
2007     one block and the next.
2008
2009     So search through a sequence of barriers, labels, and notes for
2010     the head of block C and assert that we really do fall through.  */
2011
2012  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2013    return;
2014
2015  /* Remove what will soon cease being the jump insn from the source block.
2016     If block B consisted only of this single jump, turn it into a deleted
2017     note.  */
2018  q = b->end;
2019  if (GET_CODE (q) == JUMP_INSN)
2020    {
2021#ifdef HAVE_cc0
2022      /* If this was a conditional jump, we need to also delete
2023	 the insn that set cc0.  */
2024      if (! simplejump_p (q) && condjump_p (q))
2025	q = PREV_INSN (q);
2026#endif
2027
2028      if (b->head == q)
2029	{
2030	  PUT_CODE (q, NOTE);
2031	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2032	  NOTE_SOURCE_FILE (q) = 0;
2033	}
2034      else
2035	b->end = q = PREV_INSN (q);
2036    }
2037
2038  /* Selectively unlink the sequence.  */
2039  if (q != PREV_INSN (c->head))
2040    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2041
2042  e->flags |= EDGE_FALLTHRU;
2043}
2044
2045/* Discover and record the loop depth at the head of each basic block.  */
2046
2047static void
2048calculate_loop_depth (insns)
2049     rtx insns;
2050{
2051  basic_block bb;
2052  rtx insn;
2053  int i = 0, depth = 1;
2054
2055  bb = BASIC_BLOCK (i);
2056  for (insn = insns; insn ; insn = NEXT_INSN (insn))
2057    {
2058      if (insn == bb->head)
2059	{
2060	  bb->loop_depth = depth;
2061	  if (++i >= n_basic_blocks)
2062	    break;
2063	  bb = BASIC_BLOCK (i);
2064	}
2065
2066      if (GET_CODE (insn) == NOTE)
2067	{
2068	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2069	    depth++;
2070	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2071	    depth--;
2072
2073	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2074	  if (depth == 0)
2075	    abort ();
2076	}
2077    }
2078}
2079
2080/* Perform data flow analysis.
2081   F is the first insn of the function and NREGS the number of register numbers
2082   in use.  */
2083
2084void
2085life_analysis (f, nregs, file, remove_dead_code)
2086     rtx f;
2087     int nregs;
2088     FILE *file;
2089     int remove_dead_code;
2090{
2091#ifdef ELIMINABLE_REGS
2092  register size_t i;
2093  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2094#endif
2095
2096  /* Record which registers will be eliminated.  We use this in
2097     mark_used_regs.  */
2098
2099  CLEAR_HARD_REG_SET (elim_reg_set);
2100
2101#ifdef ELIMINABLE_REGS
2102  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2103    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2104#else
2105  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2106#endif
2107
2108  /* Allocate a bitmap to be filled in by record_volatile_insns.  */
2109  uid_volatile = BITMAP_ALLOCA ();
2110
2111  /* We want alias analysis information for local dead store elimination.  */
2112  init_alias_analysis ();
2113  life_analysis_1 (f, nregs, remove_dead_code);
2114  end_alias_analysis ();
2115
2116  if (file)
2117    dump_flow_info (file);
2118
2119  BITMAP_FREE (uid_volatile);
2120  free_basic_block_vars (1);
2121}
2122
2123/* Free the variables allocated by find_basic_blocks.
2124
2125   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */
2126
2127void
2128free_basic_block_vars (keep_head_end_p)
2129     int keep_head_end_p;
2130{
2131  if (basic_block_for_insn)
2132    {
2133      VARRAY_FREE (basic_block_for_insn);
2134      basic_block_for_insn = NULL;
2135    }
2136
2137  if (! keep_head_end_p)
2138    {
2139      clear_edges ();
2140      VARRAY_FREE (basic_block_info);
2141      n_basic_blocks = 0;
2142
2143      ENTRY_BLOCK_PTR->aux = NULL;
2144      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2145      EXIT_BLOCK_PTR->aux = NULL;
2146      EXIT_BLOCK_PTR->global_live_at_start = NULL;
2147    }
2148}
2149
2150/* Return nonzero if the destination of SET equals the source.  */
2151static int
2152set_noop_p (set)
2153     rtx set;
2154{
2155  rtx src = SET_SRC (set);
2156  rtx dst = SET_DEST (set);
2157  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2158      && REGNO (src) == REGNO (dst))
2159    return 1;
2160  if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2161      || SUBREG_WORD (src) != SUBREG_WORD (dst))
2162    return 0;
2163  src = SUBREG_REG (src);
2164  dst = SUBREG_REG (dst);
2165  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2166      && REGNO (src) == REGNO (dst))
2167    return 1;
2168  return 0;
2169}
2170
2171/* Return nonzero if an insn consists only of SETs, each of which only sets a
2172   value to itself.  */
2173static int
2174noop_move_p (insn)
2175     rtx insn;
2176{
2177  rtx pat = PATTERN (insn);
2178
2179  /* Insns carrying these notes are useful later on.  */
2180  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2181    return 0;
2182
2183  if (GET_CODE (pat) == SET && set_noop_p (pat))
2184    return 1;
2185
2186  if (GET_CODE (pat) == PARALLEL)
2187    {
2188      int i;
2189      /* If nothing but SETs of registers to themselves,
2190	 this insn can also be deleted.  */
2191      for (i = 0; i < XVECLEN (pat, 0); i++)
2192	{
2193	  rtx tem = XVECEXP (pat, 0, i);
2194
2195	  if (GET_CODE (tem) == USE
2196	      || GET_CODE (tem) == CLOBBER)
2197	    continue;
2198
2199	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2200	    return 0;
2201	}
2202
2203      return 1;
2204    }
2205  return 0;
2206}
2207
2208static void
2209notice_stack_pointer_modification (x, pat)
2210     rtx x;
2211     rtx pat ATTRIBUTE_UNUSED;
2212{
2213  if (x == stack_pointer_rtx
2214      /* The stack pointer is only modified indirectly as the result
2215	 of a push until later in flow.  See the comments in rtl.texi
2216	 regarding Embedded Side-Effects on Addresses.  */
2217      || (GET_CODE (x) == MEM
2218	  && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2219	      || GET_CODE (XEXP (x, 0)) == PRE_INC
2220	      || GET_CODE (XEXP (x, 0)) == POST_DEC
2221	      || GET_CODE (XEXP (x, 0)) == POST_INC)
2222	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2223    current_function_sp_is_unchanging = 0;
2224}
2225
2226/* Record which insns refer to any volatile memory
2227   or for any reason can't be deleted just because they are dead stores.
2228   Also, delete any insns that copy a register to itself.
2229   And see if the stack pointer is modified.  */
2230static void
2231record_volatile_insns (f)
2232     rtx f;
2233{
2234  rtx insn;
2235  for (insn = f; insn; insn = NEXT_INSN (insn))
2236    {
2237      enum rtx_code code1 = GET_CODE (insn);
2238      if (code1 == CALL_INSN)
2239	SET_INSN_VOLATILE (insn);
2240      else if (code1 == INSN || code1 == JUMP_INSN)
2241	{
2242	  if (GET_CODE (PATTERN (insn)) != USE
2243	      && volatile_refs_p (PATTERN (insn)))
2244	    SET_INSN_VOLATILE (insn);
2245
2246	  /* A SET that makes space on the stack cannot be dead.
2247	     (Such SETs occur only for allocating variable-size data,
2248	     so they will always have a PLUS or MINUS according to the
2249	     direction of stack growth.)
2250	     Even if this function never uses this stack pointer value,
2251	     signal handlers do!  */
2252	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2253		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2254#ifdef STACK_GROWS_DOWNWARD
2255		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2256#else
2257		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2258#endif
2259		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2260	    SET_INSN_VOLATILE (insn);
2261
2262	  /* Delete (in effect) any obvious no-op moves.  */
2263	  else if (noop_move_p (insn))
2264	    {
2265	      PUT_CODE (insn, NOTE);
2266	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2267	      NOTE_SOURCE_FILE (insn) = 0;
2268	    }
2269	}
2270
2271      /* Check if insn modifies the stack pointer.  */
2272      if ( current_function_sp_is_unchanging
2273	   && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2274	note_stores (PATTERN (insn), notice_stack_pointer_modification);
2275    }
2276}
2277
2278/* Mark those regs which are needed at the end of the function as live
2279   at the end of the last basic block.  */
2280static void
2281mark_regs_live_at_end (set)
2282     regset set;
2283{
2284  int i;
2285
2286  /* If exiting needs the right stack value, consider the stack pointer
2287     live at the end of the function.  */
2288  if (! EXIT_IGNORE_STACK
2289      || (! FRAME_POINTER_REQUIRED
2290	  && ! current_function_calls_alloca
2291	  && flag_omit_frame_pointer)
2292      || current_function_sp_is_unchanging)
2293    {
2294      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2295    }
2296
2297  /* Mark the frame pointer if needed at the end of the function.  If
2298     we end up eliminating it, it will be removed from the live list
2299     of each basic block by reload.  */
2300
2301  if (! reload_completed || frame_pointer_needed)
2302    {
2303      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2304#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2305      /* If they are different, also mark the hard frame pointer as live */
2306      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2307#endif
2308    }
2309
2310  /* Mark all global registers, and all registers used by the epilogue
2311     as being live at the end of the function since they may be
2312     referenced by our caller.  */
2313  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2314    if (global_regs[i]
2315#ifdef EPILOGUE_USES
2316	|| EPILOGUE_USES (i)
2317#endif
2318	)
2319      SET_REGNO_REG_SET (set, i);
2320
2321  /* ??? Mark function return value here rather than as uses.  */
2322}
2323
2324/* Determine which registers are live at the start of each
2325   basic block of the function whose first insn is F.
2326   NREGS is the number of registers used in F.
2327   We allocate the vector basic_block_live_at_start
2328   and the regsets that it points to, and fill them with the data.
2329   regset_size and regset_bytes are also set here.  */
2330
2331static void
2332life_analysis_1 (f, nregs, remove_dead_code)
2333     rtx f;
2334     int nregs;
2335     int remove_dead_code;
2336{
2337  int first_pass;
2338  int changed;
2339  register int i;
2340  char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2341  regset *new_live_at_end;
2342
2343  struct obstack flow_obstack;
2344
2345  gcc_obstack_init (&flow_obstack);
2346
2347  max_regno = nregs;
2348
2349  /* Allocate and zero out many data structures
2350     that will record the data from lifetime analysis.  */
2351
2352  allocate_reg_life_data ();
2353  allocate_bb_life_data ();
2354
2355  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2356  memset (reg_next_use, 0, nregs * sizeof (rtx));
2357
2358  /* Set up regset-vectors used internally within this function.
2359     Their meanings are documented above, with their declarations.  */
2360
2361  new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
2362  init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);
2363
2364  /* Stick these vectors into the AUX field of the basic block, so that
2365     we don't have to keep going through the index.  */
2366
2367  for (i = 0; i < n_basic_blocks; ++i)
2368    BASIC_BLOCK (i)->aux = new_live_at_end[i];
2369  ENTRY_BLOCK_PTR->aux = new_live_at_end[i];
2370
2371  /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2372     This will be cleared by record_volatile_insns if it encounters an insn
2373     which modifies the stack pointer.  */
2374  current_function_sp_is_unchanging = !current_function_calls_alloca;
2375
2376  record_volatile_insns (f);
2377
2378  if (n_basic_blocks > 0)
2379    {
2380      regset theend;
2381      register edge e;
2382
2383      theend = EXIT_BLOCK_PTR->global_live_at_start;
2384      mark_regs_live_at_end (theend);
2385
2386      /* Propogate this exit data to each of EXIT's predecessors.  */
2387      for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2388	{
2389	  COPY_REG_SET (e->src->global_live_at_end, theend);
2390	  COPY_REG_SET ((regset) e->src->aux, theend);
2391	}
2392    }
2393
2394  /* The post-reload life analysis have (on a global basis) the same registers
2395     live as was computed by reload itself.
2396
2397     Otherwise elimination offsets and such may be incorrect.
2398
2399     Reload will make some registers as live even though they do not appear
2400     in the rtl.  */
2401  if (reload_completed)
2402    memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2403  memset (regs_ever_live, 0, sizeof regs_ever_live);
2404
2405  /* Propagate life info through the basic blocks
2406     around the graph of basic blocks.
2407
2408     This is a relaxation process: each time a new register
2409     is live at the end of the basic block, we must scan the block
2410     to determine which registers are, as a consequence, live at the beginning
2411     of that block.  These registers must then be marked live at the ends
2412     of all the blocks that can transfer control to that block.
2413     The process continues until it reaches a fixed point.  */
2414
2415  first_pass = 1;
2416  changed = 1;
2417  while (changed)
2418    {
2419      changed = 0;
2420      for (i = n_basic_blocks - 1; i >= 0; i--)
2421	{
2422	  basic_block bb = BASIC_BLOCK (i);
2423	  int consider = first_pass;
2424	  int must_rescan = first_pass;
2425	  register int j;
2426
2427	  if (!first_pass)
2428	    {
2429	      /* Set CONSIDER if this block needs thinking about at all
2430		 (that is, if the regs live now at the end of it
2431		 are not the same as were live at the end of it when
2432		 we last thought about it).
2433		 Set must_rescan if it needs to be thought about
2434		 instruction by instruction (that is, if any additional
2435		 reg that is live at the end now but was not live there before
2436		 is one of the significant regs of this basic block).  */
2437
2438	      EXECUTE_IF_AND_COMPL_IN_REG_SET
2439		((regset) bb->aux, bb->global_live_at_end, 0, j,
2440		 {
2441		   consider = 1;
2442		   if (REGNO_REG_SET_P (bb->local_set, j))
2443		     {
2444		       must_rescan = 1;
2445		       goto done;
2446		     }
2447		 });
2448	    done:
2449	      if (! consider)
2450		continue;
2451	    }
2452
2453	  /* The live_at_start of this block may be changing,
2454	     so another pass will be required after this one.  */
2455	  changed = 1;
2456
2457	  if (! must_rescan)
2458	    {
2459	      /* No complete rescan needed;
2460		 just record those variables newly known live at end
2461		 as live at start as well.  */
2462	      IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
2463				     (regset) bb->aux,
2464				     bb->global_live_at_end);
2465
2466	      IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
2467				     (regset) bb->aux,
2468				     bb->global_live_at_end);
2469	    }
2470	  else
2471	    {
2472	      /* Update the basic_block_live_at_start
2473		 by propagation backwards through the block.  */
2474	      COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
2475	      COPY_REG_SET (bb->global_live_at_start,
2476			    bb->global_live_at_end);
2477	      propagate_block (bb->global_live_at_start,
2478			       bb->head, bb->end, 0,
2479			       first_pass ? bb->local_set : (regset) 0,
2480			       i, remove_dead_code);
2481	    }
2482
2483	  /* Update the new_live_at_end's of the block's predecessors.  */
2484	  {
2485	    register edge e;
2486
2487	    for (e = bb->pred; e ; e = e->pred_next)
2488	      IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
2489	  }
2490
2491#ifdef USE_C_ALLOCA
2492	  alloca (0);
2493#endif
2494	}
2495      first_pass = 0;
2496    }
2497
2498  /* The only pseudos that are live at the beginning of the function are
2499     those that were not set anywhere in the function.  local-alloc doesn't
2500     know how to handle these correctly, so mark them as not local to any
2501     one basic block.  */
2502
2503  if (n_basic_blocks > 0)
2504    EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
2505			       FIRST_PSEUDO_REGISTER, i,
2506			       {
2507				 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2508			       });
2509
2510  /* Now the life information is accurate.  Make one more pass over each
2511     basic block to delete dead stores, create autoincrement addressing
2512     and record how many times each register is used, is set, or dies.  */
2513
2514  for (i = 0; i < n_basic_blocks; i++)
2515    {
2516      basic_block bb = BASIC_BLOCK (i);
2517
2518      /* We start with global_live_at_end to determine which stores are
2519	 dead.  This process is destructive, and we wish to preserve the
2520	 contents of global_live_at_end for posterity.  Fortunately,
2521	 new_live_at_end, due to the way we converged on a solution,
2522	 contains a duplicate of global_live_at_end that we can kill.  */
2523      propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);
2524
2525#ifdef USE_C_ALLOCA
2526      alloca (0);
2527#endif
2528    }
2529
2530  /* We have a problem with any pseudoreg that lives across the setjmp.
2531     ANSI says that if a user variable does not change in value between
2532     the setjmp and the longjmp, then the longjmp preserves it.  This
2533     includes longjmp from a place where the pseudo appears dead.
2534     (In principle, the value still exists if it is in scope.)
2535     If the pseudo goes in a hard reg, some other value may occupy
2536     that hard reg where this pseudo is dead, thus clobbering the pseudo.
2537     Conclusion: such a pseudo must not go in a hard reg.  */
2538  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2539			     FIRST_PSEUDO_REGISTER, i,
2540			     {
2541			       if (regno_reg_rtx[i] != 0)
2542				 {
2543				   REG_LIVE_LENGTH (i) = -1;
2544				   REG_BASIC_BLOCK (i) = -1;
2545				 }
2546			     });
2547
2548  /* Restore regs_ever_live that was provided by reload.  */
2549  if (reload_completed)
2550    memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2551
2552  free_regset_vector (new_live_at_end, n_basic_blocks);
2553  obstack_free (&flow_obstack, NULL_PTR);
2554
2555  for (i = 0; i < n_basic_blocks; ++i)
2556    BASIC_BLOCK (i)->aux = NULL;
2557  ENTRY_BLOCK_PTR->aux = NULL;
2558}
2559
2560/* Subroutines of life analysis.  */
2561
2562/* Allocate the permanent data structures that represent the results
2563   of life analysis.  Not static since used also for stupid life analysis.  */
2564
2565void
2566allocate_bb_life_data ()
2567{
2568  register int i;
2569
2570  for (i = 0; i < n_basic_blocks; i++)
2571    {
2572      basic_block bb = BASIC_BLOCK (i);
2573
2574      bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
2575      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
2576      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
2577    }
2578
2579  ENTRY_BLOCK_PTR->global_live_at_end
2580    = OBSTACK_ALLOC_REG_SET (function_obstack);
2581  EXIT_BLOCK_PTR->global_live_at_start
2582    = OBSTACK_ALLOC_REG_SET (function_obstack);
2583
2584  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
2585}
2586
2587void
2588allocate_reg_life_data ()
2589{
2590  int i;
2591
2592  /* Recalculate the register space, in case it has grown.  Old style
2593     vector oriented regsets would set regset_{size,bytes} here also.  */
2594  allocate_reg_info (max_regno, FALSE, FALSE);
2595
2596  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2597     information, explicitly reset it here.  The allocation should have
2598     already happened on the previous reg_scan pass.  Make sure in case
2599     some more registers were allocated.  */
2600  for (i = 0; i < max_regno; i++)
2601    REG_N_SETS (i) = 0;
2602}
2603
2604/* Make each element of VECTOR point at a regset.  The vector has
2605   NELTS elements, and space is allocated from the ALLOC_OBSTACK
2606   obstack.  */
2607
2608static void
2609init_regset_vector (vector, nelts, alloc_obstack)
2610     regset *vector;
2611     int nelts;
2612     struct obstack *alloc_obstack;
2613{
2614  register int i;
2615
2616  for (i = 0; i < nelts; i++)
2617    {
2618      vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
2619      CLEAR_REG_SET (vector[i]);
2620    }
2621}
2622
2623/* Release any additional space allocated for each element of VECTOR point
2624   other than the regset header itself.  The vector has NELTS elements.  */
2625
2626void
2627free_regset_vector (vector, nelts)
2628     regset *vector;
2629     int nelts;
2630{
2631  register int i;
2632
2633  for (i = 0; i < nelts; i++)
2634    FREE_REG_SET (vector[i]);
2635}
2636
2637/* Compute the registers live at the beginning of a basic block
2638   from those live at the end.
2639
2640   When called, OLD contains those live at the end.
2641   On return, it contains those live at the beginning.
2642   FIRST and LAST are the first and last insns of the basic block.
2643
2644   FINAL is nonzero if we are doing the final pass which is not
2645   for computing the life info (since that has already been done)
2646   but for acting on it.  On this pass, we delete dead stores,
2647   set up the logical links and dead-variables lists of instructions,
2648   and merge instructions for autoincrement and autodecrement addresses.
2649
2650   SIGNIFICANT is nonzero only the first time for each basic block.
2651   If it is nonzero, it points to a regset in which we store
2652   a 1 for each register that is set within the block.
2653
2654   BNUM is the number of the basic block.  */
2655
2656static void
2657propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
2658     register regset old;
2659     rtx first;
2660     rtx last;
2661     int final;
2662     regset significant;
2663     int bnum;
2664     int remove_dead_code;
2665{
2666  register rtx insn;
2667  rtx prev;
2668  regset live;
2669  regset dead;
2670
2671  /* Find the loop depth for this block.  Ignore loop level changes in the
2672     middle of the basic block -- for register allocation purposes, the
2673     important uses will be in the blocks wholely contained within the loop
2674     not in the loop pre-header or post-trailer.  */
2675  loop_depth = BASIC_BLOCK (bnum)->loop_depth;
2676
2677  dead = ALLOCA_REG_SET ();
2678  live = ALLOCA_REG_SET ();
2679
2680  cc0_live = 0;
2681  mem_set_list = NULL_RTX;
2682
2683  if (final)
2684    {
2685      register int i;
2686
2687      /* Process the regs live at the end of the block.
2688	 Mark them as not local to any one basic block. */
2689      EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2690				 {
2691				   REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2692				 });
2693    }
2694
2695  /* Scan the block an insn at a time from end to beginning.  */
2696
2697  for (insn = last; ; insn = prev)
2698    {
2699      prev = PREV_INSN (insn);
2700
2701      if (GET_CODE (insn) == NOTE)
2702	{
2703	  /* If this is a call to `setjmp' et al,
2704	     warn if any non-volatile datum is live.  */
2705
2706	  if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2707	    IOR_REG_SET (regs_live_at_setjmp, old);
2708	}
2709
2710      /* Update the life-status of regs for this insn.
2711	 First DEAD gets which regs are set in this insn
2712	 then LIVE gets which regs are used in this insn.
2713	 Then the regs live before the insn
2714	 are those live after, with DEAD regs turned off,
2715	 and then LIVE regs turned on.  */
2716
2717      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2718	{
2719	  register int i;
2720	  rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
2721	  int insn_is_dead = 0;
2722	  int libcall_is_dead = 0;
2723
2724	  if (remove_dead_code)
2725	    {
2726	      insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
2727	                      /* Don't delete something that refers to volatile storage!  */
2728	                      && ! INSN_VOLATILE (insn));
2729	      libcall_is_dead = (insn_is_dead && note != 0
2730	                         && libcall_dead_p (PATTERN (insn), old, note, insn));
2731	    }
2732
2733	  /* If an instruction consists of just dead store(s) on final pass,
2734	     "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2735	     We could really delete it with delete_insn, but that
2736	     can cause trouble for first or last insn in a basic block.  */
2737	  if (final && insn_is_dead)
2738	    {
2739	      rtx inote;
2740	      /* If the insn referred to a label, note that the label is
2741		 now less used.  */
2742	      for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
2743		{
2744		  if (REG_NOTE_KIND (inote) == REG_LABEL)
2745		    {
2746		      rtx label = XEXP (inote, 0);
2747		      rtx next;
2748		      LABEL_NUSES (label)--;
2749
2750		      /* If this label was attached to an ADDR_VEC, it's
2751			 safe to delete the ADDR_VEC.  In fact, it's pretty much
2752			 mandatory to delete it, because the ADDR_VEC may
2753			 be referencing labels that no longer exist.  */
2754		      if (LABEL_NUSES (label) == 0
2755			  && (next = next_nonnote_insn (label)) != NULL
2756			  && GET_CODE (next) == JUMP_INSN
2757			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
2758			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
2759			{
2760			  rtx pat = PATTERN (next);
2761			  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2762			  int len = XVECLEN (pat, diff_vec_p);
2763			  int i;
2764			  for (i = 0; i < len; i++)
2765			    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2766
2767			  flow_delete_insn (next);
2768			}
2769		    }
2770		}
2771
2772	      PUT_CODE (insn, NOTE);
2773	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2774	      NOTE_SOURCE_FILE (insn) = 0;
2775
2776	      /* CC0 is now known to be dead.  Either this insn used it,
2777		 in which case it doesn't anymore, or clobbered it,
2778		 so the next insn can't use it.  */
2779	      cc0_live = 0;
2780
2781	      /* If this insn is copying the return value from a library call,
2782		 delete the entire library call.  */
2783	      if (libcall_is_dead)
2784		{
2785		  rtx first = XEXP (note, 0);
2786		  rtx p = insn;
2787		  while (INSN_DELETED_P (first))
2788		    first = NEXT_INSN (first);
2789		  while (p != first)
2790		    {
2791		      p = PREV_INSN (p);
2792		      PUT_CODE (p, NOTE);
2793		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
2794		      NOTE_SOURCE_FILE (p) = 0;
2795		    }
2796		}
2797	      goto flushed;
2798	    }
2799
2800	  CLEAR_REG_SET (dead);
2801	  CLEAR_REG_SET (live);
2802
2803	  /* See if this is an increment or decrement that can be
2804	     merged into a following memory address.  */
2805#ifdef AUTO_INC_DEC
2806	  {
2807	    register rtx x = single_set (insn);
2808
2809	    /* Does this instruction increment or decrement a register?  */
2810	    if (!reload_completed
2811		&& final && x != 0
2812		&& GET_CODE (SET_DEST (x)) == REG
2813		&& (GET_CODE (SET_SRC (x)) == PLUS
2814		    || GET_CODE (SET_SRC (x)) == MINUS)
2815		&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
2816		&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2817		/* Ok, look for a following memory ref we can combine with.
2818		   If one is found, change the memory ref to a PRE_INC
2819		   or PRE_DEC, cancel this insn, and return 1.
2820		   Return 0 if nothing has been done.  */
2821		&& try_pre_increment_1 (insn))
2822	      goto flushed;
2823	  }
2824#endif /* AUTO_INC_DEC */
2825
2826	  /* If this is not the final pass, and this insn is copying the
2827	     value of a library call and it's dead, don't scan the
2828	     insns that perform the library call, so that the call's
2829	     arguments are not marked live.  */
2830	  if (libcall_is_dead)
2831	    {
2832	      /* Mark the dest reg as `significant'.  */
2833	      mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
2834
2835	      insn = XEXP (note, 0);
2836	      prev = PREV_INSN (insn);
2837	    }
2838	  else if (GET_CODE (PATTERN (insn)) == SET
2839		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2840		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2841		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
2842		   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
2843	    /* We have an insn to pop a constant amount off the stack.
2844	       (Such insns use PLUS regardless of the direction of the stack,
2845	       and any insn to adjust the stack by a constant is always a pop.)
2846	       These insns, if not dead stores, have no effect on life.  */
2847	    ;
2848	  else
2849	    {
2850	      /* Any regs live at the time of a call instruction
2851		 must not go in a register clobbered by calls.
2852		 Find all regs now live and record this for them.  */
2853
2854	      if (GET_CODE (insn) == CALL_INSN && final)
2855		EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2856					   {
2857					     REG_N_CALLS_CROSSED (i)++;
2858					   });
2859
2860	      /* LIVE gets the regs used in INSN;
2861		 DEAD gets those set by it.  Dead insns don't make anything
2862		 live.  */
2863
2864	      mark_set_regs (old, dead, PATTERN (insn),
2865			     final ? insn : NULL_RTX, significant);
2866
2867	      /* If an insn doesn't use CC0, it becomes dead since we
2868		 assume that every insn clobbers it.  So show it dead here;
2869		 mark_used_regs will set it live if it is referenced.  */
2870	      cc0_live = 0;
2871
2872	      if (! insn_is_dead)
2873		mark_used_regs (old, live, PATTERN (insn), final, insn);
2874
2875	      /* Sometimes we may have inserted something before INSN (such as
2876		 a move) when we make an auto-inc.  So ensure we will scan
2877		 those insns.  */
2878#ifdef AUTO_INC_DEC
2879	      prev = PREV_INSN (insn);
2880#endif
2881
2882	      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
2883		{
2884		  register int i;
2885
2886		  rtx note;
2887
2888	          for (note = CALL_INSN_FUNCTION_USAGE (insn);
2889		       note;
2890		       note = XEXP (note, 1))
2891		    if (GET_CODE (XEXP (note, 0)) == USE)
2892		      mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
2893				      final, insn);
2894
2895		  /* Each call clobbers all call-clobbered regs that are not
2896		     global or fixed.  Note that the function-value reg is a
2897		     call-clobbered reg, and mark_set_regs has already had
2898		     a chance to handle it.  */
2899
2900		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2901		    if (call_used_regs[i] && ! global_regs[i]
2902			&& ! fixed_regs[i])
2903		      SET_REGNO_REG_SET (dead, i);
2904
2905		  /* The stack ptr is used (honorarily) by a CALL insn.  */
2906		  SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2907
2908		  /* Calls may also reference any of the global registers,
2909		     so they are made live.  */
2910		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2911		    if (global_regs[i])
2912		      mark_used_regs (old, live,
2913				      gen_rtx_REG (reg_raw_mode[i], i),
2914				      final, insn);
2915
2916		  /* Calls also clobber memory.  */
2917		  mem_set_list = NULL_RTX;
2918		}
2919
2920	      /* Update OLD for the registers used or set.  */
2921	      AND_COMPL_REG_SET (old, dead);
2922	      IOR_REG_SET (old, live);
2923
2924	    }
2925
2926	  /* On final pass, update counts of how many insns each reg is live
2927	     at.  */
2928	  if (final)
2929	    EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2930				       { REG_LIVE_LENGTH (i)++; });
2931	}
2932    flushed: ;
2933      if (insn == first)
2934	break;
2935    }
2936
2937  FREE_REG_SET (dead);
2938  FREE_REG_SET (live);
2939}
2940
2941/* Return 1 if X (the body of an insn, or part of it) is just dead stores
2942   (SET expressions whose destinations are registers dead after the insn).
2943   NEEDED is the regset that says which regs are alive after the insn.
2944
2945   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2946
2947   If X is the entire body of an insn, NOTES contains the reg notes
2948   pertaining to the insn.  */
2949
2950static int
2951insn_dead_p (x, needed, call_ok, notes)
2952     rtx x;
2953     regset needed;
2954     int call_ok;
2955     rtx notes ATTRIBUTE_UNUSED;
2956{
2957  enum rtx_code code = GET_CODE (x);
2958
2959#ifdef AUTO_INC_DEC
2960  /* If flow is invoked after reload, we must take existing AUTO_INC
2961     expresions into account.  */
2962  if (reload_completed)
2963    {
2964      for ( ; notes; notes = XEXP (notes, 1))
2965	{
2966	  if (REG_NOTE_KIND (notes) == REG_INC)
2967	    {
2968	      int regno = REGNO (XEXP (notes, 0));
2969
2970	      /* Don't delete insns to set global regs.  */
2971	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2972		  || REGNO_REG_SET_P (needed, regno))
2973		return 0;
2974	    }
2975	}
2976    }
2977#endif
2978
2979  /* If setting something that's a reg or part of one,
2980     see if that register's altered value will be live.  */
2981
2982  if (code == SET)
2983    {
2984      rtx r = SET_DEST (x);
2985
2986      /* A SET that is a subroutine call cannot be dead.  */
2987      if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2988	return 0;
2989
2990#ifdef HAVE_cc0
2991      if (GET_CODE (r) == CC0)
2992	return ! cc0_live;
2993#endif
2994
2995      if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
2996	{
2997	  rtx temp;
2998	  /* Walk the set of memory locations we are currently tracking
2999	     and see if one is an identical match to this memory location.
3000	     If so, this memory write is dead (remember, we're walking
3001	     backwards from the end of the block to the start.  */
3002	  temp = mem_set_list;
3003	  while (temp)
3004	    {
3005	      if (rtx_equal_p (XEXP (temp, 0), r))
3006		return 1;
3007	      temp = XEXP (temp, 1);
3008	    }
3009	}
3010
3011      while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3012	     || GET_CODE (r) == ZERO_EXTRACT)
3013	r = SUBREG_REG (r);
3014
3015      if (GET_CODE (r) == REG)
3016	{
3017	  int regno = REGNO (r);
3018
3019	  /* Don't delete insns to set global regs.  */
3020	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3021	      /* Make sure insns to set frame pointer aren't deleted.  */
3022	      || (regno == FRAME_POINTER_REGNUM
3023		  && (! reload_completed || frame_pointer_needed))
3024#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3025	      || (regno == HARD_FRAME_POINTER_REGNUM
3026		  && (! reload_completed || frame_pointer_needed))
3027#endif
3028#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3029	      /* Make sure insns to set arg pointer are never deleted
3030		 (if the arg pointer isn't fixed, there will be a USE for
3031		 it, so we can treat it normally).  */
3032	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3033#endif
3034	      || REGNO_REG_SET_P (needed, regno))
3035	    return 0;
3036
3037	  /* If this is a hard register, verify that subsequent words are
3038	     not needed.  */
3039	  if (regno < FIRST_PSEUDO_REGISTER)
3040	    {
3041	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3042
3043	      while (--n > 0)
3044		if (REGNO_REG_SET_P (needed, regno+n))
3045		  return 0;
3046	    }
3047
3048	  return 1;
3049	}
3050    }
3051
3052  /* If performing several activities,
3053     insn is dead if each activity is individually dead.
3054     Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3055     that's inside a PARALLEL doesn't make the insn worth keeping.  */
3056  else if (code == PARALLEL)
3057    {
3058      int i = XVECLEN (x, 0);
3059
3060      for (i--; i >= 0; i--)
3061	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3062	    && GET_CODE (XVECEXP (x, 0, i)) != USE
3063	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3064	  return 0;
3065
3066      return 1;
3067    }
3068
3069  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
3070     is not necessarily true for hard registers.  */
3071  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3072	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3073	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3074    return 1;
3075
3076  /* We do not check other CLOBBER or USE here.  An insn consisting of just
3077     a CLOBBER or just a USE should not be deleted.  */
3078  return 0;
3079}
3080
3081/* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3082   return 1 if the entire library call is dead.
3083   This is true if X copies a register (hard or pseudo)
3084   and if the hard return  reg of the call insn is dead.
3085   (The caller should have tested the destination of X already for death.)
3086
3087   If this insn doesn't just copy a register, then we don't
3088   have an ordinary libcall.  In that case, cse could not have
3089   managed to substitute the source for the dest later on,
3090   so we can assume the libcall is dead.
3091
3092   NEEDED is the bit vector of pseudoregs live before this insn.
3093   NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */
3094
3095static int
3096libcall_dead_p (x, needed, note, insn)
3097     rtx x;
3098     regset needed;
3099     rtx note;
3100     rtx insn;
3101{
3102  register RTX_CODE code = GET_CODE (x);
3103
3104  if (code == SET)
3105    {
3106      register rtx r = SET_SRC (x);
3107      if (GET_CODE (r) == REG)
3108	{
3109	  rtx call = XEXP (note, 0);
3110	  rtx call_pat;
3111	  register int i;
3112
3113	  /* Find the call insn.  */
3114	  while (call != insn && GET_CODE (call) != CALL_INSN)
3115	    call = NEXT_INSN (call);
3116
3117	  /* If there is none, do nothing special,
3118	     since ordinary death handling can understand these insns.  */
3119	  if (call == insn)
3120	    return 0;
3121
3122	  /* See if the hard reg holding the value is dead.
3123	     If this is a PARALLEL, find the call within it.  */
3124	  call_pat = PATTERN (call);
3125	  if (GET_CODE (call_pat) == PARALLEL)
3126	    {
3127	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3128		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3129		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3130		  break;
3131
3132	      /* This may be a library call that is returning a value
3133		 via invisible pointer.  Do nothing special, since
3134		 ordinary death handling can understand these insns.  */
3135	      if (i < 0)
3136		return 0;
3137
3138	      call_pat = XVECEXP (call_pat, 0, i);
3139	    }
3140
3141	  return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3142	}
3143    }
3144  return 1;
3145}
3146
3147/* Return 1 if register REGNO was used before it was set, i.e. if it is
3148   live at function entry.  Don't count global register variables, variables
3149   in registers that can be used for function arg passing, or variables in
3150   fixed hard registers.  */
3151
3152int
3153regno_uninitialized (regno)
3154     int regno;
3155{
3156  if (n_basic_blocks == 0
3157      || (regno < FIRST_PSEUDO_REGISTER
3158	  && (global_regs[regno]
3159	      || fixed_regs[regno]
3160	      || FUNCTION_ARG_REGNO_P (regno))))
3161    return 0;
3162
3163  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3164}
3165
3166/* 1 if register REGNO was alive at a place where `setjmp' was called
3167   and was set more than once or is an argument.
3168   Such regs may be clobbered by `longjmp'.  */
3169
3170int
3171regno_clobbered_at_setjmp (regno)
3172     int regno;
3173{
3174  if (n_basic_blocks == 0)
3175    return 0;
3176
3177  return ((REG_N_SETS (regno) > 1
3178	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3179	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3180}
3181
3182/* INSN references memory, possibly using autoincrement addressing modes.
3183   Find any entries on the mem_set_list that need to be invalidated due
3184   to an address change.  */
3185static void
3186invalidate_mems_from_autoinc (insn)
3187     rtx insn;
3188{
3189  rtx note = REG_NOTES (insn);
3190  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3191    {
3192      if (REG_NOTE_KIND (note) == REG_INC)
3193        {
3194          rtx temp = mem_set_list;
3195          rtx prev = NULL_RTX;
3196
3197          while (temp)
3198	    {
3199	      if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3200	        {
3201	          /* Splice temp out of list.  */
3202	          if (prev)
3203	            XEXP (prev, 1) = XEXP (temp, 1);
3204	          else
3205	            mem_set_list = XEXP (temp, 1);
3206	        }
3207	      else
3208	        prev = temp;
3209              temp = XEXP (temp, 1);
3210	    }
3211	}
3212    }
3213}
3214
3215/* Process the registers that are set within X.
3216   Their bits are set to 1 in the regset DEAD,
3217   because they are dead prior to this insn.
3218
3219   If INSN is nonzero, it is the insn being processed
3220   and the fact that it is nonzero implies this is the FINAL pass
3221   in propagate_block.  In this case, various info about register
3222   usage is stored, LOG_LINKS fields of insns are set up.  */
3223
3224static void
3225mark_set_regs (needed, dead, x, insn, significant)
3226     regset needed;
3227     regset dead;
3228     rtx x;
3229     rtx insn;
3230     regset significant;
3231{
3232  register RTX_CODE code = GET_CODE (x);
3233
3234  if (code == SET || code == CLOBBER)
3235    mark_set_1 (needed, dead, x, insn, significant);
3236  else if (code == PARALLEL)
3237    {
3238      register int i;
3239      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3240	{
3241	  code = GET_CODE (XVECEXP (x, 0, i));
3242	  if (code == SET || code == CLOBBER)
3243	    mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
3244	}
3245    }
3246}
3247
3248/* Process a single SET rtx, X.  */
3249
3250static void
3251mark_set_1 (needed, dead, x, insn, significant)
3252     regset needed;
3253     regset dead;
3254     rtx x;
3255     rtx insn;
3256     regset significant;
3257{
3258  register int regno;
3259  register rtx reg = SET_DEST (x);
3260
3261  /* Some targets place small structures in registers for
3262     return values of functions.  We have to detect this
3263     case specially here to get correct flow information.  */
3264  if (GET_CODE (reg) == PARALLEL
3265      && GET_MODE (reg) == BLKmode)
3266    {
3267      register int i;
3268
3269      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3270	  mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
3271      return;
3272    }
3273
3274  /* Modifying just one hardware register of a multi-reg value
3275     or just a byte field of a register
3276     does not mean the value from before this insn is now dead.
3277     But it does mean liveness of that register at the end of the block
3278     is significant.
3279
3280     Within mark_set_1, however, we treat it as if the register is
3281     indeed modified.  mark_used_regs will, however, also treat this
3282     register as being used.  Thus, we treat these insns as setting a
3283     new value for the register as a function of its old value.  This
3284     cases LOG_LINKS to be made appropriately and this will help combine.  */
3285
3286  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3287	 || GET_CODE (reg) == SIGN_EXTRACT
3288	 || GET_CODE (reg) == STRICT_LOW_PART)
3289    reg = XEXP (reg, 0);
3290
3291  /* If this set is a MEM, then it kills any aliased writes.
3292     If this set is a REG, then it kills any MEMs which use the reg.  */
3293  if (GET_CODE (reg) == MEM
3294      || GET_CODE (reg) == REG)
3295    {
3296      rtx temp = mem_set_list;
3297      rtx prev = NULL_RTX;
3298
3299      while (temp)
3300	{
3301	  if ((GET_CODE (reg) == MEM
3302	       && output_dependence (XEXP (temp, 0), reg))
3303	      || (GET_CODE (reg) == REG
3304		  && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3305	    {
3306	      /* Splice this entry out of the list.  */
3307	      if (prev)
3308		XEXP (prev, 1) = XEXP (temp, 1);
3309	      else
3310		mem_set_list = XEXP (temp, 1);
3311	    }
3312	  else
3313	    prev = temp;
3314	  temp = XEXP (temp, 1);
3315	}
3316    }
3317
3318  /* If the memory reference had embedded side effects (autoincrement
3319     address modes.  Then we may need to kill some entries on the
3320     memory set list.  */
3321  if (insn && GET_CODE (reg) == MEM)
3322    invalidate_mems_from_autoinc (insn);
3323
3324  if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3325      /* We do not know the size of a BLKmode store, so we do not track
3326	 them for redundant store elimination.  */
3327      && GET_MODE (reg) != BLKmode
3328      /* There are no REG_INC notes for SP, so we can't assume we'll see
3329	 everything that invalidates it.  To be safe, don't eliminate any
3330	 stores though SP; none of them should be redundant anyway.  */
3331      && ! reg_mentioned_p (stack_pointer_rtx, reg))
3332    mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list);
3333
3334  if (GET_CODE (reg) == REG
3335      && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM
3336				  && (! reload_completed || frame_pointer_needed)))
3337#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3338      && ! (regno == HARD_FRAME_POINTER_REGNUM
3339	    && (! reload_completed || frame_pointer_needed))
3340#endif
3341#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3342      && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3343#endif
3344      && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3345    /* && regno != STACK_POINTER_REGNUM) -- let's try without this.  */
3346    {
3347      int some_needed = REGNO_REG_SET_P (needed, regno);
3348      int some_not_needed = ! some_needed;
3349
3350      /* Mark it as a significant register for this basic block.  */
3351      if (significant)
3352	SET_REGNO_REG_SET (significant, regno);
3353
3354      /* Mark it as dead before this insn.  */
3355      SET_REGNO_REG_SET (dead, regno);
3356
3357      /* A hard reg in a wide mode may really be multiple registers.
3358	 If so, mark all of them just like the first.  */
3359      if (regno < FIRST_PSEUDO_REGISTER)
3360	{
3361	  int n;
3362
3363	  /* Nothing below is needed for the stack pointer; get out asap.
3364	     Eg, log links aren't needed, since combine won't use them.  */
3365	  if (regno == STACK_POINTER_REGNUM)
3366	    return;
3367
3368	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3369	  while (--n > 0)
3370	    {
3371	      int regno_n = regno + n;
3372	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3373	      if (significant)
3374		SET_REGNO_REG_SET (significant, regno_n);
3375
3376	      SET_REGNO_REG_SET (dead, regno_n);
3377	      some_needed |= needed_regno;
3378	      some_not_needed |= ! needed_regno;
3379	    }
3380	}
3381      /* Additional data to record if this is the final pass.  */
3382      if (insn)
3383	{
3384	  register rtx y = reg_next_use[regno];
3385	  register int blocknum = BLOCK_NUM (insn);
3386
3387	  /* If this is a hard reg, record this function uses the reg.  */
3388
3389	  if (regno < FIRST_PSEUDO_REGISTER)
3390	    {
3391	      register int i;
3392	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3393
3394	      for (i = regno; i < endregno; i++)
3395		{
3396		  /* The next use is no longer "next", since a store
3397		     intervenes.  */
3398		  reg_next_use[i] = 0;
3399
3400		  regs_ever_live[i] = 1;
3401		  REG_N_SETS (i)++;
3402		}
3403	    }
3404	  else
3405	    {
3406	      /* The next use is no longer "next", since a store
3407		 intervenes.  */
3408	      reg_next_use[regno] = 0;
3409
3410	      /* Keep track of which basic blocks each reg appears in.  */
3411
3412	      if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3413		REG_BASIC_BLOCK (regno) = blocknum;
3414	      else if (REG_BASIC_BLOCK (regno) != blocknum)
3415		REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3416
3417	      /* Count (weighted) references, stores, etc.  This counts a
3418		 register twice if it is modified, but that is correct.  */
3419	      REG_N_SETS (regno)++;
3420
3421	      REG_N_REFS (regno) += loop_depth;
3422
3423	      /* The insns where a reg is live are normally counted
3424		 elsewhere, but we want the count to include the insn
3425		 where the reg is set, and the normal counting mechanism
3426		 would not count it.  */
3427	      REG_LIVE_LENGTH (regno)++;
3428	    }
3429
3430	  if (! some_not_needed)
3431	    {
3432	      /* Make a logical link from the next following insn
3433		 that uses this register, back to this insn.
3434		 The following insns have already been processed.
3435
3436		 We don't build a LOG_LINK for hard registers containing
3437		 in ASM_OPERANDs.  If these registers get replaced,
3438		 we might wind up changing the semantics of the insn,
3439		 even if reload can make what appear to be valid assignments
3440		 later.  */
3441	      if (y && (BLOCK_NUM (y) == blocknum)
3442		  && (regno >= FIRST_PSEUDO_REGISTER
3443		      || asm_noperands (PATTERN (y)) < 0))
3444		LOG_LINKS (y)
3445		  = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
3446	    }
3447	  else if (! some_needed)
3448	    {
3449	      /* Note that dead stores have already been deleted when possible
3450		 If we get here, we have found a dead store that cannot
3451		 be eliminated (because the same insn does something useful).
3452		 Indicate this by marking the reg being set as dying here.  */
3453	      REG_NOTES (insn)
3454		= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3455	      REG_N_DEATHS (REGNO (reg))++;
3456	    }
3457	  else
3458	    {
3459	      /* This is a case where we have a multi-word hard register
3460		 and some, but not all, of the words of the register are
3461		 needed in subsequent insns.  Write REG_UNUSED notes
3462		 for those parts that were not needed.  This case should
3463		 be rare.  */
3464
3465	      int i;
3466
3467	      for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
3468		   i >= 0; i--)
3469		if (!REGNO_REG_SET_P (needed, regno + i))
3470		  REG_NOTES (insn)
3471		    = gen_rtx_EXPR_LIST (REG_UNUSED,
3472					 gen_rtx_REG (reg_raw_mode[regno + i],
3473						      regno + i),
3474					 REG_NOTES (insn));
3475	    }
3476	}
3477    }
3478  else if (GET_CODE (reg) == REG)
3479    reg_next_use[regno] = 0;
3480
3481  /* If this is the last pass and this is a SCRATCH, show it will be dying
3482     here and count it.  */
3483  else if (GET_CODE (reg) == SCRATCH && insn != 0)
3484    {
3485      REG_NOTES (insn)
3486	= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3487    }
3488}
3489
3490#ifdef AUTO_INC_DEC
3491
3492/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
3493   reference.  */
3494
3495static void
3496find_auto_inc (needed, x, insn)
3497     regset needed;
3498     rtx x;
3499     rtx insn;
3500{
3501  rtx addr = XEXP (x, 0);
3502  HOST_WIDE_INT offset = 0;
3503  rtx set;
3504
3505  /* Here we detect use of an index register which might be good for
3506     postincrement, postdecrement, preincrement, or predecrement.  */
3507
3508  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3509    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3510
3511  if (GET_CODE (addr) == REG)
3512    {
3513      register rtx y;
3514      register int size = GET_MODE_SIZE (GET_MODE (x));
3515      rtx use;
3516      rtx incr;
3517      int regno = REGNO (addr);
3518
3519      /* Is the next use an increment that might make auto-increment? */
3520      if ((incr = reg_next_use[regno]) != 0
3521	  && (set = single_set (incr)) != 0
3522	  && GET_CODE (set) == SET
3523	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
3524	  /* Can't add side effects to jumps; if reg is spilled and
3525	     reloaded, there's no way to store back the altered value.  */
3526	  && GET_CODE (insn) != JUMP_INSN
3527	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
3528	  && XEXP (y, 0) == addr
3529	  && GET_CODE (XEXP (y, 1)) == CONST_INT
3530	  && ((HAVE_POST_INCREMENT
3531	       && (INTVAL (XEXP (y, 1)) == size && offset == 0))
3532	      || (HAVE_POST_DECREMENT
3533		  && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
3534	      || (HAVE_PRE_INCREMENT
3535		  && (INTVAL (XEXP (y, 1)) == size && offset == size))
3536	      || (HAVE_PRE_DECREMENT
3537		  && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
3538	  /* Make sure this reg appears only once in this insn.  */
3539	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
3540	      use != 0 && use != (rtx) 1))
3541	{
3542	  rtx q = SET_DEST (set);
3543	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
3544				    ? (offset ? PRE_INC : POST_INC)
3545				    : (offset ? PRE_DEC : POST_DEC));
3546
3547	  if (dead_or_set_p (incr, addr))
3548	    {
3549	      /* This is the simple case.  Try to make the auto-inc.  If
3550		 we can't, we are done.  Otherwise, we will do any
3551		 needed updates below.  */
3552	      if (! validate_change (insn, &XEXP (x, 0),
3553				     gen_rtx_fmt_e (inc_code, Pmode, addr),
3554				     0))
3555		return;
3556	    }
3557	  else if (GET_CODE (q) == REG
3558		   /* PREV_INSN used here to check the semi-open interval
3559		      [insn,incr).  */
3560		   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
3561		   /* We must also check for sets of q as q may be
3562		      a call clobbered hard register and there may
3563		      be a call between PREV_INSN (insn) and incr.  */
3564		   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
3565	    {
3566	      /* We have *p followed sometime later by q = p+size.
3567		 Both p and q must be live afterward,
3568		 and q is not used between INSN and its assignment.
3569		 Change it to q = p, ...*q..., q = q+size.
3570		 Then fall into the usual case.  */
3571	      rtx insns, temp;
3572	      basic_block bb;
3573
3574	      start_sequence ();
3575	      emit_move_insn (q, addr);
3576	      insns = get_insns ();
3577	      end_sequence ();
3578
3579	      bb = BLOCK_FOR_INSN (insn);
3580	      for (temp = insns; temp; temp = NEXT_INSN (temp))
3581		set_block_for_insn (temp, bb);
3582
3583	      /* If we can't make the auto-inc, or can't make the
3584		 replacement into Y, exit.  There's no point in making
3585		 the change below if we can't do the auto-inc and doing
3586		 so is not correct in the pre-inc case.  */
3587
3588	      validate_change (insn, &XEXP (x, 0),
3589			       gen_rtx_fmt_e (inc_code, Pmode, q),
3590			       1);
3591	      validate_change (incr, &XEXP (y, 0), q, 1);
3592	      if (! apply_change_group ())
3593		return;
3594
3595	      /* We now know we'll be doing this change, so emit the
3596		 new insn(s) and do the updates.  */
3597	      emit_insns_before (insns, insn);
3598
3599	      if (BLOCK_FOR_INSN (insn)->head == insn)
3600		BLOCK_FOR_INSN (insn)->head = insns;
3601
3602	      /* INCR will become a NOTE and INSN won't contain a
3603		 use of ADDR.  If a use of ADDR was just placed in
3604		 the insn before INSN, make that the next use.
3605		 Otherwise, invalidate it.  */
3606	      if (GET_CODE (PREV_INSN (insn)) == INSN
3607		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3608		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
3609		reg_next_use[regno] = PREV_INSN (insn);
3610	      else
3611		reg_next_use[regno] = 0;
3612
3613	      addr = q;
3614	      regno = REGNO (q);
3615
3616	      /* REGNO is now used in INCR which is below INSN, but
3617		 it previously wasn't live here.  If we don't mark
3618		 it as needed, we'll put a REG_DEAD note for it
3619		 on this insn, which is incorrect.  */
3620	      SET_REGNO_REG_SET (needed, regno);
3621
3622	      /* If there are any calls between INSN and INCR, show
3623		 that REGNO now crosses them.  */
3624	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3625		if (GET_CODE (temp) == CALL_INSN)
3626		  REG_N_CALLS_CROSSED (regno)++;
3627	    }
3628	  else
3629	    return;
3630
3631	  /* If we haven't returned, it means we were able to make the
3632	     auto-inc, so update the status.  First, record that this insn
3633	     has an implicit side effect.  */
3634
3635	  REG_NOTES (insn)
3636	    = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
3637
3638	  /* Modify the old increment-insn to simply copy
3639	     the already-incremented value of our register.  */
3640	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
3641	    abort ();
3642
3643	  /* If that makes it a no-op (copying the register into itself) delete
3644	     it so it won't appear to be a "use" and a "set" of this
3645	     register.  */
3646	  if (SET_DEST (set) == addr)
3647	    {
3648	      PUT_CODE (incr, NOTE);
3649	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3650	      NOTE_SOURCE_FILE (incr) = 0;
3651	    }
3652
3653	  if (regno >= FIRST_PSEUDO_REGISTER)
3654	    {
3655	      /* Count an extra reference to the reg.  When a reg is
3656		 incremented, spilling it is worse, so we want to make
3657		 that less likely.  */
3658	      REG_N_REFS (regno) += loop_depth;
3659
3660	      /* Count the increment as a setting of the register,
3661		 even though it isn't a SET in rtl.  */
3662	      REG_N_SETS (regno)++;
3663	    }
3664	}
3665    }
3666}
3667#endif /* AUTO_INC_DEC */
3668
3669/* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3670   This is done assuming the registers needed from X
3671   are those that have 1-bits in NEEDED.
3672
3673   On the final pass, FINAL is 1.  This means try for autoincrement
3674   and count the uses and deaths of each pseudo-reg.
3675
3676   INSN is the containing instruction.  If INSN is dead, this function is not
3677   called.  */
3678
3679static void
3680mark_used_regs (needed, live, x, final, insn)
3681     regset needed;
3682     regset live;
3683     rtx x;
3684     int final;
3685     rtx insn;
3686{
3687  register RTX_CODE code;
3688  register int regno;
3689  int i;
3690
3691 retry:
3692  code = GET_CODE (x);
3693  switch (code)
3694    {
3695    case LABEL_REF:
3696    case SYMBOL_REF:
3697    case CONST_INT:
3698    case CONST:
3699    case CONST_DOUBLE:
3700    case PC:
3701    case ADDR_VEC:
3702    case ADDR_DIFF_VEC:
3703      return;
3704
3705#ifdef HAVE_cc0
3706    case CC0:
3707      cc0_live = 1;
3708      return;
3709#endif
3710
3711    case CLOBBER:
3712      /* If we are clobbering a MEM, mark any registers inside the address
3713	 as being used.  */
3714      if (GET_CODE (XEXP (x, 0)) == MEM)
3715	mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
3716      return;
3717
3718    case MEM:
3719      /* Invalidate the data for the last MEM stored, but only if MEM is
3720	 something that can be stored into.  */
3721      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3722	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3723	; /* needn't clear the memory set list */
3724      else
3725	{
3726	  rtx temp = mem_set_list;
3727	  rtx prev = NULL_RTX;
3728
3729	  while (temp)
3730	    {
3731	      if (anti_dependence (XEXP (temp, 0), x))
3732		{
3733		  /* Splice temp out of the list.  */
3734		  if (prev)
3735		    XEXP (prev, 1) = XEXP (temp, 1);
3736		  else
3737		    mem_set_list = XEXP (temp, 1);
3738		}
3739	      else
3740		prev = temp;
3741	      temp = XEXP (temp, 1);
3742	    }
3743	}
3744
3745      /* If the memory reference had embedded side effects (autoincrement
3746	 address modes.  Then we may need to kill some entries on the
3747	 memory set list.  */
3748      if (insn)
3749	invalidate_mems_from_autoinc (insn);
3750
3751#ifdef AUTO_INC_DEC
3752      if (final)
3753	find_auto_inc (needed, x, insn);
3754#endif
3755      break;
3756
3757    case SUBREG:
3758      if (GET_CODE (SUBREG_REG (x)) == REG
3759	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3760	  && (GET_MODE_SIZE (GET_MODE (x))
3761	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3762	REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
3763
3764      /* While we're here, optimize this case.  */
3765      x = SUBREG_REG (x);
3766
3767      /* In case the SUBREG is not of a register, don't optimize */
3768      if (GET_CODE (x) != REG)
3769	{
3770	  mark_used_regs (needed, live, x, final, insn);
3771	  return;
3772	}
3773
3774      /* ... fall through ...  */
3775
3776    case REG:
3777      /* See a register other than being set
3778	 => mark it as needed.  */
3779
3780      regno = REGNO (x);
3781      {
3782	int some_needed = REGNO_REG_SET_P (needed, regno);
3783	int some_not_needed = ! some_needed;
3784
3785	SET_REGNO_REG_SET (live, regno);
3786
3787	/* A hard reg in a wide mode may really be multiple registers.
3788	   If so, mark all of them just like the first.  */
3789	if (regno < FIRST_PSEUDO_REGISTER)
3790	  {
3791	    int n;
3792
3793	    /* For stack ptr or fixed arg pointer,
3794	       nothing below can be necessary, so waste no more time.  */
3795	    if (regno == STACK_POINTER_REGNUM
3796#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3797		|| (regno == HARD_FRAME_POINTER_REGNUM
3798		    && (! reload_completed || frame_pointer_needed))
3799#endif
3800#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3801		|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3802#endif
3803		|| (regno == FRAME_POINTER_REGNUM
3804		    && (! reload_completed || frame_pointer_needed)))
3805	      {
3806		/* If this is a register we are going to try to eliminate,
3807		   don't mark it live here.  If we are successful in
3808		   eliminating it, it need not be live unless it is used for
3809		   pseudos, in which case it will have been set live when
3810		   it was allocated to the pseudos.  If the register will not
3811		   be eliminated, reload will set it live at that point.  */
3812
3813		if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
3814		  regs_ever_live[regno] = 1;
3815		return;
3816	      }
3817	    /* No death notes for global register variables;
3818	       their values are live after this function exits.  */
3819	    if (global_regs[regno])
3820	      {
3821		if (final)
3822		  reg_next_use[regno] = insn;
3823		return;
3824	      }
3825
3826	    n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3827	    while (--n > 0)
3828	      {
3829		int regno_n = regno + n;
3830		int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3831
3832		SET_REGNO_REG_SET (live, regno_n);
3833		some_needed |= needed_regno;
3834		some_not_needed |= ! needed_regno;
3835	      }
3836	  }
3837	if (final)
3838	  {
3839	    /* Record where each reg is used, so when the reg
3840	       is set we know the next insn that uses it.  */
3841
3842	    reg_next_use[regno] = insn;
3843
3844	    if (regno < FIRST_PSEUDO_REGISTER)
3845	      {
3846		/* If a hard reg is being used,
3847		   record that this function does use it.  */
3848
3849		i = HARD_REGNO_NREGS (regno, GET_MODE (x));
3850		if (i == 0)
3851		  i = 1;
3852		do
3853		  regs_ever_live[regno + --i] = 1;
3854		while (i > 0);
3855	      }
3856	    else
3857	      {
3858		/* Keep track of which basic block each reg appears in.  */
3859
3860		register int blocknum = BLOCK_NUM (insn);
3861
3862		if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3863		  REG_BASIC_BLOCK (regno) = blocknum;
3864		else if (REG_BASIC_BLOCK (regno) != blocknum)
3865		  REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3866
3867		/* Count (weighted) number of uses of each reg.  */
3868
3869		REG_N_REFS (regno) += loop_depth;
3870	      }
3871
3872	    /* Record and count the insns in which a reg dies.
3873	       If it is used in this insn and was dead below the insn
3874	       then it dies in this insn.  If it was set in this insn,
3875	       we do not make a REG_DEAD note; likewise if we already
3876	       made such a note.  */
3877
3878	    if (some_not_needed
3879		&& ! dead_or_set_p (insn, x)
3880#if 0
3881		&& (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
3882#endif
3883		)
3884	      {
3885		/* Check for the case where the register dying partially
3886		   overlaps the register set by this insn.  */
3887		if (regno < FIRST_PSEUDO_REGISTER
3888		    && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
3889		  {
3890		    int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3891		    while (--n >= 0)
3892		      some_needed |= dead_or_set_regno_p (insn, regno + n);
3893		  }
3894
3895		/* If none of the words in X is needed, make a REG_DEAD
3896		   note.  Otherwise, we must make partial REG_DEAD notes.  */
3897		if (! some_needed)
3898		  {
3899		    REG_NOTES (insn)
3900		      = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
3901		    REG_N_DEATHS (regno)++;
3902		  }
3903		else
3904		  {
3905		    int i;
3906
3907		    /* Don't make a REG_DEAD note for a part of a register
3908		       that is set in the insn.  */
3909
3910		    for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
3911			 i >= 0; i--)
3912		      if (!REGNO_REG_SET_P (needed, regno + i)
3913			  && ! dead_or_set_regno_p (insn, regno + i))
3914			REG_NOTES (insn)
3915			  = gen_rtx_EXPR_LIST (REG_DEAD,
3916					       gen_rtx_REG (reg_raw_mode[regno + i],
3917							    regno + i),
3918					       REG_NOTES (insn));
3919		  }
3920	      }
3921	  }
3922      }
3923      return;
3924
3925    case SET:
3926      {
3927	register rtx testreg = SET_DEST (x);
3928	int mark_dest = 0;
3929
3930	/* If storing into MEM, don't show it as being used.  But do
3931	   show the address as being used.  */
3932	if (GET_CODE (testreg) == MEM)
3933	  {
3934#ifdef AUTO_INC_DEC
3935	    if (final)
3936	      find_auto_inc (needed, testreg, insn);
3937#endif
3938	    mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
3939	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
3940	    return;
3941	  }
3942
3943	/* Storing in STRICT_LOW_PART is like storing in a reg
3944	   in that this SET might be dead, so ignore it in TESTREG.
3945	   but in some other ways it is like using the reg.
3946
3947	   Storing in a SUBREG or a bit field is like storing the entire
3948	   register in that if the register's value is not used
3949	   then this SET is not needed.  */
3950	while (GET_CODE (testreg) == STRICT_LOW_PART
3951	       || GET_CODE (testreg) == ZERO_EXTRACT
3952	       || GET_CODE (testreg) == SIGN_EXTRACT
3953	       || GET_CODE (testreg) == SUBREG)
3954	  {
3955	    if (GET_CODE (testreg) == SUBREG
3956		&& GET_CODE (SUBREG_REG (testreg)) == REG
3957		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3958		&& (GET_MODE_SIZE (GET_MODE (testreg))
3959		    != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
3960	      REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
3961
3962	    /* Modifying a single register in an alternate mode
3963	       does not use any of the old value.  But these other
3964	       ways of storing in a register do use the old value.  */
3965	    if (GET_CODE (testreg) == SUBREG
3966		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
3967	      ;
3968	    else
3969	      mark_dest = 1;
3970
3971	    testreg = XEXP (testreg, 0);
3972	  }
3973
3974	/* If this is a store into a register,
3975	   recursively scan the value being stored.  */
3976
3977	if ((GET_CODE (testreg) == PARALLEL
3978	     && GET_MODE (testreg) == BLKmode)
3979	    || (GET_CODE (testreg) == REG
3980		&& (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
3981						&& (! reload_completed || frame_pointer_needed)))
3982#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3983		&& ! (regno == HARD_FRAME_POINTER_REGNUM
3984		      && (! reload_completed || frame_pointer_needed))
3985#endif
3986#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3987		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3988#endif
3989		))
3990	  /* We used to exclude global_regs here, but that seems wrong.
3991	     Storing in them is like storing in mem.  */
3992	  {
3993	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
3994	    if (mark_dest)
3995	      mark_used_regs (needed, live, SET_DEST (x), final, insn);
3996	    return;
3997	  }
3998      }
3999      break;
4000
4001    case RETURN:
4002      /* If exiting needs the right stack value, consider this insn as
4003	 using the stack pointer.  In any event, consider it as using
4004	 all global registers and all registers used by return.  */
4005      if (! EXIT_IGNORE_STACK
4006	  || (! FRAME_POINTER_REQUIRED
4007	      && ! current_function_calls_alloca
4008	      && flag_omit_frame_pointer)
4009	  || current_function_sp_is_unchanging)
4010	SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
4011
4012      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4013	if (global_regs[i]
4014#ifdef EPILOGUE_USES
4015	    || EPILOGUE_USES (i)
4016#endif
4017	    )
4018	  SET_REGNO_REG_SET (live, i);
4019      break;
4020
4021    case ASM_OPERANDS:
4022    case UNSPEC_VOLATILE:
4023    case TRAP_IF:
4024    case ASM_INPUT:
4025      {
4026	/* Traditional and volatile asm instructions must be considered to use
4027	   and clobber all hard registers, all pseudo-registers and all of
4028	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
4029
4030	   Consider for instance a volatile asm that changes the fpu rounding
4031	   mode.  An insn should not be moved across this even if it only uses
4032	   pseudo-regs because it might give an incorrectly rounded result.
4033
4034	   ?!? Unfortunately, marking all hard registers as live causes massive
4035	   problems for the register allocator and marking all pseudos as live
4036	   creates mountains of uninitialized variable warnings.
4037
4038	   So for now, just clear the memory set list and mark any regs
4039	   we can find in ASM_OPERANDS as used.  */
4040	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4041	  mem_set_list = NULL_RTX;
4042
4043        /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4044	   We can not just fall through here since then we would be confused
4045	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4046	   traditional asms unlike their normal usage.  */
4047	if (code == ASM_OPERANDS)
4048	  {
4049	    int j;
4050
4051	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4052	      mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4053			      final, insn);
4054	  }
4055	break;
4056      }
4057
4058
4059    default:
4060      break;
4061    }
4062
4063  /* Recursively scan the operands of this expression.  */
4064
4065  {
4066    register char *fmt = GET_RTX_FORMAT (code);
4067    register int i;
4068
4069    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4070      {
4071	if (fmt[i] == 'e')
4072	  {
4073	    /* Tail recursive case: save a function call level.  */
4074	    if (i == 0)
4075	      {
4076		x = XEXP (x, 0);
4077		goto retry;
4078	      }
4079	    mark_used_regs (needed, live, XEXP (x, i), final, insn);
4080	  }
4081	else if (fmt[i] == 'E')
4082	  {
4083	    register int j;
4084	    for (j = 0; j < XVECLEN (x, i); j++)
4085	      mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
4086	  }
4087      }
4088  }
4089}
4090
4091#ifdef AUTO_INC_DEC
4092
4093static int
4094try_pre_increment_1 (insn)
4095     rtx insn;
4096{
4097  /* Find the next use of this reg.  If in same basic block,
4098     make it do pre-increment or pre-decrement if appropriate.  */
4099  rtx x = single_set (insn);
4100  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4101		* INTVAL (XEXP (SET_SRC (x), 1)));
4102  int regno = REGNO (SET_DEST (x));
4103  rtx y = reg_next_use[regno];
4104  if (y != 0
4105      && BLOCK_NUM (y) == BLOCK_NUM (insn)
4106      /* Don't do this if the reg dies, or gets set in y; a standard addressing
4107	 mode would be better.  */
4108      && ! dead_or_set_p (y, SET_DEST (x))
4109      && try_pre_increment (y, SET_DEST (x), amount))
4110    {
4111      /* We have found a suitable auto-increment
4112	 and already changed insn Y to do it.
4113	 So flush this increment-instruction.  */
4114      PUT_CODE (insn, NOTE);
4115      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4116      NOTE_SOURCE_FILE (insn) = 0;
4117      /* Count a reference to this reg for the increment
4118	 insn we are deleting.  When a reg is incremented.
4119	 spilling it is worse, so we want to make that
4120	 less likely.  */
4121      if (regno >= FIRST_PSEUDO_REGISTER)
4122	{
4123	  REG_N_REFS (regno) += loop_depth;
4124	  REG_N_SETS (regno)++;
4125	}
4126      return 1;
4127    }
4128  return 0;
4129}
4130
4131/* Try to change INSN so that it does pre-increment or pre-decrement
4132   addressing on register REG in order to add AMOUNT to REG.
4133   AMOUNT is negative for pre-decrement.
4134   Returns 1 if the change could be made.
4135   This checks all about the validity of the result of modifying INSN.  */
4136
4137static int
4138try_pre_increment (insn, reg, amount)
4139     rtx insn, reg;
4140     HOST_WIDE_INT amount;
4141{
4142  register rtx use;
4143
4144  /* Nonzero if we can try to make a pre-increment or pre-decrement.
4145     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4146  int pre_ok = 0;
4147  /* Nonzero if we can try to make a post-increment or post-decrement.
4148     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4149     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4150     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4151  int post_ok = 0;
4152
4153  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4154  int do_post = 0;
4155
4156  /* From the sign of increment, see which possibilities are conceivable
4157     on this target machine.  */
4158  if (HAVE_PRE_INCREMENT && amount > 0)
4159    pre_ok = 1;
4160  if (HAVE_POST_INCREMENT && amount > 0)
4161    post_ok = 1;
4162
4163  if (HAVE_PRE_DECREMENT && amount < 0)
4164    pre_ok = 1;
4165  if (HAVE_POST_DECREMENT && amount < 0)
4166    post_ok = 1;
4167
4168  if (! (pre_ok || post_ok))
4169    return 0;
4170
4171  /* It is not safe to add a side effect to a jump insn
4172     because if the incremented register is spilled and must be reloaded
4173     there would be no way to store the incremented value back in memory.  */
4174
4175  if (GET_CODE (insn) == JUMP_INSN)
4176    return 0;
4177
4178  use = 0;
4179  if (pre_ok)
4180    use = find_use_as_address (PATTERN (insn), reg, 0);
4181  if (post_ok && (use == 0 || use == (rtx) 1))
4182    {
4183      use = find_use_as_address (PATTERN (insn), reg, -amount);
4184      do_post = 1;
4185    }
4186
4187  if (use == 0 || use == (rtx) 1)
4188    return 0;
4189
4190  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4191    return 0;
4192
4193  /* See if this combination of instruction and addressing mode exists.  */
4194  if (! validate_change (insn, &XEXP (use, 0),
4195			 gen_rtx_fmt_e (amount > 0
4196					? (do_post ? POST_INC : PRE_INC)
4197					: (do_post ? POST_DEC : PRE_DEC),
4198					Pmode, reg), 0))
4199    return 0;
4200
4201  /* Record that this insn now has an implicit side effect on X.  */
4202  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4203  return 1;
4204}
4205
4206#endif /* AUTO_INC_DEC */
4207
4208/* Find the place in the rtx X where REG is used as a memory address.
4209   Return the MEM rtx that so uses it.
4210   If PLUSCONST is nonzero, search instead for a memory address equivalent to
4211   (plus REG (const_int PLUSCONST)).
4212
4213   If such an address does not appear, return 0.
4214   If REG appears more than once, or is used other than in such an address,
4215   return (rtx)1.  */
4216
4217rtx
4218find_use_as_address (x, reg, plusconst)
4219     register rtx x;
4220     rtx reg;
4221     HOST_WIDE_INT plusconst;
4222{
4223  enum rtx_code code = GET_CODE (x);
4224  char *fmt = GET_RTX_FORMAT (code);
4225  register int i;
4226  register rtx value = 0;
4227  register rtx tem;
4228
4229  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4230    return x;
4231
4232  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4233      && XEXP (XEXP (x, 0), 0) == reg
4234      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4235      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4236    return x;
4237
4238  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4239    {
4240      /* If REG occurs inside a MEM used in a bit-field reference,
4241	 that is unacceptable.  */
4242      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4243	return (rtx) (HOST_WIDE_INT) 1;
4244    }
4245
4246  if (x == reg)
4247    return (rtx) (HOST_WIDE_INT) 1;
4248
4249  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4250    {
4251      if (fmt[i] == 'e')
4252	{
4253	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4254	  if (value == 0)
4255	    value = tem;
4256	  else if (tem != 0)
4257	    return (rtx) (HOST_WIDE_INT) 1;
4258	}
4259      if (fmt[i] == 'E')
4260	{
4261	  register int j;
4262	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4263	    {
4264	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4265	      if (value == 0)
4266		value = tem;
4267	      else if (tem != 0)
4268		return (rtx) (HOST_WIDE_INT) 1;
4269	    }
4270	}
4271    }
4272
4273  return value;
4274}
4275
4276/* Write information about registers and basic blocks into FILE.
4277   This is part of making a debugging dump.  */
4278
4279void
4280dump_flow_info (file)
4281     FILE *file;
4282{
4283  register int i;
4284  static char *reg_class_names[] = REG_CLASS_NAMES;
4285
4286  fprintf (file, "%d registers.\n", max_regno);
4287  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4288    if (REG_N_REFS (i))
4289      {
4290	enum reg_class class, altclass;
4291	fprintf (file, "\nRegister %d used %d times across %d insns",
4292		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4293	if (REG_BASIC_BLOCK (i) >= 0)
4294	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4295	if (REG_N_SETS (i))
4296  	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
4297   		   (REG_N_SETS (i) == 1) ? "" : "s");
4298	if (REG_USERVAR_P (regno_reg_rtx[i]))
4299  	  fprintf (file, "; user var");
4300	if (REG_N_DEATHS (i) != 1)
4301	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4302	if (REG_N_CALLS_CROSSED (i) == 1)
4303	  fprintf (file, "; crosses 1 call");
4304	else if (REG_N_CALLS_CROSSED (i))
4305	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4306	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4307	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4308	class = reg_preferred_class (i);
4309	altclass = reg_alternate_class (i);
4310	if (class != GENERAL_REGS || altclass != ALL_REGS)
4311	  {
4312	    if (altclass == ALL_REGS || class == ALL_REGS)
4313	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
4314	    else if (altclass == NO_REGS)
4315	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
4316	    else
4317	      fprintf (file, "; pref %s, else %s",
4318		       reg_class_names[(int) class],
4319		       reg_class_names[(int) altclass]);
4320	  }
4321	if (REGNO_POINTER_FLAG (i))
4322	  fprintf (file, "; pointer");
4323	fprintf (file, ".\n");
4324      }
4325
4326  fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
4327  for (i = 0; i < n_basic_blocks; i++)
4328    {
4329      register basic_block bb = BASIC_BLOCK (i);
4330      register int regno;
4331      register edge e;
4332
4333      fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4334	       i, INSN_UID (bb->head), INSN_UID (bb->end));
4335
4336      fprintf (file, "Predecessors: ");
4337      for (e = bb->pred; e ; e = e->pred_next)
4338	dump_edge_info (file, e, 0);
4339
4340      fprintf (file, "\nSuccessors: ");
4341      for (e = bb->succ; e ; e = e->succ_next)
4342	dump_edge_info (file, e, 1);
4343
4344      fprintf (file, "\nRegisters live at start:");
4345      if (bb->global_live_at_start)
4346	{
4347          for (regno = 0; regno < max_regno; regno++)
4348	    if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4349	      fprintf (file, " %d", regno);
4350	}
4351      else
4352	fprintf (file, " n/a");
4353
4354      fprintf (file, "\nRegisters live at end:");
4355      if (bb->global_live_at_end)
4356	{
4357          for (regno = 0; regno < max_regno; regno++)
4358	    if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4359	      fprintf (file, " %d", regno);
4360	}
4361      else
4362	fprintf (file, " n/a");
4363
4364      putc('\n', file);
4365    }
4366
4367  putc('\n', file);
4368}
4369
4370static void
4371dump_edge_info (file, e, do_succ)
4372     FILE *file;
4373     edge e;
4374     int do_succ;
4375{
4376  basic_block side = (do_succ ? e->dest : e->src);
4377
4378  if (side == ENTRY_BLOCK_PTR)
4379    fputs (" ENTRY", file);
4380  else if (side == EXIT_BLOCK_PTR)
4381    fputs (" EXIT", file);
4382  else
4383    fprintf (file, " %d", side->index);
4384
4385  if (e->flags)
4386    {
4387      static char * bitnames[] = {
4388	"fallthru", "crit", "ab", "abcall", "eh", "fake"
4389      };
4390      int comma = 0;
4391      int i, flags = e->flags;
4392
4393      fputc (' ', file);
4394      fputc ('(', file);
4395      for (i = 0; flags; i++)
4396	if (flags & (1 << i))
4397	  {
4398	    flags &= ~(1 << i);
4399
4400	    if (comma)
4401	      fputc (',', file);
4402	    if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4403	      fputs (bitnames[i], file);
4404	    else
4405	      fprintf (file, "%d", i);
4406	    comma = 1;
4407	  }
4408      fputc (')', file);
4409    }
4410}
4411
4412
4413/* Like print_rtl, but also print out live information for the start of each
4414   basic block.  */
4415
4416void
4417print_rtl_with_bb (outf, rtx_first)
4418     FILE *outf;
4419     rtx rtx_first;
4420{
4421  register rtx tmp_rtx;
4422
4423  if (rtx_first == 0)
4424    fprintf (outf, "(nil)\n");
4425  else
4426    {
4427      int i;
4428      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
4429      int max_uid = get_max_uid ();
4430      basic_block *start = (basic_block *)
4431	alloca (max_uid * sizeof (basic_block));
4432      basic_block *end = (basic_block *)
4433	alloca (max_uid * sizeof (basic_block));
4434      enum bb_state *in_bb_p = (enum bb_state *)
4435	alloca (max_uid * sizeof (enum bb_state));
4436
4437      memset (start, 0, max_uid * sizeof (basic_block));
4438      memset (end, 0, max_uid * sizeof (basic_block));
4439      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
4440
4441      for (i = n_basic_blocks - 1; i >= 0; i--)
4442	{
4443	  basic_block bb = BASIC_BLOCK (i);
4444	  rtx x;
4445
4446	  start[INSN_UID (bb->head)] = bb;
4447	  end[INSN_UID (bb->end)] = bb;
4448	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
4449	    {
4450	      enum bb_state state = IN_MULTIPLE_BB;
4451	      if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
4452		state = IN_ONE_BB;
4453	      in_bb_p[INSN_UID(x)] = state;
4454
4455	      if (x == bb->end)
4456		break;
4457	    }
4458	}
4459
4460      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
4461	{
4462	  int did_output;
4463	  basic_block bb;
4464
4465	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
4466	    {
4467	      fprintf (outf, ";; Start of basic block %d, registers live:",
4468		       bb->index);
4469
4470	      EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
4471					 {
4472					   fprintf (outf, " %d", i);
4473					   if (i < FIRST_PSEUDO_REGISTER)
4474					     fprintf (outf, " [%s]",
4475						      reg_names[i]);
4476					 });
4477	      putc ('\n', outf);
4478	    }
4479
4480	  if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
4481	      && GET_CODE (tmp_rtx) != NOTE
4482	      && GET_CODE (tmp_rtx) != BARRIER
4483	      && ! obey_regdecls)
4484	    fprintf (outf, ";; Insn is not within a basic block\n");
4485	  else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
4486	    fprintf (outf, ";; Insn is in multiple basic blocks\n");
4487
4488	  did_output = print_rtl_single (outf, tmp_rtx);
4489
4490	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
4491	    fprintf (outf, ";; End of basic block %d\n", bb->index);
4492
4493	  if (did_output)
4494	    putc ('\n', outf);
4495	}
4496    }
4497}
4498
4499
4500/* Integer list support.  */
4501
4502/* Allocate a node from list *HEAD_PTR.  */
4503
4504static int_list_ptr
4505alloc_int_list_node (head_ptr)
4506     int_list_block **head_ptr;
4507{
4508  struct int_list_block *first_blk = *head_ptr;
4509
4510  if (first_blk == NULL || first_blk->nodes_left <= 0)
4511    {
4512      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
4513      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
4514      first_blk->next = *head_ptr;
4515      *head_ptr = first_blk;
4516    }
4517
4518  first_blk->nodes_left--;
4519  return &first_blk->nodes[first_blk->nodes_left];
4520}
4521
4522/* Pointer to head of predecessor/successor block list.  */
4523static int_list_block *pred_int_list_blocks;
4524
4525/* Add a new node to integer list LIST with value VAL.
4526   LIST is a pointer to a list object to allow for different implementations.
4527   If *LIST is initially NULL, the list is empty.
4528   The caller must not care whether the element is added to the front or
4529   to the end of the list (to allow for different implementations).  */
4530
4531static int_list_ptr
4532add_int_list_node (blk_list, list, val)
4533     int_list_block **blk_list;
4534     int_list **list;
4535     int val;
4536{
4537  int_list_ptr p = alloc_int_list_node (blk_list);
4538
4539  p->val = val;
4540  p->next = *list;
4541  *list = p;
4542  return p;
4543}
4544
4545/* Free the blocks of lists at BLK_LIST.  */
4546
4547void
4548free_int_list (blk_list)
4549     int_list_block **blk_list;
4550{
4551  int_list_block *p, *next;
4552
4553  for (p = *blk_list; p != NULL; p = next)
4554    {
4555      next = p->next;
4556      free (p);
4557    }
4558
4559  /* Mark list as empty for the next function we compile.  */
4560  *blk_list = NULL;
4561}
4562
4563/* Predecessor/successor computation.  */
4564
4565/* Mark PRED_BB a precessor of SUCC_BB,
4566   and conversely SUCC_BB a successor of PRED_BB.  */
4567
4568static void
4569add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
4570     int pred_bb;
4571     int succ_bb;
4572     int_list_ptr *s_preds;
4573     int_list_ptr *s_succs;
4574     int *num_preds;
4575     int *num_succs;
4576{
4577  if (succ_bb != EXIT_BLOCK)
4578    {
4579      add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
4580      num_preds[succ_bb]++;
4581    }
4582  if (pred_bb != ENTRY_BLOCK)
4583    {
4584      add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
4585      num_succs[pred_bb]++;
4586    }
4587}
4588
4589/* Convert edge lists into pred/succ lists for backward compatibility.  */
4590
4591void
4592compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
4593     int_list_ptr *s_preds;
4594     int_list_ptr *s_succs;
4595     int *num_preds;
4596     int *num_succs;
4597{
4598  int i, n = n_basic_blocks;
4599  edge e;
4600
4601  memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
4602  memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
4603  memset (num_preds, 0, n_basic_blocks * sizeof (int));
4604  memset (num_succs, 0, n_basic_blocks * sizeof (int));
4605
4606  for (i = 0; i < n; ++i)
4607    {
4608      basic_block bb = BASIC_BLOCK (i);
4609
4610      for (e = bb->succ; e ; e = e->succ_next)
4611	add_pred_succ (i, e->dest->index, s_preds, s_succs,
4612		       num_preds, num_succs);
4613    }
4614
4615  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
4616    add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
4617		   num_preds, num_succs);
4618}
4619
4620void
4621dump_bb_data (file, preds, succs, live_info)
4622     FILE *file;
4623     int_list_ptr *preds;
4624     int_list_ptr *succs;
4625     int live_info;
4626{
4627  int bb;
4628  int_list_ptr p;
4629
4630  fprintf (file, "BB data\n\n");
4631  for (bb = 0; bb < n_basic_blocks; bb++)
4632    {
4633      fprintf (file, "BB %d, start %d, end %d\n", bb,
4634	       INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
4635      fprintf (file, "  preds:");
4636      for (p = preds[bb]; p != NULL; p = p->next)
4637	{
4638	  int pred_bb = INT_LIST_VAL (p);
4639	  if (pred_bb == ENTRY_BLOCK)
4640	    fprintf (file, " entry");
4641	  else
4642	    fprintf (file, " %d", pred_bb);
4643	}
4644      fprintf (file, "\n");
4645      fprintf (file, "  succs:");
4646      for (p = succs[bb]; p != NULL; p = p->next)
4647	{
4648	  int succ_bb = INT_LIST_VAL (p);
4649	  if (succ_bb == EXIT_BLOCK)
4650	    fprintf (file, " exit");
4651	  else
4652	    fprintf (file, " %d", succ_bb);
4653	}
4654      if (live_info)
4655	{
4656	  int regno;
4657	  fprintf (file, "\nRegisters live at start:");
4658	  for (regno = 0; regno < max_regno; regno++)
4659	    if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
4660	      fprintf (file, " %d", regno);
4661	  fprintf (file, "\n");
4662	}
4663      fprintf (file, "\n");
4664    }
4665  fprintf (file, "\n");
4666}
4667
4668/* Free basic block data storage.  */
4669
4670void
4671free_bb_mem ()
4672{
4673  free_int_list (&pred_int_list_blocks);
4674}
4675
4676/* Compute dominator relationships.  */
4677void
4678compute_dominators (dominators, post_dominators, s_preds, s_succs)
4679     sbitmap *dominators;
4680     sbitmap *post_dominators;
4681     int_list_ptr *s_preds;
4682     int_list_ptr *s_succs;
4683{
4684  int bb, changed, passes;
4685  sbitmap *temp_bitmap;
4686
4687  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4688  sbitmap_vector_ones (dominators, n_basic_blocks);
4689  sbitmap_vector_ones (post_dominators, n_basic_blocks);
4690  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4691
4692  sbitmap_zero (dominators[0]);
4693  SET_BIT (dominators[0], 0);
4694
4695  sbitmap_zero (post_dominators[n_basic_blocks - 1]);
4696  SET_BIT (post_dominators[n_basic_blocks - 1], 0);
4697
4698  passes = 0;
4699  changed = 1;
4700  while (changed)
4701    {
4702      changed = 0;
4703      for (bb = 1; bb < n_basic_blocks; bb++)
4704	{
4705	  sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4706					     bb, s_preds);
4707	  SET_BIT (temp_bitmap[bb], bb);
4708	  changed |= sbitmap_a_and_b (dominators[bb],
4709				      dominators[bb],
4710				      temp_bitmap[bb]);
4711	  sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4712					   bb, s_succs);
4713	  SET_BIT (temp_bitmap[bb], bb);
4714	  changed |= sbitmap_a_and_b (post_dominators[bb],
4715				      post_dominators[bb],
4716				      temp_bitmap[bb]);
4717	}
4718      passes++;
4719    }
4720
4721  free (temp_bitmap);
4722}
4723
4724/* Given DOMINATORS, compute the immediate dominators into IDOM.  */
4725
4726void
4727compute_immediate_dominators (idom, dominators)
4728     int *idom;
4729     sbitmap *dominators;
4730{
4731  sbitmap *tmp;
4732  int b;
4733
4734  tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4735
4736  /* Begin with tmp(n) = dom(n) - { n }.  */
4737  for (b = n_basic_blocks; --b >= 0; )
4738    {
4739      sbitmap_copy (tmp[b], dominators[b]);
4740      RESET_BIT (tmp[b], b);
4741    }
4742
4743  /* Subtract out all of our dominator's dominators.  */
4744  for (b = n_basic_blocks; --b >= 0; )
4745    {
4746      sbitmap tmp_b = tmp[b];
4747      int s;
4748
4749      for (s = n_basic_blocks; --s >= 0; )
4750	if (TEST_BIT (tmp_b, s))
4751	  sbitmap_difference (tmp_b, tmp_b, tmp[s]);
4752    }
4753
4754  /* Find the one bit set in the bitmap and put it in the output array.  */
4755  for (b = n_basic_blocks; --b >= 0; )
4756    {
4757      int t;
4758      EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
4759    }
4760
4761  sbitmap_vector_free (tmp);
4762}
4763
4764/* Count for a single SET rtx, X.  */
4765
4766static void
4767count_reg_sets_1 (x)
4768     rtx x;
4769{
4770  register int regno;
4771  register rtx reg = SET_DEST (x);
4772
4773  /* Find the register that's set/clobbered.  */
4774  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4775	 || GET_CODE (reg) == SIGN_EXTRACT
4776	 || GET_CODE (reg) == STRICT_LOW_PART)
4777    reg = XEXP (reg, 0);
4778
4779  if (GET_CODE (reg) == PARALLEL
4780      && GET_MODE (reg) == BLKmode)
4781    {
4782      register int i;
4783      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4784	count_reg_sets_1 (XVECEXP (reg, 0, i));
4785      return;
4786    }
4787
4788  if (GET_CODE (reg) == REG)
4789    {
4790      regno = REGNO (reg);
4791      if (regno >= FIRST_PSEUDO_REGISTER)
4792	{
4793	  /* Count (weighted) references, stores, etc.  This counts a
4794	     register twice if it is modified, but that is correct.  */
4795	  REG_N_SETS (regno)++;
4796
4797	  REG_N_REFS (regno) += loop_depth;
4798	}
4799    }
4800}
4801
4802/* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4803   REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */
4804
4805static void
4806count_reg_sets  (x)
4807     rtx x;
4808{
4809  register RTX_CODE code = GET_CODE (x);
4810
4811  if (code == SET || code == CLOBBER)
4812    count_reg_sets_1 (x);
4813  else if (code == PARALLEL)
4814    {
4815      register int i;
4816      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4817	{
4818	  code = GET_CODE (XVECEXP (x, 0, i));
4819	  if (code == SET || code == CLOBBER)
4820	    count_reg_sets_1 (XVECEXP (x, 0, i));
4821	}
4822    }
4823}
4824
4825/* Increment REG_N_REFS by the current loop depth each register reference
4826   found in X.  */
4827
4828static void
4829count_reg_references (x)
4830     rtx x;
4831{
4832  register RTX_CODE code;
4833
4834 retry:
4835  code = GET_CODE (x);
4836  switch (code)
4837    {
4838    case LABEL_REF:
4839    case SYMBOL_REF:
4840    case CONST_INT:
4841    case CONST:
4842    case CONST_DOUBLE:
4843    case PC:
4844    case ADDR_VEC:
4845    case ADDR_DIFF_VEC:
4846    case ASM_INPUT:
4847      return;
4848
4849#ifdef HAVE_cc0
4850    case CC0:
4851      return;
4852#endif
4853
4854    case CLOBBER:
4855      /* If we are clobbering a MEM, mark any registers inside the address
4856	 as being used.  */
4857      if (GET_CODE (XEXP (x, 0)) == MEM)
4858	count_reg_references (XEXP (XEXP (x, 0), 0));
4859      return;
4860
4861    case SUBREG:
4862      /* While we're here, optimize this case.  */
4863      x = SUBREG_REG (x);
4864
4865      /* In case the SUBREG is not of a register, don't optimize */
4866      if (GET_CODE (x) != REG)
4867	{
4868	  count_reg_references (x);
4869	  return;
4870	}
4871
4872      /* ... fall through ...  */
4873
4874    case REG:
4875      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4876	REG_N_REFS (REGNO (x)) += loop_depth;
4877      return;
4878
4879    case SET:
4880      {
4881	register rtx testreg = SET_DEST (x);
4882	int mark_dest = 0;
4883
4884	/* If storing into MEM, don't show it as being used.  But do
4885	   show the address as being used.  */
4886	if (GET_CODE (testreg) == MEM)
4887	  {
4888	    count_reg_references (XEXP (testreg, 0));
4889	    count_reg_references (SET_SRC (x));
4890	    return;
4891	  }
4892
4893	/* Storing in STRICT_LOW_PART is like storing in a reg
4894	   in that this SET might be dead, so ignore it in TESTREG.
4895	   but in some other ways it is like using the reg.
4896
4897	   Storing in a SUBREG or a bit field is like storing the entire
4898	   register in that if the register's value is not used
4899	   then this SET is not needed.  */
4900	while (GET_CODE (testreg) == STRICT_LOW_PART
4901	       || GET_CODE (testreg) == ZERO_EXTRACT
4902	       || GET_CODE (testreg) == SIGN_EXTRACT
4903	       || GET_CODE (testreg) == SUBREG)
4904	  {
4905	    /* Modifying a single register in an alternate mode
4906	       does not use any of the old value.  But these other
4907	       ways of storing in a register do use the old value.  */
4908	    if (GET_CODE (testreg) == SUBREG
4909		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4910	      ;
4911	    else
4912	      mark_dest = 1;
4913
4914	    testreg = XEXP (testreg, 0);
4915	  }
4916
4917	/* If this is a store into a register,
4918	   recursively scan the value being stored.  */
4919
4920	if ((GET_CODE (testreg) == PARALLEL
4921	     && GET_MODE (testreg) == BLKmode)
4922	    || GET_CODE (testreg) == REG)
4923	  {
4924	    count_reg_references (SET_SRC (x));
4925	    if (mark_dest)
4926	      count_reg_references (SET_DEST (x));
4927	    return;
4928	  }
4929      }
4930      break;
4931
4932    default:
4933      break;
4934    }
4935
4936  /* Recursively scan the operands of this expression.  */
4937
4938  {
4939    register char *fmt = GET_RTX_FORMAT (code);
4940    register int i;
4941
4942    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4943      {
4944	if (fmt[i] == 'e')
4945	  {
4946	    /* Tail recursive case: save a function call level.  */
4947	    if (i == 0)
4948	      {
4949		x = XEXP (x, 0);
4950		goto retry;
4951	      }
4952	    count_reg_references (XEXP (x, i));
4953	  }
4954	else if (fmt[i] == 'E')
4955	  {
4956	    register int j;
4957	    for (j = 0; j < XVECLEN (x, i); j++)
4958	      count_reg_references (XVECEXP (x, i, j));
4959	  }
4960      }
4961  }
4962}
4963
4964/* Recompute register set/reference counts immediately prior to register
4965   allocation.
4966
4967   This avoids problems with set/reference counts changing to/from values
4968   which have special meanings to the register allocators.
4969
4970   Additionally, the reference counts are the primary component used by the
4971   register allocators to prioritize pseudos for allocation to hard regs.
4972   More accurate reference counts generally lead to better register allocation.
4973
4974   F is the first insn to be scanned.
4975   LOOP_STEP denotes how much loop_depth should be incremented per
4976   loop nesting level in order to increase the ref count more for references
4977   in a loop.
4978
4979   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4980   possibly other information which is used by the register allocators.  */
4981
4982void
4983recompute_reg_usage (f, loop_step)
4984     rtx f;
4985     int loop_step;
4986{
4987  rtx insn;
4988  int i, max_reg;
4989
4990  /* Clear out the old data.  */
4991  max_reg = max_reg_num ();
4992  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4993    {
4994      REG_N_SETS (i) = 0;
4995      REG_N_REFS (i) = 0;
4996    }
4997
4998  /* Scan each insn in the chain and count how many times each register is
4999     set/used.  */
5000  loop_depth = 1;
5001  for (insn = f; insn; insn = NEXT_INSN (insn))
5002    {
5003      /* Keep track of loop depth.  */
5004      if (GET_CODE (insn) == NOTE)
5005	{
5006	  /* Look for loop boundaries.  */
5007	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5008	    loop_depth -= loop_step;
5009	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5010	    loop_depth += loop_step;
5011
5012	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5013	     Abort now rather than setting register status incorrectly.  */
5014	  if (loop_depth == 0)
5015	    abort ();
5016	}
5017      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5018	{
5019	  rtx links;
5020
5021	  /* This call will increment REG_N_SETS for each SET or CLOBBER
5022	     of a register in INSN.  It will also increment REG_N_REFS
5023	     by the loop depth for each set of a register in INSN.  */
5024	  count_reg_sets (PATTERN (insn));
5025
5026	  /* count_reg_sets does not detect autoincrement address modes, so
5027	     detect them here by looking at the notes attached to INSN.  */
5028	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5029	    {
5030	      if (REG_NOTE_KIND (links) == REG_INC)
5031		/* Count (weighted) references, stores, etc.  This counts a
5032		   register twice if it is modified, but that is correct.  */
5033		REG_N_SETS (REGNO (XEXP (links, 0)))++;
5034	    }
5035
5036	  /* This call will increment REG_N_REFS by the current loop depth for
5037	     each reference to a register in INSN.  */
5038	  count_reg_references (PATTERN (insn));
5039
5040	  /* count_reg_references will not include counts for arguments to
5041	     function calls, so detect them here by examining the
5042	     CALL_INSN_FUNCTION_USAGE data.  */
5043	  if (GET_CODE (insn) == CALL_INSN)
5044	    {
5045	      rtx note;
5046
5047	      for (note = CALL_INSN_FUNCTION_USAGE (insn);
5048		   note;
5049		   note = XEXP (note, 1))
5050		if (GET_CODE (XEXP (note, 0)) == USE)
5051		  count_reg_references (SET_DEST (XEXP (note, 0)));
5052	    }
5053	}
5054    }
5055}
5056
5057/* Record INSN's block as BB.  */
5058
5059void
5060set_block_for_insn (insn, bb)
5061     rtx insn;
5062     basic_block bb;
5063{
5064  size_t uid = INSN_UID (insn);
5065  if (uid >= basic_block_for_insn->num_elements)
5066    {
5067      int new_size;
5068
5069      /* Add one-eighth the size so we don't keep calling xrealloc.  */
5070      new_size = uid + (uid + 7) / 8;
5071
5072      VARRAY_GROW (basic_block_for_insn, new_size);
5073    }
5074  VARRAY_BB (basic_block_for_insn, uid) = bb;
5075}
5076
5077/* Record INSN's block number as BB.  */
5078/* ??? This has got to go.  */
5079
5080void
5081set_block_num (insn, bb)
5082     rtx insn;
5083     int bb;
5084{
5085  set_block_for_insn (insn, BASIC_BLOCK (bb));
5086}
5087
5088/* Verify the CFG consistency.  This function check some CFG invariants and
5089   aborts when something is wrong.  Hope that this function will help to
5090   convert many optimization passes to preserve CFG consistent.
5091
5092   Currently it does following checks:
5093
5094   - test head/end pointers
5095   - overlapping of basic blocks
5096   - edge list corectness
5097   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5098   - tails of basic blocks (ensure that boundary is necesary)
5099   - scans body of the basic block for JUMP_INSN, CODE_LABEL
5100     and NOTE_INSN_BASIC_BLOCK
5101   - check that all insns are in the basic blocks
5102   (except the switch handling code, barriers and notes)
5103
5104   In future it can be extended check a lot of other stuff as well
5105   (reachability of basic blocks, life information, etc. etc.).  */
5106
5107void
5108verify_flow_info ()
5109{
5110  const int max_uid = get_max_uid ();
5111  const rtx rtx_first = get_insns ();
5112  basic_block *bb_info;
5113  rtx x;
5114  int i;
5115
5116  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5117  memset (bb_info, 0, max_uid * sizeof (basic_block));
5118
5119  /* First pass check head/end pointers and set bb_info array used by
5120     later passes.  */
5121  for (i = n_basic_blocks - 1; i >= 0; i--)
5122    {
5123      basic_block bb = BASIC_BLOCK (i);
5124
5125      /* Check the head pointer and make sure that it is pointing into
5126         insn list.  */
5127      for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5128	if (x == bb->head)
5129	  break;
5130      if (!x)
5131	{
5132	  fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
5133		 INSN_UID (bb->head), bb->index);
5134	}
5135
5136      /* Check the end pointer and make sure that it is pointing into
5137         insn list.  */
5138      for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5139	{
5140	  if (bb_info[INSN_UID (x)] != NULL)
5141	    {
5142	      fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
5143		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5144	    }
5145	  bb_info[INSN_UID (x)] = bb;
5146
5147	  if (x == bb->end)
5148	    break;
5149	}
5150      if (!x)
5151	{
5152	  fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
5153		 INSN_UID (bb->end), bb->index);
5154	}
5155    }
5156
5157  /* Now check the basic blocks (boundaries etc.) */
5158  for (i = n_basic_blocks - 1; i >= 0; i--)
5159    {
5160      basic_block bb = BASIC_BLOCK (i);
5161      /* Check corectness of edge lists */
5162      edge e;
5163
5164      e = bb->succ;
5165      while (e)
5166	{
5167	  if (e->src != bb)
5168	    {
5169	      fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5170		       bb->index);
5171	      fprintf (stderr, "Predecessor: ");
5172	      dump_edge_info (stderr, e, 0);
5173	      fprintf (stderr, "\nSuccessor: ");
5174	      dump_edge_info (stderr, e, 1);
5175	      fflush (stderr);
5176	      abort ();
5177	    }
5178	  if (e->dest != EXIT_BLOCK_PTR)
5179	    {
5180	      edge e2 = e->dest->pred;
5181	      while (e2 && e2 != e)
5182		e2 = e2->pred_next;
5183	      if (!e2)
5184		{
5185		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5186			 bb->index);
5187		}
5188	    }
5189	  e = e->succ_next;
5190	}
5191
5192      e = bb->pred;
5193      while (e)
5194	{
5195	  if (e->dest != bb)
5196	    {
5197	      fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
5198		       bb->index);
5199	      fprintf (stderr, "Predecessor: ");
5200	      dump_edge_info (stderr, e, 0);
5201	      fprintf (stderr, "\nSuccessor: ");
5202	      dump_edge_info (stderr, e, 1);
5203	      fflush (stderr);
5204	      abort ();
5205	    }
5206	  if (e->src != ENTRY_BLOCK_PTR)
5207	    {
5208	      edge e2 = e->src->succ;
5209	      while (e2 && e2 != e)
5210		e2 = e2->succ_next;
5211	      if (!e2)
5212		{
5213		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5214			 bb->index);
5215		}
5216	    }
5217	  e = e->pred_next;
5218	}
5219
5220      /* OK pointers are correct.  Now check the header of basic
5221         block.  It ought to contain optional CODE_LABEL followed
5222	 by NOTE_BASIC_BLOCK.  */
5223      x = bb->head;
5224      if (GET_CODE (x) == CODE_LABEL)
5225	{
5226	  if (bb->end == x)
5227	    {
5228	      fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
5229	    }
5230	  x = NEXT_INSN (x);
5231	}
5232      if (GET_CODE (x) != NOTE
5233	  || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5234	  || NOTE_BASIC_BLOCK (x) != bb)
5235	{
5236	  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5237		 bb->index);
5238	}
5239
5240      if (bb->end == x)
5241	{
5242	  /* Do checks for empty blocks here */
5243	}
5244      else
5245	{
5246	  x = NEXT_INSN (x);
5247	  while (x)
5248	    {
5249	      if (GET_CODE (x) == NOTE
5250		  && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5251		{
5252		  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
5253			 INSN_UID (x), bb->index);
5254		}
5255
5256	      if (x == bb->end)
5257		break;
5258
5259	      if (GET_CODE (x) == JUMP_INSN
5260		  || GET_CODE (x) == CODE_LABEL
5261		  || GET_CODE (x) == BARRIER)
5262		{
5263		  fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
5264			      x, bb->index);
5265		}
5266
5267	      x = NEXT_INSN (x);
5268	    }
5269	}
5270    }
5271
5272  x = rtx_first;
5273  while (x)
5274    {
5275      if (!bb_info[INSN_UID (x)])
5276	{
5277	  switch (GET_CODE (x))
5278	    {
5279	    case BARRIER:
5280	    case NOTE:
5281	      break;
5282
5283	    case CODE_LABEL:
5284	      /* An addr_vec is placed outside any block block.  */
5285	      if (NEXT_INSN (x)
5286		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5287		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5288		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5289		{
5290		  x = NEXT_INSN (x);
5291		}
5292
5293	      /* But in any case, non-deletable labels can appear anywhere.  */
5294	      break;
5295
5296	    default:
5297	      fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
5298	    }
5299	}
5300
5301      x = NEXT_INSN (x);
5302    }
5303}
5304