1/* Data flow analysis for GNU compiler.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000 Free Software Foundation, Inc.
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING.  If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA.  */
21
22
23/* This file contains the data flow analysis pass of the compiler.  It
24   computes data flow information which tells combine_instructions
25   which insns to consider combining and controls register allocation.
26
27   Additional data flow information that is too bulky to record is
28   generated during the analysis, and is used at that time to create
29   autoincrement and autodecrement addressing.
30
31   The first step is dividing the function into basic blocks.
32   find_basic_blocks does this.  Then life_analysis determines
33   where each register is live and where it is dead.
34
35   ** find_basic_blocks **
36
37   find_basic_blocks divides the current function's rtl into basic
38   blocks and constructs the CFG.  The blocks are recorded in the
39   basic_block_info array; the CFG exists in the edge structures
40   referenced by the blocks.
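
   For example, later passes can walk these structures directly; a
   minimal sketch (note_edge stands for whatever per-edge processing a
   pass wants and is not a real routine in this file):

	int i;
	edge e;

	for (i = 0; i < n_basic_blocks; i++)
	  {
	    basic_block bb = BASIC_BLOCK (i);
	    for (e = bb->succ; e; e = e->succ_next)
	      note_edge (bb, e->dest, e->flags);
	  }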
41
42   find_basic_blocks also finds any unreachable loops and deletes them.
43
44   ** life_analysis **
45
46   life_analysis is called immediately after find_basic_blocks.
47   It uses the basic block information to determine where each
48   hard or pseudo register is live.
49
50   ** live-register info **
51
52   The information about where each register is live is in two parts:
53   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54
55   basic_block->global_live_at_start has an element for each basic
56   block, and the element is a bit-vector with a bit for each hard or
57   pseudo register.  The bit is 1 if the register is live at the
58   beginning of the basic block.
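
   For example, with the regset macros, the test

	REGNO_REG_SET_P (BASIC_BLOCK (2)->global_live_at_start, regno)

   is nonzero exactly when (hard or pseudo) register number REGNO is
   live on entry to basic block 2 (a sketch; regno is just an
   illustrative variable).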
59
60   Two types of elements can be added to an insn's REG_NOTES.
61   A REG_DEAD note is added to an insn's REG_NOTES for any register
62   that meets both of two conditions:  The value in the register is not
63   needed in subsequent insns and the insn does not replace the value in
64   the register (in the case of multi-word hard registers, the value in
65   each register must be replaced by the insn to avoid a REG_DEAD note).
66
67   In the vast majority of cases, an object in a REG_DEAD note will be
68   used somewhere in the insn.  The (rare) exception to this is if an
69   insn uses a multi-word hard register and only some of the registers are
70   needed in subsequent insns.  In that case, REG_DEAD notes will be
71   provided for those hard registers that are not subsequently needed.
72   Partial REG_DEAD notes of this type do not occur when an insn sets
73   only some of the hard registers used in such a multi-word operand;
74   omitting REG_DEAD notes for objects stored in an insn is optional and
75   the desire to do so does not justify the complexity of the partial
76   REG_DEAD notes.
77
78   REG_UNUSED notes are added for each register that is set by the insn
79   but is unused subsequently (if every register set by the insn is unused
80   and the insn does not reference memory or have some other side-effect,
81   the insn is deleted instead).  If only part of a multi-word hard
82   register is used in a subsequent insn, REG_UNUSED notes are made for
83   the parts that will not be used.
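
   Schematically (register numbers and insn UIDs are made up), an insn
   printed roughly as

	(insn 42 41 43 (set (reg:SI 102)
		(plus:SI (reg:SI 103) (const_int 4))) ...
	    (expr_list:REG_DEAD (reg:SI 103) (nil)))

   carries a REG_DEAD note because the value in register 103 is not
   needed after this insn; if the value stored into register 102 were
   itself never used again, the insn would instead carry a REG_UNUSED
   note for register 102 (or be deleted outright, as described above).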
84
85   To determine which registers are live after any insn, one can
86   start from the beginning of the basic block and scan insns, noting
87   which registers are set by each insn and which die there.
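
   A minimal sketch of that forward scan, using the regset macros
   (update_live is illustrative only; in practice it would add the
   registers set by INSN to LIVE and remove those mentioned in its
   REG_DEAD and REG_UNUSED notes):

	regset live = OBSTACK_ALLOC_REG_SET (function_obstack);
	rtx insn;

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    update_live (live, insn);
	    if (insn == bb->end)
	      break;
	  }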
88
89   ** Other actions of life_analysis **
90
91   life_analysis sets up the LOG_LINKS fields of insns because the
92   information needed to do so is readily available.
93
94   life_analysis deletes insns whose only effect is to store a value
95   that is never used.
96
97   life_analysis notices cases where a reference to a register as
98   a memory address can be combined with a preceding or following
99   incrementation or decrementation of the register.  The separate
100   instruction to increment or decrement is deleted and the address
101   is changed to a POST_INC or similar rtx.
102
103   Each time an incrementing or decrementing address is created,
104   a REG_INC element is added to the insn's REG_NOTES list.
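
   Schematically, a separate increment such as

	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   that follows an insn containing (mem:SI (reg:SI 100)) can be deleted,
   the address rewritten as (mem:SI (post_inc:SI (reg:SI 100))), and a
   REG_INC note for register 100 added to the insn containing the MEM
   (register number 100 is illustrative only).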
105
106   life_analysis fills in certain vectors containing information about
107   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
108   reg_n_calls_crossed and reg_basic_block.
109
110   life_analysis sets current_function_sp_is_unchanging if the function
111   doesn't modify the stack pointer.  */
112
113/* TODO:
114
115   Split out from life_analysis:
116	- local property discovery (bb->local_live, bb->local_set)
117	- global property computation
118	- log links creation
119	- pre/post modify transformation
120*/
121
122#include "config.h"
123#include "system.h"
124#include "rtl.h"
125#include "basic-block.h"
126#include "insn-config.h"
127#include "regs.h"
128#include "hard-reg-set.h"
129#include "flags.h"
130#include "output.h"
131#include "except.h"
132#include "toplev.h"
133#include "recog.h"
134#include "insn-flags.h"
135
136#include "obstack.h"
137#define obstack_chunk_alloc xmalloc
138#define obstack_chunk_free free
139
140
141/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
142   the stack pointer does not matter.  The value is tested only in
143   functions that have frame pointers.
144   No definition is equivalent to always zero.  */
145#ifndef EXIT_IGNORE_STACK
146#define EXIT_IGNORE_STACK 0
147#endif
148
149
150/* The contents of the current function definition are allocated
151   in this obstack, and all are freed at the end of the function.
152   For top-level functions, this is temporary_obstack.
153   Separate obstacks are made for nested functions.  */
154
155extern struct obstack *function_obstack;
156
157/* List of labels that must never be deleted.  */
158extern rtx forced_labels;
159
160/* Number of basic blocks in the current function.  */
161
162int n_basic_blocks;
163
164/* The basic block array.  */
165
166varray_type basic_block_info;
167
168/* The special entry and exit blocks.  */
169
170struct basic_block_def entry_exit_blocks[2] =
171{
172  {
173    NULL,			/* head */
174    NULL,			/* end */
175    NULL,			/* pred */
176    NULL,			/* succ */
177    NULL,			/* local_set */
178    NULL,			/* global_live_at_start */
179    NULL,			/* global_live_at_end */
180    NULL,			/* aux */
181    ENTRY_BLOCK,		/* index */
182    0				/* loop_depth */
183  },
184  {
185    NULL,			/* head */
186    NULL,			/* end */
187    NULL,			/* pred */
188    NULL,			/* succ */
189    NULL,			/* local_set */
190    NULL,			/* global_live_at_start */
191    NULL,			/* global_live_at_end */
192    NULL,			/* aux */
193    EXIT_BLOCK,			/* index */
194    0				/* loop_depth */
195  }
196};
197
198/* Nonzero if the second flow pass has completed.  */
199int flow2_completed;
200
201/* Maximum register number used in this function, plus one.  */
202
203int max_regno;
204
205/* Indexed by n, giving various register information */
206
207varray_type reg_n_info;
208
209/* Size of the reg_n_info table.  */
210
211unsigned int reg_n_max;
212
213/* Element N is the next insn that uses (hard or pseudo) register number N
214   within the current basic block; or zero, if there is no such insn.
215   This is valid only during the final backward scan in propagate_block.  */
216
217static rtx *reg_next_use;
218
219/* Size of a regset for the current function,
220   in (1) bytes and (2) elements.  */
221
222int regset_bytes;
223int regset_size;
224
225/* Regset of regs live when calls to `setjmp'-like functions happen.  */
226/* ??? Does this exist only for the setjmp-clobbered warning message?  */
227
228regset regs_live_at_setjmp;
229
230/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
231   that have to go in the same hard reg.
232   The first two regs in the list are a pair, and the next two
233   are another pair, etc.  */
234rtx regs_may_share;
235
236/* Depth within loops of basic block being scanned for lifetime analysis,
237   plus one.  This is the weight attached to references to registers.  */
238
239static int loop_depth;
240
241/* During propagate_block, this is non-zero if the value of CC0 is live.  */
242
243static int cc0_live;
244
245/* During propagate_block, this contains a list of all the MEMs we are
246   tracking for dead store elimination.
247
248   ?!? Note we leak memory by not freeing items on this list.  We need to
249   write some generic routines to operate on memory lists since cse, gcse,
250   loop, sched, flow and possibly other passes all need to do basically the
251   same operations on these lists.  */
252
253static rtx mem_set_list;
254
255/* Set of registers that may be eliminable.  These are handled specially
256   in updating regs_ever_live.  */
257
258static HARD_REG_SET elim_reg_set;
259
260/* The basic block structure for every insn, indexed by uid.  */
261
262varray_type basic_block_for_insn;
263
264/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
265/* ??? Should probably be using LABEL_NUSES instead.  It would take a
266   bit of surgery to be able to use or co-opt the routines in jump.  */
267
268static rtx label_value_list;
269
270/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */
271
272#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
273#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
274static bitmap uid_volatile;
275
276/* Forward declarations */
277static int count_basic_blocks		PROTO((rtx));
278static rtx find_basic_blocks_1		PROTO((rtx, rtx*));
279static void create_basic_block		PROTO((int, rtx, rtx, rtx));
280static void compute_bb_for_insn		PROTO((varray_type, int));
281static void clear_edges			PROTO((void));
282static void make_edges			PROTO((rtx, rtx*));
283static void make_edge			PROTO((basic_block, basic_block, int));
284static void make_label_edge		PROTO((basic_block, rtx, int));
285static void mark_critical_edges		PROTO((void));
286
287static void commit_one_edge_insertion	PROTO((edge));
288
289static void delete_unreachable_blocks	PROTO((void));
290static void delete_eh_regions		PROTO((void));
291static int can_delete_note_p		PROTO((rtx));
292static void delete_insn_chain		PROTO((rtx, rtx));
293static int delete_block			PROTO((basic_block));
294static void expunge_block		PROTO((basic_block));
295static rtx flow_delete_insn		PROTO((rtx));
296static int can_delete_label_p		PROTO((rtx));
297static void merge_blocks_nomove		PROTO((basic_block, basic_block));
298static int merge_blocks			PROTO((edge,basic_block,basic_block));
299static void tidy_fallthru_edge		PROTO((edge,basic_block,basic_block));
300static void calculate_loop_depth	PROTO((rtx));
301
302static int set_noop_p			PROTO((rtx));
303static int noop_move_p			PROTO((rtx));
304static void notice_stack_pointer_modification PROTO ((rtx, rtx));
305static void record_volatile_insns	PROTO((rtx));
306static void mark_regs_live_at_end	PROTO((regset));
307static void life_analysis_1		PROTO((rtx, int, int));
308static void init_regset_vector		PROTO ((regset *, int,
309						struct obstack *));
310static void propagate_block		PROTO((regset, rtx, rtx, int,
311					       regset, int, int));
312static int insn_dead_p			PROTO((rtx, regset, int, rtx));
313static int libcall_dead_p		PROTO((rtx, regset, rtx, rtx));
314static void mark_set_regs		PROTO((regset, regset, rtx,
315					       rtx, regset));
316static void mark_set_1			PROTO((regset, regset, rtx,
317					       rtx, regset));
318#ifdef AUTO_INC_DEC
319static void find_auto_inc		PROTO((regset, rtx, rtx));
320static int try_pre_increment_1		PROTO((rtx));
321static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
322#endif
323static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
324void dump_flow_info			PROTO((FILE *));
325static void dump_edge_info		PROTO((FILE *, edge, int));
326
327static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
328static int_list_ptr add_int_list_node   PROTO ((int_list_block **,
329						int_list **, int));
330
331static void add_pred_succ		PROTO ((int, int, int_list_ptr *,
332						int_list_ptr *, int *, int *));
333
334static void count_reg_sets_1		PROTO ((rtx));
335static void count_reg_sets		PROTO ((rtx));
336static void count_reg_references	PROTO ((rtx));
337static void notice_stack_pointer_modification PROTO ((rtx, rtx));
338static void invalidate_mems_from_autoinc	PROTO ((rtx));
339void verify_flow_info			PROTO ((void));
340
341/* Find basic blocks of the current function.
342   F is the first insn of the function and NREGS the number of register
343   numbers in use.  */
344
345void
346find_basic_blocks (f, nregs, file, do_cleanup)
347     rtx f;
348     int nregs ATTRIBUTE_UNUSED;
349     FILE *file ATTRIBUTE_UNUSED;
350     int do_cleanup;
351{
352  rtx *bb_eh_end;
353  int max_uid;
354
355  /* Flush out existing data.  */
356  if (basic_block_info != NULL)
357    {
358      int i;
359
360      clear_edges ();
361
362      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
363	 tag for reuse during create_basic_block, just in case some pass
364	 copies around basic block notes improperly.  */
365      for (i = 0; i < n_basic_blocks; ++i)
366	BASIC_BLOCK (i)->aux = NULL;
367
368      VARRAY_FREE (basic_block_info);
369    }
370
371  n_basic_blocks = count_basic_blocks (f);
372
373  /* Size the basic block table.  The actual structures will be allocated
374     by find_basic_blocks_1, since we want to keep the structure pointers
375     stable across calls to find_basic_blocks.  */
376  /* ??? This whole issue would be much simpler if we called find_basic_blocks
377     exactly once, and thereafter didn't keep a single long chain of
378     instructions at all until close to the end of compilation, when we
379     actually lay them out.  */
380
381  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
382
383  /* An array to record the active exception region at the end of each
384     basic block.  It is filled in by find_basic_blocks_1 for make_edges.  */
385  bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));
386
387  label_value_list = find_basic_blocks_1 (f, bb_eh_end);
388
389  /* Record the block to which an insn belongs.  */
390  /* ??? This should be done another way, by which (perhaps) a label is
391     tagged directly with the basic block that it starts.  It is used for
392     more than that currently, but IMO that is the only valid use.  */
393
394  max_uid = get_max_uid ();
395#ifdef AUTO_INC_DEC
396  /* Leave space for insns life_analysis makes in some cases for auto-inc.
397     These cases are rare, so we don't need too much space.  */
398  max_uid += max_uid / 10;
399#endif
400
401  VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
402  compute_bb_for_insn (basic_block_for_insn, max_uid);
403
404  /* Discover the edges of our cfg.  */
405
406  make_edges (label_value_list, bb_eh_end);
407
408  /* Delete unreachable blocks.  */
409
410  if (do_cleanup)
411    delete_unreachable_blocks ();
412
413  /* Mark critical edges.  */
414
415  mark_critical_edges ();
416
417  /* Discover the loop depth at the start of each basic block to aid
418     register allocation.  */
419  calculate_loop_depth (f);
420
421  /* Kill the data we won't maintain.  */
422  label_value_list = 0;
423
424#ifdef ENABLE_CHECKING
425  verify_flow_info ();
426#endif
427}
428
429/* Count the basic blocks of the function.  */
430
431static int
432count_basic_blocks (f)
433     rtx f;
434{
435  register rtx insn;
436  register RTX_CODE prev_code;
437  register int count = 0;
438  int eh_region = 0;
439  int call_had_abnormal_edge = 0;
440  rtx prev_call = NULL_RTX;
441
442  prev_code = JUMP_INSN;
443  for (insn = f; insn; insn = NEXT_INSN (insn))
444    {
445      register RTX_CODE code = GET_CODE (insn);
446
447      if (code == CODE_LABEL
448	  || (GET_RTX_CLASS (code) == 'i'
449	      && (prev_code == JUMP_INSN
450		  || prev_code == BARRIER
451		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
452	{
453	  count++;
454
455	  /* If the previous insn was a call that did not create an
456	     abnormal edge, we want to add a nop so that the CALL_INSN
457	     itself is not at basic_block_end.  This allows us to
458	     easily distinguish between normal calls and those which
459	     create abnormal edges in the flow graph.  */
460
461	  if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
462	    {
463	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
464	      emit_insn_after (nop, prev_call);
465	    }
466	}
467
468      /* Record whether this call created an edge.  */
469      if (code == CALL_INSN)
470	{
471	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
472	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
473	  prev_call = insn;
474	  call_had_abnormal_edge = 0;
475
476	  /* If there is a specified EH region, we have an edge.  */
477	  if (eh_region && region > 0)
478	    call_had_abnormal_edge = 1;
479	  else
480	    {
481	      /* If there is a nonlocal goto label and the specified
482		 region number isn't -1, we have an edge. (0 means
483		 no throw, but might have a nonlocal goto).  */
484	      if (nonlocal_goto_handler_labels && region >= 0)
485		call_had_abnormal_edge = 1;
486	    }
487	}
488      else if (code != NOTE)
489	prev_call = NULL_RTX;
490
491      if (code != NOTE)
492	prev_code = code;
493      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
494	++eh_region;
495      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
496	--eh_region;
497
498    }
499
500  /* The rest of the compiler works a bit more smoothly when we don't have to
501     check for the edge case of do-nothing functions with no basic blocks.  */
502  if (count == 0)
503    {
504      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
505      count = 1;
506    }
507
508  return count;
509}
510
511/* Find all basic blocks of the function whose first insn is F.
512   Store the correct data in the tables that describe the basic blocks,
513   set up the chains of references for each CODE_LABEL, and
514   delete any entire basic blocks that cannot be reached.
515
516   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
517   that are otherwise unreachable may be reachable with a non-local goto.
518
519   BB_EH_END is an array in which we record the list of exception regions
520   active at the end of every basic block.  */
521
522static rtx
523find_basic_blocks_1 (f, bb_eh_end)
524     rtx f;
525     rtx *bb_eh_end;
526{
527  register rtx insn, next;
528  int call_has_abnormal_edge = 0;
529  int i = 0;
530  rtx bb_note = NULL_RTX;
531  rtx eh_list = NULL_RTX;
532  rtx label_value_list = NULL_RTX;
533  rtx head = NULL_RTX;
534  rtx end = NULL_RTX;
535
536  /* We process the instructions in a slightly different way than we did
537     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
538     closed out the previous block, so that it gets attached at the proper
539     place.  Since this form should be equivalent to the previous,
540     count_basic_blocks continues to use the old form as a check.  */
541
542  for (insn = f; insn; insn = next)
543    {
544      enum rtx_code code = GET_CODE (insn);
545
546      next = NEXT_INSN (insn);
547
548      if (code == CALL_INSN)
549	{
550	  /* Record whether this call created an edge.  */
551	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
552	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
553	  call_has_abnormal_edge = 0;
554
555	  /* If there is an EH region, we have an edge.  */
556	  if (eh_list && region > 0)
557	    call_has_abnormal_edge = 1;
558	  else
559	    {
560	      /* If there is a nonlocal goto label and the specified
561		 region number isn't -1, we have an edge. (0 means
562		 no throw, but might have a nonlocal goto).  */
563	      if (nonlocal_goto_handler_labels && region >= 0)
564		call_has_abnormal_edge = 1;
565	    }
566	}
567
568      switch (code)
569	{
570	case NOTE:
571	  {
572	    int kind = NOTE_LINE_NUMBER (insn);
573
574	    /* Keep a LIFO list of the currently active exception notes.  */
575	    if (kind == NOTE_INSN_EH_REGION_BEG)
576	      eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
577	    else if (kind == NOTE_INSN_EH_REGION_END)
578	      eh_list = XEXP (eh_list, 1);
579
580	    /* Look for basic block notes with which to keep the
581	       basic_block_info pointers stable.  Unthread the note now;
582	       we'll put it back at the right place in create_basic_block.
583	       Or not at all if we've already found a note in this block.  */
584	    else if (kind == NOTE_INSN_BASIC_BLOCK)
585	      {
586		if (bb_note == NULL_RTX)
587		  bb_note = insn;
588		next = flow_delete_insn (insn);
589	      }
590
591	    break;
592	  }
593
594	case CODE_LABEL:
595	  /* A basic block starts at a label.  If we've closed one off due
596	     to a barrier or some such, no need to do it again.  */
597	  if (head != NULL_RTX)
598	    {
599	      /* While we now have edge lists with which other portions of
600		 the compiler might determine a call ending a basic block
601		 does not imply an abnormal edge, it will be a bit before
602		 everything can be updated.  So continue to emit a noop at
603		 the end of such a block.  */
604	      if (GET_CODE (end) == CALL_INSN)
605		{
606		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
607		  end = emit_insn_after (nop, end);
608		}
609
610	      bb_eh_end[i] = eh_list;
611	      create_basic_block (i++, head, end, bb_note);
612	      bb_note = NULL_RTX;
613	    }
614	  head = end = insn;
615	  break;
616
617	case JUMP_INSN:
618	  /* A basic block ends at a jump.  */
619	  if (head == NULL_RTX)
620	    head = insn;
621	  else
622	    {
623	      /* ??? Make a special check for table jumps.  The way this
624		 happens is truly and amazingly gross.  We are about to
625		 create a basic block that contains just a code label and
626		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
627		 its own natural loop.
628
629		 Prevent this bit of brain damage, pasting things together
630		 correctly in make_edges.
631
632		 The correct solution involves emitting the table directly
633		 on the tablejump instruction as a note, or JUMP_LABEL.  */
634
635	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
636		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
637		{
638		  head = end = NULL;
639		  n_basic_blocks--;
640		  break;
641		}
642	    }
643	  end = insn;
644	  goto new_bb_inclusive;
645
646	case BARRIER:
647	  /* A basic block ends at a barrier.  It may be that an unconditional
648	     jump already closed the basic block -- no need to do it again.  */
649	  if (head == NULL_RTX)
650	    break;
651
652	  /* While we now have edge lists with which other portions of the
653	     compiler might determine a call ending a basic block does not
654	     imply an abnormal edge, it will be a bit before everything can
655	     be updated.  So continue to emit a noop at the end of such a
656	     block.  */
657	  if (GET_CODE (end) == CALL_INSN)
658	    {
659	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
660	      end = emit_insn_after (nop, end);
661	    }
662	  goto new_bb_exclusive;
663
664	case CALL_INSN:
665	  /* A basic block ends at a call that can either throw or
666	     do a non-local goto.  */
667	  if (call_has_abnormal_edge)
668	    {
669	    new_bb_inclusive:
670	      if (head == NULL_RTX)
671		head = insn;
672	      end = insn;
673
674	    new_bb_exclusive:
675	      bb_eh_end[i] = eh_list;
676	      create_basic_block (i++, head, end, bb_note);
677	      head = end = NULL_RTX;
678	      bb_note = NULL_RTX;
679	      break;
680	    }
681	  /* FALLTHRU */
682
683	default:
684	  if (GET_RTX_CLASS (code) == 'i')
685	    {
686	      if (head == NULL_RTX)
687		head = insn;
688	      end = insn;
689	    }
690	  break;
691	}
692
693      if (GET_RTX_CLASS (code) == 'i')
694	{
695	  rtx note;
696
697	  /* Make a list of all labels referred to other than by jumps
698	     (which just don't have the REG_LABEL notes).
699
700	     Make a special exception for labels followed by an ADDR*VEC,
701	     as this would be a part of the tablejump setup code.
702
703	     Make a special exception for the eh_return_stub_label, which
704	     we know isn't part of any otherwise visible control flow.  */
705
706	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
707	    if (REG_NOTE_KIND (note) == REG_LABEL)
708	      {
709	        rtx lab = XEXP (note, 0), next;
710
711		if (lab == eh_return_stub_label)
712		  ;
713		else if ((next = next_nonnote_insn (lab)) != NULL
714			 && GET_CODE (next) == JUMP_INSN
715			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
716			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
717		  ;
718		else
719		  label_value_list
720		    = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
721				         label_value_list);
722	      }
723	}
724    }
725
726  if (head != NULL_RTX)
727    {
728      bb_eh_end[i] = eh_list;
729      create_basic_block (i++, head, end, bb_note);
730    }
731
732  if (i != n_basic_blocks)
733    abort ();
734
735  return label_value_list;
736}
737
738/* Create a new basic block consisting of the instructions between
739   HEAD and END inclusive.  Reuses the note and basic block struct
740   in BB_NOTE, if any.  */
741
742static void
743create_basic_block (index, head, end, bb_note)
744     int index;
745     rtx head, end, bb_note;
746{
747  basic_block bb;
748
749  if (bb_note
750      && ! RTX_INTEGRATED_P (bb_note)
751      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
752      && bb->aux == NULL)
753    {
754      /* If we found an existing note, thread it back onto the chain.  */
755
756      if (GET_CODE (head) == CODE_LABEL)
757	add_insn_after (bb_note, head);
758      else
759	{
760	  add_insn_before (bb_note, head);
761	  head = bb_note;
762	}
763    }
764  else
765    {
766      /* Otherwise we must create a note and a basic block structure.
767	 Since we allow basic block structs in rtl, give the struct
768	 the same lifetime by allocating it off the function obstack
769	 rather than using malloc.  */
770
771      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
772      memset (bb, 0, sizeof (*bb));
773
774      if (GET_CODE (head) == CODE_LABEL)
775	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
776      else
777	{
778	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
779	  head = bb_note;
780	}
781      NOTE_BASIC_BLOCK (bb_note) = bb;
782    }
783
784  /* Always include the bb note in the block.  */
785  if (NEXT_INSN (end) == bb_note)
786    end = bb_note;
787
788  bb->head = head;
789  bb->end = end;
790  bb->index = index;
791  BASIC_BLOCK (index) = bb;
792
793  /* Tag the block so that we know it has been used when considering
794     other basic block notes.  */
795  bb->aux = bb;
796}
797
798/* Records the basic block struct in BB_FOR_INSN, for every instruction
799   indexed by INSN_UID.  MAX is the size of the array.  */
800
801static void
802compute_bb_for_insn (bb_for_insn, max)
803     varray_type bb_for_insn;
804     int max;
805{
806  int i;
807
808  for (i = 0; i < n_basic_blocks; ++i)
809    {
810      basic_block bb = BASIC_BLOCK (i);
811      rtx insn, end;
812
813      end = bb->end;
814      insn = bb->head;
815      while (1)
816	{
817	  int uid = INSN_UID (insn);
818	  if (uid < max)
819	    VARRAY_BB (bb_for_insn, uid) = bb;
820	  if (insn == end)
821	    break;
822	  insn = NEXT_INSN (insn);
823	}
824    }
825}
826
827/* Free the memory associated with the edge structures.  */
828
829static void
830clear_edges ()
831{
832  int i;
833  edge n, e;
834
835  for (i = 0; i < n_basic_blocks; ++i)
836    {
837      basic_block bb = BASIC_BLOCK (i);
838
839      for (e = bb->succ; e ; e = n)
840	{
841	  n = e->succ_next;
842	  free (e);
843	}
844
845      bb->succ = 0;
846      bb->pred = 0;
847    }
848
849  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
850    {
851      n = e->succ_next;
852      free (e);
853    }
854
855  ENTRY_BLOCK_PTR->succ = 0;
856  EXIT_BLOCK_PTR->pred = 0;
857}
858
859/* Identify the edges between basic blocks.
860
861   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
862   that are otherwise unreachable may be reachable with a non-local goto.
863
864   BB_EH_END is an array indexed by basic block number in which we record
865   the list of exception regions active at the end of the basic block.  */
866
867static void
868make_edges (label_value_list, bb_eh_end)
869     rtx label_value_list;
870     rtx *bb_eh_end;
871{
872  int i;
873
874  /* Assume no computed jump; revise as we create edges.  */
875  current_function_has_computed_jump = 0;
876
877  /* By nature of the way these get numbered, block 0 is always the entry.  */
878  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
879
880  for (i = 0; i < n_basic_blocks; ++i)
881    {
882      basic_block bb = BASIC_BLOCK (i);
883      rtx insn, x, eh_list;
884      enum rtx_code code;
885      int force_fallthru = 0;
886
887      /* If we have asynchronous exceptions, scan the notes for all exception
888	 regions active in the block.  In the normal case, we only need the
889	 one active at the end of the block, which is bb_eh_end[i].  */
890
891      eh_list = bb_eh_end[i];
892      if (asynchronous_exceptions)
893	{
894	  for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
895	    {
896	      if (GET_CODE (insn) == NOTE
897		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
898		eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
899	    }
900	}
901
902      /* Now examine the last instruction of the block, and discover the
903	 ways we can leave the block.  */
904
905      insn = bb->end;
906      code = GET_CODE (insn);
907
908      /* A branch.  */
909      if (code == JUMP_INSN)
910	{
911	  rtx tmp;
912
913	  /* ??? Recognize a tablejump and do the right thing.  */
914	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
915	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
916	      && GET_CODE (tmp) == JUMP_INSN
917	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
918		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
919	    {
920	      rtvec vec;
921	      int j;
922
923	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
924		vec = XVEC (PATTERN (tmp), 0);
925	      else
926		vec = XVEC (PATTERN (tmp), 1);
927
928	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
929		make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0);
930
931	      /* Some targets (e.g., ARM) emit a conditional jump that also
932		 contains the out-of-range target.  Scan for these and
933		 add an edge if necessary.  */
934	      if ((tmp = single_set (insn)) != NULL
935		  && SET_DEST (tmp) == pc_rtx
936		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
937		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
938		make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
939
940#ifdef CASE_DROPS_THROUGH
941	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
942		 us naturally detecting fallthru into the next block.  */
943	      force_fallthru = 1;
944#endif
945	    }
946
947	  /* If this is a computed jump, then mark it as reaching
948	     everything on the label_value_list and forced_labels list.  */
949	  else if (computed_jump_p (insn))
950	    {
951	      current_function_has_computed_jump = 1;
952
953	      for (x = label_value_list; x; x = XEXP (x, 1))
954		make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
955
956	      for (x = forced_labels; x; x = XEXP (x, 1))
957		make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
958	    }
959
960	  /* Returns create an exit out.  */
961	  else if (returnjump_p (insn))
962	    make_edge (bb, EXIT_BLOCK_PTR, 0);
963
964	  /* Otherwise, we have a plain conditional or unconditional jump.  */
965	  else
966	    {
967	      if (! JUMP_LABEL (insn))
968		abort ();
969	      make_label_edge (bb, JUMP_LABEL (insn), 0);
970	    }
971	}
972
973      /* If this is a CALL_INSN, then mark it as reaching the active EH
974	 handler for this CALL_INSN.  If we're handling asynchronous
975	 exceptions then any insn can reach any of the active handlers.
976
977	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
978
979      if (code == CALL_INSN || asynchronous_exceptions)
980	{
981	  int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
982	  handler_info *ptr;
983
984	  /* Use REG_EH_RETHROW and REG_EH_REGION if available.  */
985	  /* ??? REG_EH_REGION is not generated presently.  Is it
986	     intended that there be multiple notes for the regions?
987	     Or is my eh_list collection redundant with handler linking?  */
988
989	  x = find_reg_note (insn, REG_EH_RETHROW, 0);
990	  if (!x)
991	    x = find_reg_note (insn, REG_EH_REGION, 0);
992	  if (x)
993	    {
994	      if (XINT (XEXP (x, 0), 0) > 0)
995		{
996		  ptr = get_first_handler (XINT (XEXP (x, 0), 0));
997		  while (ptr)
998		    {
999		      make_label_edge (bb, ptr->handler_label,
1000				       EDGE_ABNORMAL | EDGE_EH | is_call);
1001		      ptr = ptr->next;
1002		    }
1003		}
1004	    }
1005	  else
1006	    {
1007	      for (x = eh_list; x; x = XEXP (x, 1))
1008		{
1009		  ptr = get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x, 0)));
1010		  while (ptr)
1011		    {
1012		      make_label_edge (bb, ptr->handler_label,
1013				       EDGE_ABNORMAL | EDGE_EH | is_call);
1014		      ptr = ptr->next;
1015		    }
1016		}
1017	    }
1018
1019	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
1020	    {
1021	      /* ??? This could be made smarter: in some cases it's possible
1022		 to tell that certain calls will not do a nonlocal goto.
1023
1024		 For example, if the nested functions that do the nonlocal
1025		 gotos do not have their addresses taken, then only calls to
1026		 those functions or to other nested functions that use them
1027		 could possibly do nonlocal gotos.  */
1028
1029	      for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1030	        make_label_edge (bb, XEXP (x, 0),
1031			         EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1032	    }
1033	}
1034
1035      /* We know something about the structure of the function __throw in
1036	 libgcc2.c.  It is the only function that ever contains eh_stub
1037	 labels.  It modifies its return address so that the last block
1038	 returns to one of the eh_stub labels within it.  So we have to
1039	 make additional edges in the flow graph.  */
1040      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1041	make_label_edge (bb, eh_return_stub_label, EDGE_EH);
1042
1043      /* Find out if we can drop through to the next block.  */
1044      insn = next_nonnote_insn (insn);
1045      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1046	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1047      else if (i + 1 < n_basic_blocks)
1048	{
1049	  rtx tmp = BLOCK_HEAD (i + 1);
1050	  if (GET_CODE (tmp) == NOTE)
1051	    tmp = next_nonnote_insn (tmp);
1052	  if (force_fallthru || insn == tmp)
1053	    make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1054	}
1055    }
1056}
1057
1058/* Create an edge between two basic blocks.  FLAGS are auxiliary information
1059   about the edge that is accumulated between calls.  */
1060
1061static void
1062make_edge (src, dst, flags)
1063     basic_block src, dst;
1064     int flags;
1065{
1066  edge e;
1067
1068  /* Make sure we don't add duplicate edges.  */
1069
1070  for (e = src->succ; e ; e = e->succ_next)
1071    if (e->dest == dst)
1072      {
1073	e->flags |= flags;
1074	return;
1075      }
1076
1077  e = (edge) xcalloc (1, sizeof (*e));
1078
1079  e->succ_next = src->succ;
1080  e->pred_next = dst->pred;
1081  e->src = src;
1082  e->dest = dst;
1083  e->flags = flags;
1084
1085  src->succ = e;
1086  dst->pred = e;
1087}
1088
1089/* Create an edge from a basic block to a label.  */
1090
1091static void
1092make_label_edge (src, label, flags)
1093     basic_block src;
1094     rtx label;
1095     int flags;
1096{
1097  if (GET_CODE (label) != CODE_LABEL)
1098    abort ();
1099
1100  /* If the label was never emitted, this insn is junk, but avoid a
1101     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
1102     as a result of a syntax error and a diagnostic has already been
1103     printed.  */
1104
1105  if (INSN_UID (label) == 0)
1106    return;
1107
1108  make_edge (src, BLOCK_FOR_INSN (label), flags);
1109}
1110
1111/* Identify critical edges and set the bits appropriately.  */
1112static void
1113mark_critical_edges ()
1114{
1115  int i, n = n_basic_blocks;
1116  basic_block bb;
1117
1118  /* We begin with the entry block.  This is not terribly important now,
1119     but could be if a front end (Fortran) implemented alternate entry
1120     points.  */
1121  bb = ENTRY_BLOCK_PTR;
1122  i = -1;
1123
1124  while (1)
1125    {
1126      edge e;
1127
1128      /* (1) Critical edges must have a source with multiple successors.  */
1129      if (bb->succ && bb->succ->succ_next)
1130	{
1131	  for (e = bb->succ; e ; e = e->succ_next)
1132	    {
1133	      /* (2) Critical edges must have a destination with multiple
1134		 predecessors.  Note that we know there is at least one
1135		 predecessor -- the edge we followed to get here.  */
1136	      if (e->dest->pred->pred_next)
1137		e->flags |= EDGE_CRITICAL;
1138	      else
1139		e->flags &= ~EDGE_CRITICAL;
1140	    }
1141	}
1142      else
1143	{
1144	  for (e = bb->succ; e ; e = e->succ_next)
1145	    e->flags &= ~EDGE_CRITICAL;
1146	}
1147
1148      if (++i >= n)
1149	break;
1150      bb = BASIC_BLOCK (i);
1151    }
1152}
1153
1154/* Split a (typically critical) edge.  Return the new block.
1155   Abort on abnormal edges.
1156
1157   ??? The code generally expects to be called on critical edges.
1158   The case of a block ending in an unconditional jump to a
1159   block with multiple predecessors is not handled optimally.  */
1160
1161basic_block
1162split_edge (edge_in)
1163     edge edge_in;
1164{
1165  basic_block old_pred, bb, old_succ;
1166  edge edge_out;
1167  rtx bb_note;
1168  int i;
1169
1170  /* Abnormal edges cannot be split.  */
1171  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1172    abort ();
1173
1174  old_pred = edge_in->src;
1175  old_succ = edge_in->dest;
1176
1177  /* Remove the existing edge from the destination's pred list.  */
1178  {
1179    edge *pp;
1180    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1181      continue;
1182    *pp = edge_in->pred_next;
1183    edge_in->pred_next = NULL;
1184  }
1185
1186  /* Create the new structures.  */
1187  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1188  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1189
1190  memset (bb, 0, sizeof (*bb));
1191  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
1192  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1193  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1194
1195  /* ??? This info is likely going to be out of date very soon.  */
1196  CLEAR_REG_SET (bb->local_set);
1197  if (old_succ->global_live_at_start)
1198    {
1199      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1200      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1201    }
1202  else
1203    {
1204      CLEAR_REG_SET (bb->global_live_at_start);
1205      CLEAR_REG_SET (bb->global_live_at_end);
1206    }
1207
1208  /* Wire them up.  */
1209  bb->pred = edge_in;
1210  bb->succ = edge_out;
1211
1212  edge_in->dest = bb;
1213  edge_in->flags &= ~EDGE_CRITICAL;
1214
1215  edge_out->pred_next = old_succ->pred;
1216  edge_out->succ_next = NULL;
1217  edge_out->src = bb;
1218  edge_out->dest = old_succ;
1219  edge_out->flags = EDGE_FALLTHRU;
1220  edge_out->probability = REG_BR_PROB_BASE;
1221
1222  old_succ->pred = edge_out;
1223
1224  /* Tricky case -- if there existed a fallthru into the successor
1225     (and we're not it) we must add a new unconditional jump around
1226     the new block we're actually interested in.
1227
1228     Further, if that edge is critical, this means a second new basic
1229     block must be created to hold it.  In order to simplify correct
1230     insn placement, do this before we touch the existing basic block
1231     ordering for the block we were really wanting.  */
1232  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1233    {
1234      edge e;
1235      for (e = edge_out->pred_next; e ; e = e->pred_next)
1236	if (e->flags & EDGE_FALLTHRU)
1237	  break;
1238
1239      if (e)
1240	{
1241	  basic_block jump_block;
1242	  rtx pos;
1243
1244	  if ((e->flags & EDGE_CRITICAL) == 0)
1245	    {
1246	      /* Non critical -- we can simply add a jump to the end
1247		 of the existing predecessor.  */
1248	      jump_block = e->src;
1249	    }
1250	  else
1251	    {
1252	      /* We need a new block to hold the jump.  The simplest
1253	         way to do the bulk of the work here is to recursively
1254	         call ourselves.  */
1255	      jump_block = split_edge (e);
1256	      e = jump_block->succ;
1257	    }
1258
1259	  /* Now add the jump insn ...  */
1260	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
1261				      jump_block->end);
1262	  jump_block->end = pos;
1263	  emit_barrier_after (pos);
1264
1265	  /* ... let jump know that label is in use, ...  */
1266	  ++LABEL_NUSES (old_succ->head);
1267
1268	  /* ... and clear fallthru on the outgoing edge.  */
1269	  e->flags &= ~EDGE_FALLTHRU;
1270
1271	  /* Continue splitting the interesting edge.  */
1272	}
1273    }
1274
1275  /* Place the new block just in front of the successor.  */
1276  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1277  for (i = n_basic_blocks - 1; i > old_succ->index; --i)
1278    {
1279      basic_block tmp = BASIC_BLOCK (i - 1);
1280      BASIC_BLOCK (i) = tmp;
1281      tmp->index = i;
1282    }
1283  BASIC_BLOCK (i) = bb;
1284  bb->index = i;
1285
1286  /* Create the basic block note.  */
1287  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1288  NOTE_BASIC_BLOCK (bb_note) = bb;
1289  bb->head = bb->end = bb_note;
1290
1291  /* Not quite simple -- for non-fallthru edges, we must adjust the
1292     predecessor's jump instruction to target our new block.  */
1293  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1294    {
1295      rtx tmp, insn = old_pred->end;
1296      rtx old_label = old_succ->head;
1297      rtx new_label = gen_label_rtx ();
1298
1299      if (GET_CODE (insn) != JUMP_INSN)
1300	abort ();
1301
1302      /* ??? Recognize a tablejump and adjust all matching cases.  */
1303      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1304	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1305	  && GET_CODE (tmp) == JUMP_INSN
1306	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1307	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1308	{
1309	  rtvec vec;
1310	  int j;
1311
1312	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1313	    vec = XVEC (PATTERN (tmp), 0);
1314	  else
1315	    vec = XVEC (PATTERN (tmp), 1);
1316
1317	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1318	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1319	      {
1320	        RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1321		--LABEL_NUSES (old_label);
1322		++LABEL_NUSES (new_label);
1323	      }
1324	}
1325      else
1326	{
1327	  /* This would have indicated an abnormal edge.  */
1328	  if (computed_jump_p (insn))
1329	    abort ();
1330
1331	  /* A return instruction can't be redirected.  */
1332	  if (returnjump_p (insn))
1333	    abort ();
1334
1335	  /* If the insn doesn't go where we think, we're confused.  */
1336	  if (JUMP_LABEL (insn) != old_label)
1337	    abort ();
1338
1339	  redirect_jump (insn, new_label);
1340	}
1341
1342      emit_label_before (new_label, bb_note);
1343      bb->head = new_label;
1344    }
1345
1346  return bb;
1347}
1348
1349/* Queue instructions for insertion on an edge between two basic blocks.
1350   The new instructions and basic blocks (if any) will not appear in the
1351   CFG until commit_edge_insertions is called.  */
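
/* For example, a pass that wants to materialize a register copy on a
   particular edge E might do (a sketch; DEST_REG and SRC_REG are
   illustrative, and gen_move_insn is assumed to be the generic move
   generator from expr.c):

	insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
	...
	commit_edge_insertions ();

   Note that, per the check below, nothing may be inserted on an edge
   that is both abnormal and critical.  */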
1352
1353void
1354insert_insn_on_edge (pattern, e)
1355     rtx pattern;
1356     edge e;
1357{
1358  /* We cannot insert instructions on an abnormal critical edge.
1359     It will be easier to find the culprit if we die now.  */
1360  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1361      == (EDGE_ABNORMAL|EDGE_CRITICAL))
1362    abort ();
1363
1364  if (e->insns == NULL_RTX)
1365    start_sequence ();
1366  else
1367    push_to_sequence (e->insns);
1368
1369  emit_insn (pattern);
1370
1371  e->insns = get_insns ();
1372  end_sequence ();
1373}
1374
1375/* Update the CFG for the instructions queued on edge E.  */
1376
1377static void
1378commit_one_edge_insertion (e)
1379     edge e;
1380{
1381  rtx before = NULL_RTX, after = NULL_RTX, tmp;
1382  basic_block bb;
1383
1384  /* Figure out where to put these things.  If the destination has
1385     one predecessor, insert there.  Except for the exit block.  */
1386  if (e->dest->pred->pred_next == NULL
1387      && e->dest != EXIT_BLOCK_PTR)
1388    {
1389      bb = e->dest;
1390
1391      /* Get the location correct wrt a code label, and "nice" wrt
1392	 a basic block note, and before everything else.  */
1393      tmp = bb->head;
1394      if (GET_CODE (tmp) == CODE_LABEL)
1395	tmp = NEXT_INSN (tmp);
1396      if (GET_CODE (tmp) == NOTE
1397	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1398	tmp = NEXT_INSN (tmp);
1399      if (tmp == bb->head)
1400	before = tmp;
1401      else
1402	after = PREV_INSN (tmp);
1403    }
1404
1405  /* If the source has one successor and the edge is not abnormal,
1406     insert there.  Except for the entry block.  */
1407  else if ((e->flags & EDGE_ABNORMAL) == 0
1408	   && e->src->succ->succ_next == NULL
1409	   && e->src != ENTRY_BLOCK_PTR)
1410    {
1411      bb = e->src;
1412      if (GET_CODE (bb->end) == JUMP_INSN)
1413	{
1414	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
1415	     a jump with delay slots already filled?  */
1416	  if (! simplejump_p (bb->end))
1417	    abort ();
1418
1419	  before = bb->end;
1420	}
1421      else
1422	{
1423	  /* We'd better be fallthru, or we've lost track of what's what.  */
1424	  if ((e->flags & EDGE_FALLTHRU) == 0)
1425	    abort ();
1426
1427	  after = bb->end;
1428	}
1429    }
1430
1431  /* Otherwise we must split the edge.  */
1432  else
1433    {
1434      bb = split_edge (e);
1435      after = bb->end;
1436    }
1437
1438  /* Now that we've found the spot, do the insertion.  */
1439  tmp = e->insns;
1440  e->insns = NULL_RTX;
1441  if (before)
1442    {
1443      emit_insns_before (tmp, before);
1444      if (before == bb->head)
1445	bb->head = tmp;
1446    }
1447  else
1448    {
1449      tmp = emit_insns_after (tmp, after);
1450      if (after == bb->end)
1451	bb->end = tmp;
1452    }
1453}
1454
1455/* Update the CFG for all queued instructions.  */
1456
1457void
1458commit_edge_insertions ()
1459{
1460  int i;
1461  basic_block bb;
1462
1463  i = -1;
1464  bb = ENTRY_BLOCK_PTR;
1465  while (1)
1466    {
1467      edge e, next;
1468
1469      for (e = bb->succ; e ; e = next)
1470	{
1471	  next = e->succ_next;
1472	  if (e->insns)
1473	    commit_one_edge_insertion (e);
1474	}
1475
1476      if (++i >= n_basic_blocks)
1477	break;
1478      bb = BASIC_BLOCK (i);
1479    }
1480}
1481
1482/* Delete all unreachable basic blocks.   */
1483
1484static void
1485delete_unreachable_blocks ()
1486{
1487  basic_block *worklist, *tos;
1488  int deleted_handler;
1489  edge e;
1490  int i, n;
1491
1492  n = n_basic_blocks;
1493  tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1494
1495  /* Use basic_block->aux as a marker.  Clear them all.  */
1496
1497  for (i = 0; i < n; ++i)
1498    BASIC_BLOCK (i)->aux = NULL;
1499
1500  /* Add our starting points to the worklist.  Almost always there will
1501     be only one.  It isn't inconceivable that we might one day directly
1502     support Fortran alternate entry points.  */
1503
1504  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1505    {
1506      *tos++ = e->dest;
1507
1508      /* Mark the block with a handy non-null value.  */
1509      e->dest->aux = e;
1510    }
1511
1512  /* Iterate: find everything reachable from what we've already seen.  */
1513
1514  while (tos != worklist)
1515    {
1516      basic_block b = *--tos;
1517
1518      for (e = b->succ; e ; e = e->succ_next)
1519	if (!e->dest->aux)
1520	  {
1521	    *tos++ = e->dest;
1522	    e->dest->aux = e;
1523	  }
1524    }
1525
1526  /* Delete all unreachable basic blocks.  Count down so that we don't
1527     interfere with the block renumbering that happens in delete_block.  */
1528
1529  deleted_handler = 0;
1530
1531  for (i = n - 1; i >= 0; --i)
1532    {
1533      basic_block b = BASIC_BLOCK (i);
1534
1535      if (b->aux != NULL)
1536	/* This block was found.  Tidy up the mark.  */
1537	b->aux = NULL;
1538      else
1539	deleted_handler |= delete_block (b);
1540    }
1541
1542  /* Fix up edges that now fall through, or rather should now fall through
1543     but previously required a jump around now deleted blocks.  Simplify
1544     the search by only examining blocks numerically adjacent, since this
1545     is how find_basic_blocks created them.  */
1546
1547  for (i = 1; i < n_basic_blocks; ++i)
1548    {
1549      basic_block b = BASIC_BLOCK (i - 1);
1550      basic_block c = BASIC_BLOCK (i);
1551      edge s;
1552
1553      /* We care about simple conditional or unconditional jumps with
1554	 a single successor.
1555
1556	 If we had a conditional branch to the next instruction when
1557	 find_basic_blocks was called, then there will only be one
1558	 out edge for the block which ended with the conditional
1559	 branch (since we do not create duplicate edges).
1560
1561	 Furthermore, the edge will be marked as a fallthru because we
1562	 merge the flags for the duplicate edges.  So we do not want to
1563	 check that the edge is not a FALLTHRU edge.  */
1564      if ((s = b->succ) != NULL
1565	  && s->succ_next == NULL
1566	  && s->dest == c
1567	  /* If the last insn is not a normal conditional jump
1568	     (or an unconditional jump), then we can not tidy the
1569	     fallthru edge because we can not delete the jump.  */
1570	  && GET_CODE (b->end) == JUMP_INSN
1571	  && condjump_p (b->end))
1572	tidy_fallthru_edge (s, b, c);
1573    }
1574
1575  /* Attempt to merge blocks as made possible by edge removal.  If a block
1576     has only one successor, and the successor has only one predecessor,
1577     they may be combined.  */
1578
1579  for (i = 0; i < n_basic_blocks; )
1580    {
1581      basic_block c, b = BASIC_BLOCK (i);
1582      edge s;
1583
1584      /* A loop because chains of blocks might be combinable.  */
1585      while ((s = b->succ) != NULL
1586	     && s->succ_next == NULL
1587	     && (s->flags & EDGE_EH) == 0
1588	     && (c = s->dest) != EXIT_BLOCK_PTR
1589	     && c->pred->pred_next == NULL
1590	     /* If the last insn is not a normal conditional jump
1591		(or an unconditional jump), then we can not merge
1592		the blocks because we can not delete the jump.  */
1593	     && GET_CODE (b->end) == JUMP_INSN
1594	     && condjump_p (b->end)
1595	     && merge_blocks (s, b, c))
1596	continue;
1597
1598      /* Don't get confused by the index shift caused by deleting blocks.  */
1599      i = b->index + 1;
1600    }
1601
1602  /* If we deleted an exception handler, we may have EH region begin/end
1603     blocks to remove as well. */
1604  if (deleted_handler)
1605    delete_eh_regions ();
1606}
1607
1608/* Find EH regions for which there is no longer a handler, and delete them.  */
1609
1610static void
1611delete_eh_regions ()
1612{
1613  rtx insn;
1614
1615  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1616    if (GET_CODE (insn) == NOTE)
1617      {
1618	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1619	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1620	  {
1621	    int num = CODE_LABEL_NUMBER (insn);
1622	    /* A NULL handler indicates a region is no longer needed */
1623	    if (get_first_handler (num) == NULL)
1624	      {
1625		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1626		NOTE_SOURCE_FILE (insn) = 0;
1627	      }
1628	  }
1629      }
1630}
1631
1632/* Return true if NOTE is not one of the ones that must be kept paired,
1633   so that we may simply delete them.  */
1634
1635static int
1636can_delete_note_p (note)
1637     rtx note;
1638{
1639  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1640	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1641}
1642
1643/* Unlink a chain of insns between START and FINISH, leaving notes
1644   that must be paired.  */
1645
1646static void
1647delete_insn_chain (start, finish)
1648     rtx start, finish;
1649{
1650  /* Unchain the insns one by one.  It would be quicker to delete all
1651     of these with a single unchaining, rather than one at a time, but
1652     we need to keep the NOTEs.  */
1653
1654  rtx next;
1655
1656  while (1)
1657    {
1658      next = NEXT_INSN (start);
1659      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1660	;
1661      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1662	;
1663      else
1664	next = flow_delete_insn (start);
1665
1666      if (start == finish)
1667	break;
1668      start = next;
1669    }
1670}
1671
1672/* Delete the insns in a (non-live) block.  We physically delete every
1673   non-deleted-note insn, and update the flow graph appropriately.
1674
1675   Return nonzero if we deleted an exception handler.  */
1676
1677/* ??? Preserving all such notes strikes me as wrong.  It would be nice
1678   to post-process the stream to remove empty blocks, loops, ranges, etc.  */
1679
1680static int
1681delete_block (b)
1682     basic_block b;
1683{
1684  int deleted_handler = 0;
1685  rtx insn, end, tmp;
1686
1687  /* If the head of this block is a CODE_LABEL, then it might be the
1688     label for an exception handler which can't be reached.
1689
1690     We need to remove the label from the exception_handler_label list
1691     and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
1692     notes.  */
1693
1694  insn = b->head;
1695
1696  if (GET_CODE (insn) == CODE_LABEL)
1697    {
1698      rtx x, *prev = &exception_handler_labels;
1699
1700      for (x = exception_handler_labels; x; x = XEXP (x, 1))
1701	{
1702	  if (XEXP (x, 0) == insn)
1703	    {
1704	      /* Found a match, splice this label out of the EH label list.  */
1705	      *prev = XEXP (x, 1);
1706	      XEXP (x, 1) = NULL_RTX;
1707	      XEXP (x, 0) = NULL_RTX;
1708
1709	      /* Remove the handler from all regions */
1710	      remove_handler (insn);
1711	      deleted_handler = 1;
1712	      break;
1713	    }
1714	  prev = &XEXP (x, 1);
1715	}
1716
1717      /* This label may be referenced by code solely for its value, or
1718	 referenced by static data, or something.  We have determined
1719	 that it is not reachable, but cannot delete the label itself.
1720	 Save code space and continue to delete the balance of the block,
1721	 along with properly updating the cfg.  */
1722      if (!can_delete_label_p (insn))
1723	{
1724	  /* If we've only got one of these, skip the whole deleting
1725	     insns thing.  */
1726	  if (insn == b->end)
1727	    goto no_delete_insns;
1728	  insn = NEXT_INSN (insn);
1729	}
1730    }
1731
1732  /* Include any jump table following the basic block.  */
1733  end = b->end;
1734  if (GET_CODE (end) == JUMP_INSN
1735      && (tmp = JUMP_LABEL (end)) != NULL_RTX
1736      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1737      && GET_CODE (tmp) == JUMP_INSN
1738      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1739	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1740    end = tmp;
1741
1742  /* Include any barrier that may follow the basic block.  */
1743  tmp = next_nonnote_insn (b->end);
1744  if (tmp && GET_CODE (tmp) == BARRIER)
1745    end = tmp;
1746
1747  delete_insn_chain (insn, end);
1748
1749no_delete_insns:
1750
1751  /* Remove the edges into and out of this block.  Note that there may
1752     indeed be edges in, if we are removing an unreachable loop.  */
1753  {
1754    edge e, next, *q;
1755
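    /* For each incoming edge E, chase down the pointer in E's source
       block's successor chain that refers to E, splice E out of that
       chain, and free it.  The second loop does the same for outgoing
       edges, using the destination blocks' predecessor chains.  */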
1756    for (e = b->pred; e ; e = next)
1757      {
1758	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1759	  continue;
1760	*q = e->succ_next;
1761	next = e->pred_next;
1762	free (e);
1763      }
1764    for (e = b->succ; e ; e = next)
1765      {
1766	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1767	  continue;
1768	*q = e->pred_next;
1769	next = e->succ_next;
1770	free (e);
1771      }
1772
1773    b->pred = NULL;
1774    b->succ = NULL;
1775  }
1776
1777  /* Remove the basic block from the array, and compact behind it.  */
1778  expunge_block (b);
1779
1780  return deleted_handler;
1781}
1782
1783/* Remove block B from the basic block array and compact behind it.  */
1784
1785static void
1786expunge_block (b)
1787     basic_block b;
1788{
1789  int i, n = n_basic_blocks;
1790
1791  for (i = b->index; i + 1 < n; ++i)
1792    {
1793      basic_block x = BASIC_BLOCK (i + 1);
1794      BASIC_BLOCK (i) = x;
1795      x->index = i;
1796    }
1797
1798  basic_block_info->num_elements--;
1799  n_basic_blocks--;
1800}
1801
1802/* Delete INSN by patching it out.  Return the next insn.  */
1803
1804static rtx
1805flow_delete_insn (insn)
1806     rtx insn;
1807{
1808  rtx prev = PREV_INSN (insn);
1809  rtx next = NEXT_INSN (insn);
1810  rtx note;
1811
1812  PREV_INSN (insn) = NULL_RTX;
1813  NEXT_INSN (insn) = NULL_RTX;
1814
1815  if (prev)
1816    NEXT_INSN (prev) = next;
1817  if (next)
1818    PREV_INSN (next) = prev;
1819  else
1820    set_last_insn (prev);
1821
1822  if (GET_CODE (insn) == CODE_LABEL)
1823    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1824
1825  /* If deleting a jump, decrement the use count of the label.  Deleting
1826     the label itself should happen in the normal course of block merging.  */
1827  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1828    LABEL_NUSES (JUMP_LABEL (insn))--;
1829
1830  /* Also if deleting an insn that references a label.  */
1831  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
1832    LABEL_NUSES (XEXP (note, 0))--;
1833
1834  return next;
1835}
1836
1837/* True if a given label can be deleted.  */
1838
1839static int
1840can_delete_label_p (label)
1841     rtx label;
1842{
1843  rtx x;
1844
1845  if (LABEL_PRESERVE_P (label))
1846    return 0;
1847
1848  for (x = forced_labels; x ; x = XEXP (x, 1))
1849    if (label == XEXP (x, 0))
1850      return 0;
1851  for (x = label_value_list; x ; x = XEXP (x, 1))
1852    if (label == XEXP (x, 0))
1853      return 0;
1854  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1855    if (label == XEXP (x, 0))
1856      return 0;
1857
1858  /* User declared labels must be preserved.  */
1859  if (LABEL_NAME (label) != 0)
1860    return 0;
1861
1862  return 1;
1863}
1864
1865/* Blocks A and B are to be merged into a single block.  The insns
1866   are already contiguous, hence `nomove'.  */
1867
1868static void
1869merge_blocks_nomove (a, b)
1870     basic_block a, b;
1871{
1872  edge e;
1873  rtx b_head, b_end, a_end;
1874  int b_empty = 0;
1875
1876  /* If there was a CODE_LABEL beginning B, delete it.  */
1877  b_head = b->head;
1878  b_end = b->end;
1879  if (GET_CODE (b_head) == CODE_LABEL)
1880    {
1881      /* Detect basic blocks with nothing but a label.  This can happen
1882	 in particular at the end of a function.  */
1883      if (b_head == b_end)
1884	b_empty = 1;
1885      b_head = flow_delete_insn (b_head);
1886    }
1887
1888  /* Delete the basic block note.  */
1889  if (GET_CODE (b_head) == NOTE
1890      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
1891    {
1892      if (b_head == b_end)
1893	b_empty = 1;
1894      b_head = flow_delete_insn (b_head);
1895    }
1896
1897  /* If there was a jump out of A, delete it.  */
1898  a_end = a->end;
1899  if (GET_CODE (a_end) == JUMP_INSN)
1900    {
1901      rtx prev;
1902
1903      prev = prev_nonnote_insn (a_end);
1904      if (!prev)
1905	prev = a->head;
1906
1907#ifdef HAVE_cc0
1908      /* If this was a conditional jump, we need to also delete
1909	 the insn that set cc0.  */
1910
1911      if (prev && sets_cc0_p (prev))
1912	{
1913          rtx tmp = prev;
1914	  prev = prev_nonnote_insn (prev);
1915	  if (!prev)
1916	    prev = a->head;
1917	  flow_delete_insn (tmp);
1918	}
1919#endif
1920
1921      /* Note that a->head != a->end, since we should have at least a
1922	 bb note plus the jump, so prev != a_end.  */
1923      flow_delete_insn (a_end);
1924      a_end = prev;
1925    }
1926
1927  /* By definition, there should only be one successor of A, and that is
1928     B.  Free that edge struct.  */
1929  free (a->succ);
1930
1931  /* Adjust the edges out of B for the new owner.  */
1932  for (e = b->succ; e ; e = e->succ_next)
1933    e->src = a;
1934  a->succ = b->succ;
1935
1936  /* Reassociate the insns of B with A.  */
1937  if (!b_empty)
1938    {
1939      BLOCK_FOR_INSN (b_head) = a;
1940      while (b_head != b_end)
1941	{
1942	  b_head = NEXT_INSN (b_head);
1943	  BLOCK_FOR_INSN (b_head) = a;
1944	}
1945      a_end = b_head;
1946    }
1947  a->end = a_end;
1948
1949  /* Compact the basic block array.  */
1950  expunge_block (b);
1951}
1952
1953/* Attempt to merge basic blocks that are potentially non-adjacent.
1954   Return true iff the attempt succeeded.  */
1955
1956static int
1957merge_blocks (e, b, c)
1958     edge e;
1959     basic_block b, c;
1960{
1961  /* If B has a fallthru edge to C, no need to move anything.  */
1962  if (!(e->flags & EDGE_FALLTHRU))
1963    {
1964      /* ??? From here on out we must make sure to not munge nesting
1965	 of exception regions and lexical blocks.  Need to think about
1966	 these cases before this gets implemented.  */
1967      return 0;
1968
1969      /* If C has an outgoing fallthru, and B does not have an incoming
1970	 fallthru, move B before C.  The later clause is somewhat arbitrary,
1971	 but avoids modifying blocks other than the two we've been given.  */
1972
1973      /* Otherwise, move C after B.  If C had a fallthru, which doesn't
1974	 happen to be the physical successor to B, insert an unconditional
1975	 branch.  If C already ended with a conditional branch, the new
1976	 jump must go in a new basic block D.  */
1977    }
1978
1979  /* If a label still appears somewhere and we cannot delete the label,
1980     then we cannot merge the blocks.  The edge was tidied already.  */
1981  {
1982    rtx insn, stop = NEXT_INSN (c->head);
1983    for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
1984      if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
1985	return 0;
1986  }
1987
1988  merge_blocks_nomove (b, c);
1989  return 1;
1990}
1991
1992/* The given edge should potentially be a fallthru edge.  If that is in
1993   fact true, delete the unconditional jump and barriers that are in
1994   the way.  */
1995
1996static void
1997tidy_fallthru_edge (e, b, c)
1998     edge e;
1999     basic_block b, c;
2000{
2001  rtx q;
2002
2003  /* ??? In a late-running flow pass, other folks may have deleted basic
2004     blocks by nopping out blocks, leaving multiple BARRIERs between here
2005     and the target label.  They ought to be chastised and fixed.
2006
2007     We can also wind up with a sequence of undeletable labels between
2008     one block and the next.
2009
2010     So search through a sequence of barriers, labels, and notes for
2011     the head of block C and assert that we really do fall through.  */
2012
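  /* That is, if any real insn intervenes between the end of B and the
     head of C, B does not actually fall through to C, so leave the edge
     alone.  */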
2013  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2014    return;
2015
2016  /* Remove what will soon cease being the jump insn from the source block.
2017     If block B consisted only of this single jump, turn it into a deleted
2018     note.  */
2019  q = b->end;
2020  if (GET_CODE (q) == JUMP_INSN)
2021    {
2022#ifdef HAVE_cc0
2023      /* If this was a conditional jump, we need to also delete
2024	 the insn that set cc0.  */
2025      if (! simplejump_p (q) && condjump_p (q))
2026	q = PREV_INSN (q);
2027#endif
2028
2029      if (b->head == q)
2030	{
2031	  PUT_CODE (q, NOTE);
2032	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2033	  NOTE_SOURCE_FILE (q) = 0;
2034	}
2035      else
2036	b->end = q = PREV_INSN (q);
2037    }
2038
2039  /* Selectively unlink the sequence.  */
2040  if (q != PREV_INSN (c->head))
2041    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2042
2043  e->flags |= EDGE_FALLTHRU;
2044}
2045
2046/* Discover and record the loop depth at the head of each basic block.  */
2047
2048static void
2049calculate_loop_depth (insns)
2050     rtx insns;
2051{
2052  basic_block bb;
2053  rtx insn;
2054  int i = 0, depth = 1;
2055
2056  bb = BASIC_BLOCK (i);
2057  for (insn = insns; insn ; insn = NEXT_INSN (insn))
2058    {
2059      if (insn == bb->head)
2060	{
2061	  bb->loop_depth = depth;
2062	  if (++i >= n_basic_blocks)
2063	    break;
2064	  bb = BASIC_BLOCK (i);
2065	}
2066
2067      if (GET_CODE (insn) == NOTE)
2068	{
2069	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2070	    depth++;
2071	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2072	    depth--;
2073
2074	  /* If DEPTH == 0, there has been a bookkeeping error.  */
2075	  if (depth == 0)
2076	    abort ();
2077	}
2078    }
2079}
2080
2081/* Perform data flow analysis.
2082   F is the first insn of the function and NREGS the number of register numbers
2083   in use.  */
2084
2085void
2086life_analysis (f, nregs, file, remove_dead_code)
2087     rtx f;
2088     int nregs;
2089     FILE *file;
2090     int remove_dead_code;
2091{
2092#ifdef ELIMINABLE_REGS
2093  register size_t i;
2094  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2095#endif
2096
2097  /* Record which registers will be eliminated.  We use this in
2098     mark_used_regs.  */
2099
2100  CLEAR_HARD_REG_SET (elim_reg_set);
2101
2102#ifdef ELIMINABLE_REGS
2103  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2104    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2105#else
2106  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2107#endif
2108
2109  /* Allocate a bitmap to be filled in by record_volatile_insns.  */
2110  uid_volatile = BITMAP_ALLOCA ();
2111
2112  /* We want alias analysis information for local dead store elimination.  */
2113  init_alias_analysis ();
2114  life_analysis_1 (f, nregs, remove_dead_code);
2115  end_alias_analysis ();
2116
2117  if (file)
2118    dump_flow_info (file);
2119
2120  BITMAP_FREE (uid_volatile);
2121  free_basic_block_vars (1);
2122}
2123
2124/* Free the variables allocated by find_basic_blocks.
2125
2126   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */
2127
2128void
2129free_basic_block_vars (keep_head_end_p)
2130     int keep_head_end_p;
2131{
2132  if (basic_block_for_insn)
2133    {
2134      VARRAY_FREE (basic_block_for_insn);
2135      basic_block_for_insn = NULL;
2136    }
2137
2138  if (! keep_head_end_p)
2139    {
2140      clear_edges ();
2141      VARRAY_FREE (basic_block_info);
2142      n_basic_blocks = 0;
2143
2144      ENTRY_BLOCK_PTR->aux = NULL;
2145      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2146      EXIT_BLOCK_PTR->aux = NULL;
2147      EXIT_BLOCK_PTR->global_live_at_start = NULL;
2148    }
2149}
2150
2151/* Return nonzero if the destination of SET equals the source.  */
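/* For example, (set (reg 1) (reg 1)), or a SET whose source and
   destination are SUBREGs of the same register with the same
   SUBREG_WORD, is such a no-op.  */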
2152static int
2153set_noop_p (set)
2154     rtx set;
2155{
2156  rtx src = SET_SRC (set);
2157  rtx dst = SET_DEST (set);
2158  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2159      && REGNO (src) == REGNO (dst))
2160    return 1;
2161  if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2162      || SUBREG_WORD (src) != SUBREG_WORD (dst))
2163    return 0;
2164  src = SUBREG_REG (src);
2165  dst = SUBREG_REG (dst);
2166  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2167      && REGNO (src) == REGNO (dst))
2168    return 1;
2169  return 0;
2170}
2171
2172/* Return nonzero if an insn consists only of SETs, each of which only sets a
2173   value to itself.  */
2174static int
2175noop_move_p (insn)
2176     rtx insn;
2177{
2178  rtx pat = PATTERN (insn);
2179
2180  /* Insns carrying these notes are useful later on.  */
2181  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2182    return 0;
2183
2184  if (GET_CODE (pat) == SET && set_noop_p (pat))
2185    return 1;
2186
2187  if (GET_CODE (pat) == PARALLEL)
2188    {
2189      int i;
2190      /* If nothing but SETs of registers to themselves,
2191	 this insn can also be deleted.  */
2192      for (i = 0; i < XVECLEN (pat, 0); i++)
2193	{
2194	  rtx tem = XVECEXP (pat, 0, i);
2195
2196	  if (GET_CODE (tem) == USE
2197	      || GET_CODE (tem) == CLOBBER)
2198	    continue;
2199
2200	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2201	    return 0;
2202	}
2203
2204      return 1;
2205    }
2206  return 0;
2207}
2208
2209static void
2210notice_stack_pointer_modification (x, pat)
2211     rtx x;
2212     rtx pat ATTRIBUTE_UNUSED;
2213{
2214  if (x == stack_pointer_rtx
2215      /* The stack pointer is only modified indirectly as the result
2216	 of a push until later in flow.  See the comments in rtl.texi
2217	 regarding Embedded Side-Effects on Addresses.  */
2218      || (GET_CODE (x) == MEM
2219	  && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2220	      || GET_CODE (XEXP (x, 0)) == PRE_INC
2221	      || GET_CODE (XEXP (x, 0)) == POST_DEC
2222	      || GET_CODE (XEXP (x, 0)) == POST_INC)
2223	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2224    current_function_sp_is_unchanging = 0;
2225}
2226
2227/* Record which insns refer to any volatile memory
2228   or for any reason can't be deleted just because they are dead stores.
2229   Also, delete any insns that copy a register to itself.
2230   And see if the stack pointer is modified.  */
2231static void
2232record_volatile_insns (f)
2233     rtx f;
2234{
2235  rtx insn;
2236  for (insn = f; insn; insn = NEXT_INSN (insn))
2237    {
2238      enum rtx_code code1 = GET_CODE (insn);
2239      if (code1 == CALL_INSN)
2240	SET_INSN_VOLATILE (insn);
2241      else if (code1 == INSN || code1 == JUMP_INSN)
2242	{
2243	  if (GET_CODE (PATTERN (insn)) != USE
2244	      && volatile_refs_p (PATTERN (insn)))
2245	    SET_INSN_VOLATILE (insn);
2246
2247	  /* A SET that makes space on the stack cannot be dead.
2248	     (Such SETs occur only for allocating variable-size data,
2249	     so they will always have a PLUS or MINUS according to the
2250	     direction of stack growth.)
2251	     Even if this function never uses this stack pointer value,
2252	     signal handlers do!  */
2253	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2254		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2255#ifdef STACK_GROWS_DOWNWARD
2256		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2257#else
2258		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2259#endif
2260		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2261	    SET_INSN_VOLATILE (insn);
2262
2263	  /* Delete (in effect) any obvious no-op moves.  */
2264	  else if (noop_move_p (insn))
2265	    {
2266	      PUT_CODE (insn, NOTE);
2267	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2268	      NOTE_SOURCE_FILE (insn) = 0;
2269	    }
2270	}
2271
2272      /* Check if insn modifies the stack pointer.  */
2273      if (current_function_sp_is_unchanging
2274	   && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2275	note_stores (PATTERN (insn), notice_stack_pointer_modification);
2276    }
2277}
2278
2279/* Mark those regs which are needed at the end of the function as live
2280   at the end of the last basic block.  */
2281static void
2282mark_regs_live_at_end (set)
2283     regset set;
2284{
2285  int i;
2286
2287  /* If exiting needs the right stack value, consider the stack pointer
2288     live at the end of the function.  */
2289  if (! EXIT_IGNORE_STACK
2290      || (! FRAME_POINTER_REQUIRED
2291	  && ! current_function_calls_alloca
2292	  && flag_omit_frame_pointer)
2293      || current_function_sp_is_unchanging)
2294    {
2295      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2296    }
2297
2298  /* Mark the frame pointer if needed at the end of the function.  If
2299     we end up eliminating it, it will be removed from the live list
2300     of each basic block by reload.  */
2301
2302  if (! reload_completed || frame_pointer_needed)
2303    {
2304      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2305#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2306      /* If they are different, also mark the hard frame pointer as live.  */
2307      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2308#endif
2309    }
2310
2311  /* Mark all global registers, and all registers used by the epilogue
2312     as being live at the end of the function since they may be
2313     referenced by our caller.  */
2314  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2315    if (global_regs[i]
2316#ifdef EPILOGUE_USES
2317	|| EPILOGUE_USES (i)
2318#endif
2319	)
2320      SET_REGNO_REG_SET (set, i);
2321
2322  /* ??? Mark function return value here rather than as uses.  */
2323}
2324
2325/* Determine which registers are live at the start of each
2326   basic block of the function whose first insn is F.
2327   NREGS is the number of registers used in F.
2328   We allocate the regsets basic_block->global_live_at_start and
2329   basic_block->global_live_at_end and fill them with the data.
2330   regset_size and regset_bytes are also set here.  */
2331
2332static void
2333life_analysis_1 (f, nregs, remove_dead_code)
2334     rtx f;
2335     int nregs;
2336     int remove_dead_code;
2337{
2338  int first_pass;
2339  int changed;
2340  register int i;
2341  char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2342  regset *new_live_at_end;
2343
2344  struct obstack flow_obstack;
2345
2346  gcc_obstack_init (&flow_obstack);
2347
2348  max_regno = nregs;
2349
2350  /* Allocate and zero out many data structures
2351     that will record the data from lifetime analysis.  */
2352
2353  allocate_reg_life_data ();
2354  allocate_bb_life_data ();
2355
2356  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2357  memset (reg_next_use, 0, nregs * sizeof (rtx));
2358
2359  /* Set up regset-vectors used internally within this function.
2360     Their meanings are documented above, with their declarations.  */
2361
2362  new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
2363  init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);
2364
2365  /* Stick these vectors into the AUX field of the basic block, so that
2366     we don't have to keep going through the index.  */
2367
2368  for (i = 0; i < n_basic_blocks; ++i)
2369    BASIC_BLOCK (i)->aux = new_live_at_end[i];
2370  ENTRY_BLOCK_PTR->aux = new_live_at_end[i];
2371
2372  /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2373     This will be cleared by record_volatile_insns if it encounters an insn
2374     which modifies the stack pointer.  */
2375  current_function_sp_is_unchanging = !current_function_calls_alloca;
2376
2377  record_volatile_insns (f);
2378
2379  if (n_basic_blocks > 0)
2380    {
2381      regset theend;
2382      register edge e;
2383
2384      theend = EXIT_BLOCK_PTR->global_live_at_start;
2385      mark_regs_live_at_end (theend);
2386
2387      /* Propagate this exit data to each of EXIT's predecessors.  */
2388      for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2389	{
2390	  COPY_REG_SET (e->src->global_live_at_end, theend);
2391	  COPY_REG_SET ((regset) e->src->aux, theend);
2392	}
2393    }
2394
2395  /* The post-reload life analysis must have (on a global basis) the same
2396     registers live as were computed by reload itself.
2397
2398     Otherwise elimination offsets and such may be incorrect.
2399
2400     Reload will mark some registers as live even though they do not appear
2401     in the rtl.  */
2402  if (reload_completed)
2403    memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2404  memset (regs_ever_live, 0, sizeof regs_ever_live);
2405
2406  /* Propagate life info through the basic blocks
2407     around the graph of basic blocks.
2408
2409     This is a relaxation process: each time a new register
2410     is live at the end of the basic block, we must scan the block
2411     to determine which registers are, as a consequence, live at the beginning
2412     of that block.  These registers must then be marked live at the ends
2413     of all the blocks that can transfer control to that block.
2414     The process continues until it reaches a fixed point.  */
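
  /* In conventional dataflow terms, the fixed point reached here is the
     usual backward liveness solution (a sketch, writing U for union):

	live_at_start (B) = USE (B) U (live_at_end (B) - DEF (B))
	live_at_end (B)   = U of live_at_start (S) over each successor S of B

     where USE (B) is the set of regs used in B before being set and
     DEF (B) is the set of regs set in B.  propagate_block implements the
     first equation by scanning a block backwards; the IOR into each
     predecessor's new_live_at_end below implements the second.  */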
2415
2416  first_pass = 1;
2417  changed = 1;
2418  while (changed)
2419    {
2420      changed = 0;
2421      for (i = n_basic_blocks - 1; i >= 0; i--)
2422	{
2423	  basic_block bb = BASIC_BLOCK (i);
2424	  int consider = first_pass;
2425	  int must_rescan = first_pass;
2426	  register int j;
2427
2428	  if (!first_pass)
2429	    {
2430	      /* Set CONSIDER if this block needs thinking about at all
2431		 (that is, if the regs live now at the end of it
2432		 are not the same as were live at the end of it when
2433		 we last thought about it).
2434		 Set must_rescan if it needs to be thought about
2435		 instruction by instruction (that is, if any additional
2436		 reg that is live at the end now but was not live there before
2437		 is one of the significant regs of this basic block).  */
2438
2439	      EXECUTE_IF_AND_COMPL_IN_REG_SET
2440		((regset) bb->aux, bb->global_live_at_end, 0, j,
2441		 {
2442		   consider = 1;
2443		   if (REGNO_REG_SET_P (bb->local_set, j))
2444		     {
2445		       must_rescan = 1;
2446		       goto done;
2447		     }
2448		 });
2449	    done:
2450	      if (! consider)
2451		continue;
2452	    }
2453
2454	  /* The live_at_start of this block may be changing,
2455	     so another pass will be required after this one.  */
2456	  changed = 1;
2457
2458	  if (! must_rescan)
2459	    {
2460	      /* No complete rescan needed;
2461		 just record those variables newly known live at end
2462		 as live at start as well.  */
2463	      IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
2464				     (regset) bb->aux,
2465				     bb->global_live_at_end);
2466
2467	      IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
2468				     (regset) bb->aux,
2469				     bb->global_live_at_end);
2470	    }
2471	  else
2472	    {
2473	      /* Update the block's global_live_at_start
2474		 by propagation backwards through the block.  */
2475	      COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
2476	      COPY_REG_SET (bb->global_live_at_start,
2477			    bb->global_live_at_end);
2478	      propagate_block (bb->global_live_at_start,
2479			       bb->head, bb->end, 0,
2480			       first_pass ? bb->local_set : (regset) 0,
2481			       i, remove_dead_code);
2482	    }
2483
2484	  /* Update the new_live_at_end's of the block's predecessors.  */
2485	  {
2486	    register edge e;
2487
2488	    for (e = bb->pred; e ; e = e->pred_next)
2489	      IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
2490	  }
2491
2492#ifdef USE_C_ALLOCA
2493	  alloca (0);
2494#endif
2495	}
2496      first_pass = 0;
2497    }
2498
2499  /* The only pseudos that are live at the beginning of the function are
2500     those that were not set anywhere in the function.  local-alloc doesn't
2501     know how to handle these correctly, so mark them as not local to any
2502     one basic block.  */
2503
2504  if (n_basic_blocks > 0)
2505    EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
2506			       FIRST_PSEUDO_REGISTER, i,
2507			       {
2508				 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2509			       });
2510
2511  /* Now the life information is accurate.  Make one more pass over each
2512     basic block to delete dead stores, create autoincrement addressing
2513     and record how many times each register is used, is set, or dies.  */
2514
2515  for (i = 0; i < n_basic_blocks; i++)
2516    {
2517      basic_block bb = BASIC_BLOCK (i);
2518
2519      /* We start with global_live_at_end to determine which stores are
2520	 dead.  This process is destructive, and we wish to preserve the
2521	 contents of global_live_at_end for posterity.  Fortunately,
2522	 new_live_at_end, due to the way we converged on a solution,
2523	 contains a duplicate of global_live_at_end that we can kill.  */
2524      propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);
2525
2526#ifdef USE_C_ALLOCA
2527      alloca (0);
2528#endif
2529    }
2530
2531  /* We have a problem with any pseudoreg that lives across the setjmp.
2532     ANSI says that if a user variable does not change in value between
2533     the setjmp and the longjmp, then the longjmp preserves it.  This
2534     includes longjmp from a place where the pseudo appears dead.
2535     (In principle, the value still exists if it is in scope.)
2536     If the pseudo goes in a hard reg, some other value may occupy
2537     that hard reg where this pseudo is dead, thus clobbering the pseudo.
2538     Conclusion: such a pseudo must not go in a hard reg.  */
2539  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2540			     FIRST_PSEUDO_REGISTER, i,
2541			     {
2542			       if (regno_reg_rtx[i] != 0)
2543				 {
2544				   REG_LIVE_LENGTH (i) = -1;
2545				   REG_BASIC_BLOCK (i) = -1;
2546				 }
2547			     });
2548
2549  /* Restore regs_ever_live that was provided by reload.  */
2550  if (reload_completed)
2551    memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2552
2553  free_regset_vector (new_live_at_end, n_basic_blocks);
2554  obstack_free (&flow_obstack, NULL_PTR);
2555
2556  for (i = 0; i < n_basic_blocks; ++i)
2557    BASIC_BLOCK (i)->aux = NULL;
2558  ENTRY_BLOCK_PTR->aux = NULL;
2559}
2560
2561/* Subroutines of life analysis.  */
2562
2563/* Allocate the permanent data structures that represent the results
2564   of life analysis.  Not static since used also for stupid life analysis.  */
2565
2566void
2567allocate_bb_life_data ()
2568{
2569  register int i;
2570
2571  for (i = 0; i < n_basic_blocks; i++)
2572    {
2573      basic_block bb = BASIC_BLOCK (i);
2574
2575      bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
2576      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
2577      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
2578    }
2579
2580  ENTRY_BLOCK_PTR->global_live_at_end
2581    = OBSTACK_ALLOC_REG_SET (function_obstack);
2582  EXIT_BLOCK_PTR->global_live_at_start
2583    = OBSTACK_ALLOC_REG_SET (function_obstack);
2584
2585  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
2586}
2587
2588void
2589allocate_reg_life_data ()
2590{
2591  int i;
2592
2593  /* Recalculate the register space, in case it has grown.  Old style
2594     vector oriented regsets would set regset_{size,bytes} here also.  */
2595  allocate_reg_info (max_regno, FALSE, FALSE);
2596
2597  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2598     information, explicitly reset it here.  The allocation should have
2599     already happened on the previous reg_scan pass.  Make sure in case
2600     some more registers were allocated.  */
2601  for (i = 0; i < max_regno; i++)
2602    REG_N_SETS (i) = 0;
2603}
2604
2605/* Make each element of VECTOR point at a regset.  The vector has
2606   NELTS elements, and space is allocated from the ALLOC_OBSTACK
2607   obstack.  */
2608
2609static void
2610init_regset_vector (vector, nelts, alloc_obstack)
2611     regset *vector;
2612     int nelts;
2613     struct obstack *alloc_obstack;
2614{
2615  register int i;
2616
2617  for (i = 0; i < nelts; i++)
2618    {
2619      vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
2620      CLEAR_REG_SET (vector[i]);
2621    }
2622}
2623
2624/* Release any additional space allocated for each element of VECTOR,
2625   other than the regset header itself.  The vector has NELTS elements.  */
2626
2627void
2628free_regset_vector (vector, nelts)
2629     regset *vector;
2630     int nelts;
2631{
2632  register int i;
2633
2634  for (i = 0; i < nelts; i++)
2635    FREE_REG_SET (vector[i]);
2636}
2637
2638/* Compute the registers live at the beginning of a basic block
2639   from those live at the end.
2640
2641   When called, OLD contains those live at the end.
2642   On return, it contains those live at the beginning.
2643   FIRST and LAST are the first and last insns of the basic block.
2644
2645   FINAL is nonzero if we are doing the final pass which is not
2646   for computing the life info (since that has already been done)
2647   but for acting on it.  On this pass, we delete dead stores,
2648   set up the logical links and dead-variables lists of instructions,
2649   and merge instructions for autoincrement and autodecrement addresses.
2650
2651   SIGNIFICANT is nonzero only the first time for each basic block.
2652   If it is nonzero, it points to a regset in which we store
2653   a 1 for each register that is set within the block.
2654
2655   BNUM is the number of the basic block.  */
2656
2657static void
2658propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
2659     register regset old;
2660     rtx first;
2661     rtx last;
2662     int final;
2663     regset significant;
2664     int bnum;
2665     int remove_dead_code;
2666{
2667  register rtx insn;
2668  rtx prev;
2669  regset live;
2670  regset dead;
2671
2672  /* Find the loop depth for this block.  Ignore loop level changes in the
2673     middle of the basic block -- for register allocation purposes, the
2674     important uses will be in the blocks wholly contained within the loop,
2675     not in the loop pre-header or post-trailer.  */
2676  loop_depth = BASIC_BLOCK (bnum)->loop_depth;
2677
2678  dead = ALLOCA_REG_SET ();
2679  live = ALLOCA_REG_SET ();
2680
2681  cc0_live = 0;
2682  mem_set_list = NULL_RTX;
2683
2684  if (final)
2685    {
2686      register int i;
2687
2688      /* Process the regs live at the end of the block.
2689	 Mark them as not local to any one basic block. */
2690      EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2691				 {
2692				   REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2693				 });
2694    }
2695
2696  /* Scan the block an insn at a time from end to beginning.  */
2697
2698  for (insn = last; ; insn = prev)
2699    {
2700      prev = PREV_INSN (insn);
2701
2702      if (GET_CODE (insn) == NOTE)
2703	{
2704	  /* If this is a call to `setjmp' et al,
2705	     warn if any non-volatile datum is live.  */
2706
2707	  if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2708	    IOR_REG_SET (regs_live_at_setjmp, old);
2709	}
2710
2711      /* Update the life-status of regs for this insn.
2712	 First DEAD gets which regs are set in this insn
2713	 then LIVE gets which regs are used in this insn.
2714	 Then the regs live before the insn
2715	 are those live after, with DEAD regs turned off,
2716	 and then LIVE regs turned on.  */
2717
2718      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2719	{
2720	  register int i;
2721	  rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
2722	  int insn_is_dead = 0;
2723	  int libcall_is_dead = 0;
2724
2725	  if (remove_dead_code)
2726	    {
2727	      insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
2728	                      /* Don't delete something that refers to volatile storage!  */
2729	                      && ! INSN_VOLATILE (insn));
2730	      libcall_is_dead = (insn_is_dead && note != 0
2731	                         && libcall_dead_p (PATTERN (insn), old, note, insn));
2732	    }
2733
2734	  /* If an instruction consists of just dead store(s) on final pass,
2735	     "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2736	     We could really delete it with delete_insn, but that
2737	     can cause trouble for first or last insn in a basic block.  */
2738	  if (final && insn_is_dead)
2739	    {
2740	      rtx inote;
2741	      /* If the insn referred to a label, note that the label is
2742		 now less used.  */
2743	      for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
2744		{
2745		  if (REG_NOTE_KIND (inote) == REG_LABEL)
2746		    {
2747		      int n_forced;
2748		      rtx label = XEXP (inote, 0);
2749		      rtx next;
2750		      LABEL_NUSES (label)--;
2751
2752		      /* The label may be forced if it has been put in the
2753		         constant pool.  We can't delete it in this case, but
2754		         we still must discard a jump table following it.  */
2755		      n_forced = 0;
2756		      if (LABEL_PRESERVE_P (label))
2757			n_forced++;
2758
2759		      /* If this label was attached to an ADDR_VEC, it's
2760			 safe to delete the ADDR_VEC.  In fact, it's pretty much
2761			 mandatory to delete it, because the ADDR_VEC may
2762			 be referencing labels that no longer exist.  */
2763		      if (LABEL_NUSES (label) == n_forced
2764			  && (next = next_nonnote_insn (label)) != NULL
2765			  && GET_CODE (next) == JUMP_INSN
2766			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
2767			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
2768			{
2769			  rtx pat = PATTERN (next);
2770			  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2771			  int len = XVECLEN (pat, diff_vec_p);
2772			  int i;
2773			  for (i = 0; i < len; i++)
2774			    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2775
2776			  flow_delete_insn (next);
2777			}
2778		    }
2779		}
2780
2781	      PUT_CODE (insn, NOTE);
2782	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2783	      NOTE_SOURCE_FILE (insn) = 0;
2784
2785	      /* CC0 is now known to be dead.  Either this insn used it,
2786		 in which case it doesn't anymore, or clobbered it,
2787		 so the next insn can't use it.  */
2788	      cc0_live = 0;
2789
2790	      /* If this insn is copying the return value from a library call,
2791		 delete the entire library call.  */
2792	      if (libcall_is_dead)
2793		{
2794		  rtx first = XEXP (note, 0);
2795		  rtx p = insn;
2796		  while (INSN_DELETED_P (first))
2797		    first = NEXT_INSN (first);
2798		  while (p != first)
2799		    {
2800		      p = PREV_INSN (p);
2801		      PUT_CODE (p, NOTE);
2802		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
2803		      NOTE_SOURCE_FILE (p) = 0;
2804		    }
2805		}
2806	      goto flushed;
2807	    }
2808
2809	  CLEAR_REG_SET (dead);
2810	  CLEAR_REG_SET (live);
2811
2812	  /* See if this is an increment or decrement that can be
2813	     merged into a following memory address.  */
2814#ifdef AUTO_INC_DEC
2815	  {
2816	    register rtx x = single_set (insn);
2817
2818	    /* Does this instruction increment or decrement a register?  */
2819	    if (!reload_completed
2820		&& final && x != 0
2821		&& GET_CODE (SET_DEST (x)) == REG
2822		&& (GET_CODE (SET_SRC (x)) == PLUS
2823		    || GET_CODE (SET_SRC (x)) == MINUS)
2824		&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
2825		&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2826		/* Ok, look for a following memory ref we can combine with.
2827		   If one is found, change the memory ref to a PRE_INC
2828		   or PRE_DEC, cancel this insn, and return 1.
2829		   Return 0 if nothing has been done.  */
2830		&& try_pre_increment_1 (insn))
2831	      goto flushed;
2832	  }
2833#endif /* AUTO_INC_DEC */
2834
2835	  /* If this is not the final pass, and this insn is copying the
2836	     value of a library call and it's dead, don't scan the
2837	     insns that perform the library call, so that the call's
2838	     arguments are not marked live.  */
2839	  if (libcall_is_dead)
2840	    {
2841	      /* Mark the dest reg as `significant'.  */
2842	      mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
2843
2844	      insn = XEXP (note, 0);
2845	      prev = PREV_INSN (insn);
2846	    }
2847	  else if (GET_CODE (PATTERN (insn)) == SET
2848		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2849		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2850		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
2851		   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
2852	    /* We have an insn to pop a constant amount off the stack.
2853	       (Such insns use PLUS regardless of the direction of the stack,
2854	       and any insn to adjust the stack by a constant is always a pop.)
2855	       These insns, if not dead stores, have no effect on life.  */
2856	    ;
2857	  else
2858	    {
2859	      /* Any regs live at the time of a call instruction
2860		 must not go in a register clobbered by calls.
2861		 Find all regs now live and record this for them.  */
2862
2863	      if (GET_CODE (insn) == CALL_INSN && final)
2864		EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2865					   {
2866					     REG_N_CALLS_CROSSED (i)++;
2867					   });
2868
2869	      /* LIVE gets the regs used in INSN;
2870		 DEAD gets those set by it.  Dead insns don't make anything
2871		 live.  */
2872
2873	      mark_set_regs (old, dead, PATTERN (insn),
2874			     final ? insn : NULL_RTX, significant);
2875
2876	      /* If an insn doesn't use CC0, it becomes dead since we
2877		 assume that every insn clobbers it.  So show it dead here;
2878		 mark_used_regs will set it live if it is referenced.  */
2879	      cc0_live = 0;
2880
2881	      if (! insn_is_dead)
2882		mark_used_regs (old, live, PATTERN (insn), final, insn);
2883
2884	      /* Sometimes we may have inserted something before INSN (such as
2885		 a move) when we make an auto-inc.  So ensure we will scan
2886		 those insns.  */
2887#ifdef AUTO_INC_DEC
2888	      prev = PREV_INSN (insn);
2889#endif
2890
2891	      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
2892		{
2893		  register int i;
2894
2895		  rtx note;
2896
2897	          for (note = CALL_INSN_FUNCTION_USAGE (insn);
2898		       note;
2899		       note = XEXP (note, 1))
2900		    if (GET_CODE (XEXP (note, 0)) == USE)
2901		      mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
2902				      final, insn);
2903
2904		  /* Each call clobbers all call-clobbered regs that are not
2905		     global or fixed.  Note that the function-value reg is a
2906		     call-clobbered reg, and mark_set_regs has already had
2907		     a chance to handle it.  */
2908
2909		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2910		    if (call_used_regs[i] && ! global_regs[i]
2911			&& ! fixed_regs[i])
2912		      SET_REGNO_REG_SET (dead, i);
2913
2914		  /* The stack ptr is used (honorarily) by a CALL insn.  */
2915		  SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2916
2917		  /* Calls may also reference any of the global registers,
2918		     so they are made live.  */
2919		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2920		    if (global_regs[i])
2921		      mark_used_regs (old, live,
2922				      gen_rtx_REG (reg_raw_mode[i], i),
2923				      final, insn);
2924
2925		  /* Calls also clobber memory.  */
2926		  mem_set_list = NULL_RTX;
2927		}
2928
2929	      /* Update OLD for the registers used or set.  */
2930	      AND_COMPL_REG_SET (old, dead);
2931	      IOR_REG_SET (old, live);
2932
2933	    }
2934
2935	  /* On final pass, update counts of how many insns each reg is live
2936	     at.  */
2937	  if (final)
2938	    EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2939				       { REG_LIVE_LENGTH (i)++; });
2940	}
2941    flushed: ;
2942      if (insn == first)
2943	break;
2944    }
2945
2946  FREE_REG_SET (dead);
2947  FREE_REG_SET (live);
2948}
2949
2950/* Return 1 if X (the body of an insn, or part of it) is just dead stores
2951   (SET expressions whose destinations are registers dead after the insn).
2952   NEEDED is the regset that says which regs are alive after the insn.
2953
2954   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2955
2956   If X is the entire body of an insn, NOTES contains the reg notes
2957   pertaining to the insn.  */
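
/* For instance, a (set (reg N) (const_int 0)) whose destination register
   is not in NEEDED is typically such a dead store; the tests below guard
   the special cases (global regs, frame and arg pointers, multi-word
   hard registers, and existing REG_INC notes after reload).  */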
2958
2959static int
2960insn_dead_p (x, needed, call_ok, notes)
2961     rtx x;
2962     regset needed;
2963     int call_ok;
2964     rtx notes ATTRIBUTE_UNUSED;
2965{
2966  enum rtx_code code = GET_CODE (x);
2967
2968#ifdef AUTO_INC_DEC
2969  /* If flow is invoked after reload, we must take existing AUTO_INC
2970     expressions into account.  */
2971  if (reload_completed)
2972    {
2973      for ( ; notes; notes = XEXP (notes, 1))
2974	{
2975	  if (REG_NOTE_KIND (notes) == REG_INC)
2976	    {
2977	      int regno = REGNO (XEXP (notes, 0));
2978
2979	      /* Don't delete insns to set global regs.  */
2980	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2981		  || REGNO_REG_SET_P (needed, regno))
2982		return 0;
2983	    }
2984	}
2985    }
2986#endif
2987
2988  /* If setting something that's a reg or part of one,
2989     see if that register's altered value will be live.  */
2990
2991  if (code == SET)
2992    {
2993      rtx r = SET_DEST (x);
2994
2995      /* A SET that is a subroutine call cannot be dead.  */
2996      if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2997	return 0;
2998
2999#ifdef HAVE_cc0
3000      if (GET_CODE (r) == CC0)
3001	return ! cc0_live;
3002#endif
3003
3004      if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3005	{
3006	  rtx temp;
3007	  /* Walk the set of memory locations we are currently tracking
3008	     and see if one is an identical match to this memory location.
3009	     If so, this memory write is dead (remember, we're walking
3010	     backwards from the end of the block to the start).  */
3011	  temp = mem_set_list;
3012	  while (temp)
3013	    {
3014	      if (rtx_equal_p (XEXP (temp, 0), r))
3015		return 1;
3016	      temp = XEXP (temp, 1);
3017	    }
3018	}
3019
3020      while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3021	     || GET_CODE (r) == ZERO_EXTRACT)
3022	r = SUBREG_REG (r);
3023
3024      if (GET_CODE (r) == REG)
3025	{
3026	  int regno = REGNO (r);
3027
3028	  /* Don't delete insns to set global regs.  */
3029	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3030	      /* Make sure insns to set frame pointer aren't deleted.  */
3031	      || (regno == FRAME_POINTER_REGNUM
3032		  && (! reload_completed || frame_pointer_needed))
3033#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3034	      || (regno == HARD_FRAME_POINTER_REGNUM
3035		  && (! reload_completed || frame_pointer_needed))
3036#endif
3037#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3038	      /* Make sure insns to set arg pointer are never deleted
3039		 (if the arg pointer isn't fixed, there will be a USE for
3040		 it, so we can treat it normally).  */
3041	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3042#endif
3043	      || REGNO_REG_SET_P (needed, regno))
3044	    return 0;
3045
3046	  /* If this is a hard register, verify that subsequent words are
3047	     not needed.  */
3048	  if (regno < FIRST_PSEUDO_REGISTER)
3049	    {
3050	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3051
3052	      while (--n > 0)
3053		if (REGNO_REG_SET_P (needed, regno+n))
3054		  return 0;
3055	    }
3056
3057	  return 1;
3058	}
3059    }
3060
3061  /* If performing several activities,
3062     insn is dead if each activity is individually dead.
3063     Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3064     that's inside a PARALLEL doesn't make the insn worth keeping.  */
3065  else if (code == PARALLEL)
3066    {
3067      int i = XVECLEN (x, 0);
3068
3069      for (i--; i >= 0; i--)
3070	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3071	    && GET_CODE (XVECEXP (x, 0, i)) != USE
3072	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3073	  return 0;
3074
3075      return 1;
3076    }
3077
3078  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
3079     is not necessarily true for hard registers.  */
3080  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3081	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3082	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3083    return 1;
3084
3085  /* We do not check other CLOBBER or USE here.  An insn consisting of just
3086     a CLOBBER or just a USE should not be deleted.  */
3087  return 0;
3088}
3089
3090/* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3091   return 1 if the entire library call is dead.
3092   This is true if X copies a register (hard or pseudo)
3093   and if the hard return reg of the call insn is dead.
3094   (The caller should have tested the destination of X already for death.)
3095
3096   If this insn doesn't just copy a register, then we don't
3097   have an ordinary libcall.  In that case, cse could not have
3098   managed to substitute the source for the dest later on,
3099   so we can assume the libcall is dead.
3100
3101   NEEDED is the bit vector of pseudoregs live before this insn.
3102   NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */
3103
3104static int
3105libcall_dead_p (x, needed, note, insn)
3106     rtx x;
3107     regset needed;
3108     rtx note;
3109     rtx insn;
3110{
3111  register RTX_CODE code = GET_CODE (x);
3112
3113  if (code == SET)
3114    {
3115      register rtx r = SET_SRC (x);
3116      if (GET_CODE (r) == REG)
3117	{
3118	  rtx call = XEXP (note, 0);
3119	  rtx call_pat;
3120	  register int i;
3121
3122	  /* Find the call insn.  */
3123	  while (call != insn && GET_CODE (call) != CALL_INSN)
3124	    call = NEXT_INSN (call);
3125
3126	  /* If there is none, do nothing special,
3127	     since ordinary death handling can understand these insns.  */
3128	  if (call == insn)
3129	    return 0;
3130
3131	  /* See if the hard reg holding the value is dead.
3132	     If this is a PARALLEL, find the call within it.  */
3133	  call_pat = PATTERN (call);
3134	  if (GET_CODE (call_pat) == PARALLEL)
3135	    {
3136	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3137		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3138		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3139		  break;
3140
3141	      /* This may be a library call that is returning a value
3142		 via invisible pointer.  Do nothing special, since
3143		 ordinary death handling can understand these insns.  */
3144	      if (i < 0)
3145		return 0;
3146
3147	      call_pat = XVECEXP (call_pat, 0, i);
3148	    }
3149
3150	  return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3151	}
3152    }
3153  return 1;
3154}
3155
3156/* Return 1 if register REGNO was used before it was set, i.e. if it is
3157   live at function entry.  Don't count global register variables, variables
3158   in registers that can be used for function arg passing, or variables in
3159   fixed hard registers.  */
3160
3161int
3162regno_uninitialized (regno)
3163     int regno;
3164{
3165  if (n_basic_blocks == 0
3166      || (regno < FIRST_PSEUDO_REGISTER
3167	  && (global_regs[regno]
3168	      || fixed_regs[regno]
3169	      || FUNCTION_ARG_REGNO_P (regno))))
3170    return 0;
3171
3172  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3173}
3174
3175/* 1 if register REGNO was alive at a place where `setjmp' was called
3176   and was set more than once or is an argument.
3177   Such regs may be clobbered by `longjmp'.  */
3178
3179int
3180regno_clobbered_at_setjmp (regno)
3181     int regno;
3182{
3183  if (n_basic_blocks == 0)
3184    return 0;
3185
3186  return ((REG_N_SETS (regno) > 1
3187	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3188	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3189}
3190
3191/* INSN references memory, possibly using autoincrement addressing modes.
3192   Find any entries on the mem_set_list that need to be invalidated due
3193   to an address change.  */
3194static void
3195invalidate_mems_from_autoinc (insn)
3196     rtx insn;
3197{
3198  rtx note;
3199  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3200    {
3201      if (REG_NOTE_KIND (note) == REG_INC)
3202        {
3203          rtx temp = mem_set_list;
3204          rtx prev = NULL_RTX;
3205
3206          while (temp)
3207	    {
3208	      if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3209	        {
3210	          /* Splice temp out of list.  */
3211	          if (prev)
3212	            XEXP (prev, 1) = XEXP (temp, 1);
3213	          else
3214	            mem_set_list = XEXP (temp, 1);
3215	        }
3216	      else
3217	        prev = temp;
3218              temp = XEXP (temp, 1);
3219	    }
3220	}
3221    }
3222}
3223
3224/* Process the registers that are set within X.
3225   Their bits are set to 1 in the regset DEAD,
3226   because they are dead prior to this insn.
3227
3228   If INSN is nonzero, it is the insn being processed
3229   and the fact that it is nonzero implies this is the FINAL pass
3230   in propagate_block.  In this case, various info about register
3231   usage is stored and LOG_LINKS fields of insns are set up.  */
3232
3233static void
3234mark_set_regs (needed, dead, x, insn, significant)
3235     regset needed;
3236     regset dead;
3237     rtx x;
3238     rtx insn;
3239     regset significant;
3240{
3241  register RTX_CODE code = GET_CODE (x);
3242
3243  if (code == SET || code == CLOBBER)
3244    mark_set_1 (needed, dead, x, insn, significant);
3245  else if (code == PARALLEL)
3246    {
3247      register int i;
3248      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3249	{
3250	  code = GET_CODE (XVECEXP (x, 0, i));
3251	  if (code == SET || code == CLOBBER)
3252	    mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
3253	}
3254    }
3255}
3256
3257/* Process a single SET rtx, X.  */
3258
3259static void
3260mark_set_1 (needed, dead, x, insn, significant)
3261     regset needed;
3262     regset dead;
3263     rtx x;
3264     rtx insn;
3265     regset significant;
3266{
3267  register int regno;
3268  register rtx reg = SET_DEST (x);
3269
3270  /* Some targets place small structures in registers for
3271     return values of functions.  We have to detect this
3272     case specially here to get correct flow information.  */
3273  if (GET_CODE (reg) == PARALLEL
3274      && GET_MODE (reg) == BLKmode)
3275    {
3276      register int i;
3277
3278      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3279	  mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
3280      return;
3281    }
3282
3283  /* Modifying just one hardware register of a multi-reg value
3284     or just a byte field of a register
3285     does not mean the value from before this insn is now dead.
3286     But it does mean liveness of that register at the end of the block
3287     is significant.
3288
3289     Within mark_set_1, however, we treat it as if the register is
3290     indeed modified.  mark_used_regs will, however, also treat this
3291     register as being used.  Thus, we treat these insns as setting a
3292     new value for the register as a function of its old value.  This
3293     causes LOG_LINKS to be made appropriately, and this will help combine.  */
3294
3295  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3296	 || GET_CODE (reg) == SIGN_EXTRACT
3297	 || GET_CODE (reg) == STRICT_LOW_PART)
3298    reg = XEXP (reg, 0);
3299
3300  /* If this set is a MEM, then it kills any aliased writes.
3301     If this set is a REG, then it kills any MEMs which use the reg.  */
3302  if (GET_CODE (reg) == MEM
3303      || GET_CODE (reg) == REG)
3304    {
3305      rtx temp = mem_set_list;
3306      rtx prev = NULL_RTX;
3307
3308      while (temp)
3309	{
3310	  if ((GET_CODE (reg) == MEM
3311	       && output_dependence (XEXP (temp, 0), reg))
3312	      || (GET_CODE (reg) == REG
3313		  && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3314	    {
3315	      /* Splice this entry out of the list.  */
3316	      if (prev)
3317		XEXP (prev, 1) = XEXP (temp, 1);
3318	      else
3319		mem_set_list = XEXP (temp, 1);
3320	    }
3321	  else
3322	    prev = temp;
3323	  temp = XEXP (temp, 1);
3324	}
3325    }
3326
3327  /* If the memory reference had embedded side effects (autoincrement
3328     address modes), then we may need to kill some entries on the
3329     memory set list.  */
3330  if (insn && GET_CODE (reg) == MEM)
3331    invalidate_mems_from_autoinc (insn);
3332
3333  if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3334      /* We do not know the size of a BLKmode store, so we do not track
3335	 them for redundant store elimination.  */
3336      && GET_MODE (reg) != BLKmode
3337      /* There are no REG_INC notes for SP, so we can't assume we'll see
3338	 everything that invalidates it.  To be safe, don't eliminate any
3339	 stores through SP; none of them should be redundant anyway.  */
3340      && ! reg_mentioned_p (stack_pointer_rtx, reg))
3341    mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list);
3342
3343  if (GET_CODE (reg) == REG
3344      && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM
3345				  && (! reload_completed || frame_pointer_needed)))
3346#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3347      && ! (regno == HARD_FRAME_POINTER_REGNUM
3348	    && (! reload_completed || frame_pointer_needed))
3349#endif
3350#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3351      && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3352#endif
3353      && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3354    /* && regno != STACK_POINTER_REGNUM) -- let's try without this.  */
3355    {
3356      int some_needed = REGNO_REG_SET_P (needed, regno);
3357      int some_not_needed = ! some_needed;
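
      /* SOME_NEEDED and SOME_NOT_NEEDED record whether any, respectively
	 not all, of the registers making up this value are still live
	 after the insn; they select between the LOG_LINK and REG_UNUSED
	 bookkeeping done further down.  */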
3358
3359      /* Mark it as a significant register for this basic block.  */
3360      if (significant)
3361	SET_REGNO_REG_SET (significant, regno);
3362
3363      /* Mark it as dead before this insn.  */
3364      SET_REGNO_REG_SET (dead, regno);
3365
3366      /* A hard reg in a wide mode may really be multiple registers.
3367	 If so, mark all of them just like the first.  */
3368      if (regno < FIRST_PSEUDO_REGISTER)
3369	{
3370	  int n;
3371
3372	  /* Nothing below is needed for the stack pointer; get out asap.
3373	     Eg, log links aren't needed, since combine won't use them.  */
3374	  if (regno == STACK_POINTER_REGNUM)
3375	    return;
3376
3377	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3378	  while (--n > 0)
3379	    {
3380	      int regno_n = regno + n;
3381	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3382	      if (significant)
3383		SET_REGNO_REG_SET (significant, regno_n);
3384
3385	      SET_REGNO_REG_SET (dead, regno_n);
3386	      some_needed |= needed_regno;
3387	      some_not_needed |= ! needed_regno;
3388	    }
3389	}
3390      /* Additional data to record if this is the final pass.  */
3391      if (insn)
3392	{
3393	  register rtx y = reg_next_use[regno];
3394	  register int blocknum = BLOCK_NUM (insn);
3395
3396	  /* If this is a hard reg, record this function uses the reg.  */
3397
3398	  if (regno < FIRST_PSEUDO_REGISTER)
3399	    {
3400	      register int i;
3401	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3402
3403	      for (i = regno; i < endregno; i++)
3404		{
3405		  /* The next use is no longer "next", since a store
3406		     intervenes.  */
3407		  reg_next_use[i] = 0;
3408
3409		  regs_ever_live[i] = 1;
3410		  REG_N_SETS (i)++;
3411		}
3412	    }
3413	  else
3414	    {
3415	      /* The next use is no longer "next", since a store
3416		 intervenes.  */
3417	      reg_next_use[regno] = 0;
3418
3419	      /* Keep track of which basic blocks each reg appears in.  */
3420
3421	      if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3422		REG_BASIC_BLOCK (regno) = blocknum;
3423	      else if (REG_BASIC_BLOCK (regno) != blocknum)
3424		REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3425
3426	      /* Count (weighted) references, stores, etc.  This counts a
3427		 register twice if it is modified, but that is correct.  */
3428	      REG_N_SETS (regno)++;
3429
3430	      REG_N_REFS (regno) += loop_depth;
3431
3432	      /* The insns where a reg is live are normally counted
3433		 elsewhere, but we want the count to include the insn
3434		 where the reg is set, and the normal counting mechanism
3435		 would not count it.  */
3436	      REG_LIVE_LENGTH (regno)++;
3437	    }
3438
3439	  if (! some_not_needed)
3440	    {
3441	      /* Make a logical link from the next following insn
3442		 that uses this register, back to this insn.
3443		 The following insns have already been processed.
3444
3445		 We don't build a LOG_LINK for hard registers contained
3446		 in ASM_OPERANDs.  If these registers get replaced,
3447		 we might wind up changing the semantics of the insn,
3448		 even if reload can make what appear to be valid assignments
3449		 later.  */
3450	      if (y && (BLOCK_NUM (y) == blocknum)
3451		  && (regno >= FIRST_PSEUDO_REGISTER
3452		      || asm_noperands (PATTERN (y)) < 0))
3453		LOG_LINKS (y)
3454		  = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
3455	    }
3456	  else if (! some_needed)
3457	    {
	      /* Note that dead stores have already been deleted when possible.
3459		 If we get here, we have found a dead store that cannot
3460		 be eliminated (because the same insn does something useful).
3461		 Indicate this by marking the reg being set as dying here.  */
3462	      REG_NOTES (insn)
3463		= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3464	      REG_N_DEATHS (REGNO (reg))++;
3465	    }
3466	  else
3467	    {
3468	      /* This is a case where we have a multi-word hard register
3469		 and some, but not all, of the words of the register are
3470		 needed in subsequent insns.  Write REG_UNUSED notes
3471		 for those parts that were not needed.  This case should
3472		 be rare.  */
3473
3474	      int i;
3475
3476	      for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
3477		   i >= 0; i--)
3478		if (!REGNO_REG_SET_P (needed, regno + i))
3479		  REG_NOTES (insn)
3480		    = gen_rtx_EXPR_LIST (REG_UNUSED,
3481					 gen_rtx_REG (reg_raw_mode[regno + i],
3482						      regno + i),
3483					 REG_NOTES (insn));
3484	    }
3485	}
3486    }
3487  else if (GET_CODE (reg) == REG)
3488    reg_next_use[regno] = 0;
3489
  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here.  */
3492  else if (GET_CODE (reg) == SCRATCH && insn != 0)
3493    {
3494      REG_NOTES (insn)
3495	= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3496    }
3497}
3498
3499#ifdef AUTO_INC_DEC
3500
3501/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
3502   reference.  */
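
/* For example, on a machine with post-increment addressing, a 4-byte
   memory reference (mem (reg R)) in INSN whose base register's next use
   in the same basic block is (set (reg R) (plus (reg R) (const_int 4)))
   can become (mem (post_inc (reg R))); the increment insn is then
   rewritten to copy the already-incremented register, and is deleted
   outright if that copy turns out to be a no-op.  */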
3503
3504static void
3505find_auto_inc (needed, x, insn)
3506     regset needed;
3507     rtx x;
3508     rtx insn;
3509{
3510  rtx addr = XEXP (x, 0);
3511  HOST_WIDE_INT offset = 0;
3512  rtx set;
3513
3514  /* Here we detect use of an index register which might be good for
3515     postincrement, postdecrement, preincrement, or predecrement.  */
3516
3517  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3518    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3519
3520  if (GET_CODE (addr) == REG)
3521    {
3522      register rtx y;
3523      register int size = GET_MODE_SIZE (GET_MODE (x));
3524      rtx use;
3525      rtx incr;
3526      int regno = REGNO (addr);
3527
3528      /* Is the next use an increment that might make auto-increment? */
3529      if ((incr = reg_next_use[regno]) != 0
3530	  && (set = single_set (incr)) != 0
3531	  && GET_CODE (set) == SET
3532	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
3533	  /* Can't add side effects to jumps; if reg is spilled and
3534	     reloaded, there's no way to store back the altered value.  */
3535	  && GET_CODE (insn) != JUMP_INSN
3536	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
3537	  && XEXP (y, 0) == addr
3538	  && GET_CODE (XEXP (y, 1)) == CONST_INT
3539	  && ((HAVE_POST_INCREMENT
3540	       && (INTVAL (XEXP (y, 1)) == size && offset == 0))
3541	      || (HAVE_POST_DECREMENT
3542		  && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
3543	      || (HAVE_PRE_INCREMENT
3544		  && (INTVAL (XEXP (y, 1)) == size && offset == size))
3545	      || (HAVE_PRE_DECREMENT
3546		  && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
3547	  /* Make sure this reg appears only once in this insn.  */
3548	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
3549	      use != 0 && use != (rtx) 1))
3550	{
3551	  rtx q = SET_DEST (set);
3552	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
3553				    ? (offset ? PRE_INC : POST_INC)
3554				    : (offset ? PRE_DEC : POST_DEC));
3555
3556	  if (dead_or_set_p (incr, addr))
3557	    {
3558	      /* This is the simple case.  Try to make the auto-inc.  If
3559		 we can't, we are done.  Otherwise, we will do any
3560		 needed updates below.  */
3561	      if (! validate_change (insn, &XEXP (x, 0),
3562				     gen_rtx_fmt_e (inc_code, Pmode, addr),
3563				     0))
3564		return;
3565	    }
3566	  else if (GET_CODE (q) == REG
3567		   /* PREV_INSN used here to check the semi-open interval
3568		      [insn,incr).  */
3569		   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
3570		   /* We must also check for sets of q as q may be
3571		      a call clobbered hard register and there may
3572		      be a call between PREV_INSN (insn) and incr.  */
3573		   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
3574	    {
3575	      /* We have *p followed sometime later by q = p+size.
3576		 Both p and q must be live afterward,
3577		 and q is not used between INSN and its assignment.
3578		 Change it to q = p, ...*q..., q = q+size.
3579		 Then fall into the usual case.  */
3580	      rtx insns, temp;
3581	      basic_block bb;
3582
3583	      start_sequence ();
3584	      emit_move_insn (q, addr);
3585	      insns = get_insns ();
3586	      end_sequence ();
3587
3588	      bb = BLOCK_FOR_INSN (insn);
3589	      for (temp = insns; temp; temp = NEXT_INSN (temp))
3590		set_block_for_insn (temp, bb);
3591
3592	      /* If we can't make the auto-inc, or can't make the
3593		 replacement into Y, exit.  There's no point in making
3594		 the change below if we can't do the auto-inc and doing
3595		 so is not correct in the pre-inc case.  */
3596
3597	      validate_change (insn, &XEXP (x, 0),
3598			       gen_rtx_fmt_e (inc_code, Pmode, q),
3599			       1);
3600	      validate_change (incr, &XEXP (y, 0), q, 1);
3601	      if (! apply_change_group ())
3602		return;
3603
3604	      /* We now know we'll be doing this change, so emit the
3605		 new insn(s) and do the updates.  */
3606	      emit_insns_before (insns, insn);
3607
3608	      if (BLOCK_FOR_INSN (insn)->head == insn)
3609		BLOCK_FOR_INSN (insn)->head = insns;
3610
3611	      /* INCR will become a NOTE and INSN won't contain a
3612		 use of ADDR.  If a use of ADDR was just placed in
3613		 the insn before INSN, make that the next use.
3614		 Otherwise, invalidate it.  */
3615	      if (GET_CODE (PREV_INSN (insn)) == INSN
3616		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3617		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
3618		reg_next_use[regno] = PREV_INSN (insn);
3619	      else
3620		reg_next_use[regno] = 0;
3621
3622	      addr = q;
3623	      regno = REGNO (q);
3624
3625	      /* REGNO is now used in INCR which is below INSN, but
3626		 it previously wasn't live here.  If we don't mark
3627		 it as needed, we'll put a REG_DEAD note for it
3628		 on this insn, which is incorrect.  */
3629	      SET_REGNO_REG_SET (needed, regno);
3630
3631	      /* If there are any calls between INSN and INCR, show
3632		 that REGNO now crosses them.  */
3633	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3634		if (GET_CODE (temp) == CALL_INSN)
3635		  REG_N_CALLS_CROSSED (regno)++;
3636	    }
3637	  else
3638	    return;
3639
3640	  /* If we haven't returned, it means we were able to make the
3641	     auto-inc, so update the status.  First, record that this insn
3642	     has an implicit side effect.  */
3643
3644	  REG_NOTES (insn)
3645	    = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
3646
3647	  /* Modify the old increment-insn to simply copy
3648	     the already-incremented value of our register.  */
3649	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
3650	    abort ();
3651
3652	  /* If that makes it a no-op (copying the register into itself) delete
3653	     it so it won't appear to be a "use" and a "set" of this
3654	     register.  */
3655	  if (SET_DEST (set) == addr)
3656	    {
3657	      PUT_CODE (incr, NOTE);
3658	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3659	      NOTE_SOURCE_FILE (incr) = 0;
3660	    }
3661
3662	  if (regno >= FIRST_PSEUDO_REGISTER)
3663	    {
3664	      /* Count an extra reference to the reg.  When a reg is
3665		 incremented, spilling it is worse, so we want to make
3666		 that less likely.  */
3667	      REG_N_REFS (regno) += loop_depth;
3668
3669	      /* Count the increment as a setting of the register,
3670		 even though it isn't a SET in rtl.  */
3671	      REG_N_SETS (regno)++;
3672	    }
3673	}
3674    }
3675}
3676#endif /* AUTO_INC_DEC */
3677
3678/* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3679   This is done assuming the registers needed from X
3680   are those that have 1-bits in NEEDED.
3681
3682   On the final pass, FINAL is 1.  This means try for autoincrement
3683   and count the uses and deaths of each pseudo-reg.
3684
3685   INSN is the containing instruction.  If INSN is dead, this function is not
3686   called.  */
3687
3688static void
3689mark_used_regs (needed, live, x, final, insn)
3690     regset needed;
3691     regset live;
3692     rtx x;
3693     int final;
3694     rtx insn;
3695{
3696  register RTX_CODE code;
3697  register int regno;
3698  int i;
3699
3700 retry:
3701  code = GET_CODE (x);
3702  switch (code)
3703    {
3704    case LABEL_REF:
3705    case SYMBOL_REF:
3706    case CONST_INT:
3707    case CONST:
3708    case CONST_DOUBLE:
3709    case PC:
3710    case ADDR_VEC:
3711    case ADDR_DIFF_VEC:
3712      return;
3713
3714#ifdef HAVE_cc0
3715    case CC0:
3716      cc0_live = 1;
3717      return;
3718#endif
3719
3720    case CLOBBER:
3721      /* If we are clobbering a MEM, mark any registers inside the address
3722	 as being used.  */
3723      if (GET_CODE (XEXP (x, 0)) == MEM)
3724	mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
3725      return;
3726
3727    case MEM:
3728      /* Invalidate the data for the last MEM stored, but only if MEM is
3729	 something that can be stored into.  */
3730      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3731	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3732	; /* needn't clear the memory set list */
3733      else
3734	{
3735	  rtx temp = mem_set_list;
3736	  rtx prev = NULL_RTX;
3737
3738	  while (temp)
3739	    {
3740	      if (anti_dependence (XEXP (temp, 0), x))
3741		{
3742		  /* Splice temp out of the list.  */
3743		  if (prev)
3744		    XEXP (prev, 1) = XEXP (temp, 1);
3745		  else
3746		    mem_set_list = XEXP (temp, 1);
3747		}
3748	      else
3749		prev = temp;
3750	      temp = XEXP (temp, 1);
3751	    }
3752	}
3753
      /* If the memory reference had embedded side effects (autoincrement
	 address modes), then we may need to kill some entries on the
	 memory set list.  */
3757      if (insn)
3758	invalidate_mems_from_autoinc (insn);
3759
3760#ifdef AUTO_INC_DEC
3761      if (final)
3762	find_auto_inc (needed, x, insn);
3763#endif
3764      break;
3765
3766    case SUBREG:
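      /* If this SUBREG accesses a pseudo register in a mode of a
	 different size, record that the register's size changes.  */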
3767      if (GET_CODE (SUBREG_REG (x)) == REG
3768	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3769	  && (GET_MODE_SIZE (GET_MODE (x))
3770	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3771	REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
3772
3773      /* While we're here, optimize this case.  */
3774      x = SUBREG_REG (x);
3775
      /* In case the SUBREG is not of a register, don't optimize.  */
3777      if (GET_CODE (x) != REG)
3778	{
3779	  mark_used_regs (needed, live, x, final, insn);
3780	  return;
3781	}
3782
3783      /* ... fall through ...  */
3784
3785    case REG:
      /* We have found a use of a register (as opposed to a store into
	 it); mark it as needed.  */
3788
3789      regno = REGNO (x);
3790      {
3791	int some_needed = REGNO_REG_SET_P (needed, regno);
3792	int some_not_needed = ! some_needed;
3793
3794	SET_REGNO_REG_SET (live, regno);
3795
3796	/* A hard reg in a wide mode may really be multiple registers.
3797	   If so, mark all of them just like the first.  */
3798	if (regno < FIRST_PSEUDO_REGISTER)
3799	  {
3800	    int n;
3801
3802	    /* For stack ptr or fixed arg pointer,
3803	       nothing below can be necessary, so waste no more time.  */
3804	    if (regno == STACK_POINTER_REGNUM
3805#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3806		|| (regno == HARD_FRAME_POINTER_REGNUM
3807		    && (! reload_completed || frame_pointer_needed))
3808#endif
3809#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3810		|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3811#endif
3812		|| (regno == FRAME_POINTER_REGNUM
3813		    && (! reload_completed || frame_pointer_needed)))
3814	      {
3815		/* If this is a register we are going to try to eliminate,
3816		   don't mark it live here.  If we are successful in
3817		   eliminating it, it need not be live unless it is used for
3818		   pseudos, in which case it will have been set live when
3819		   it was allocated to the pseudos.  If the register will not
3820		   be eliminated, reload will set it live at that point.  */
3821
3822		if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
3823		  regs_ever_live[regno] = 1;
3824		return;
3825	      }
3826	    /* No death notes for global register variables;
3827	       their values are live after this function exits.  */
3828	    if (global_regs[regno])
3829	      {
3830		if (final)
3831		  reg_next_use[regno] = insn;
3832		return;
3833	      }
3834
3835	    n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3836	    while (--n > 0)
3837	      {
3838		int regno_n = regno + n;
3839		int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3840
3841		SET_REGNO_REG_SET (live, regno_n);
3842		some_needed |= needed_regno;
3843		some_not_needed |= ! needed_regno;
3844	      }
3845	  }
3846	if (final)
3847	  {
3848	    /* Record where each reg is used, so when the reg
3849	       is set we know the next insn that uses it.  */
3850
3851	    reg_next_use[regno] = insn;
3852
3853	    if (regno < FIRST_PSEUDO_REGISTER)
3854	      {
3855		/* If a hard reg is being used,
3856		   record that this function does use it.  */
3857
3858		i = HARD_REGNO_NREGS (regno, GET_MODE (x));
3859		if (i == 0)
3860		  i = 1;
3861		do
3862		  regs_ever_live[regno + --i] = 1;
3863		while (i > 0);
3864	      }
3865	    else
3866	      {
3867		/* Keep track of which basic block each reg appears in.  */
3868
3869		register int blocknum = BLOCK_NUM (insn);
3870
3871		if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3872		  REG_BASIC_BLOCK (regno) = blocknum;
3873		else if (REG_BASIC_BLOCK (regno) != blocknum)
3874		  REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3875
3876		/* Count (weighted) number of uses of each reg.  */
3877
3878		REG_N_REFS (regno) += loop_depth;
3879	      }
3880
3881	    /* Record and count the insns in which a reg dies.
3882	       If it is used in this insn and was dead below the insn
3883	       then it dies in this insn.  If it was set in this insn,
3884	       we do not make a REG_DEAD note; likewise if we already
3885	       made such a note.  */
3886
3887	    if (some_not_needed
3888		&& ! dead_or_set_p (insn, x)
3889#if 0
3890		&& (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
3891#endif
3892		)
3893	      {
3894		/* Check for the case where the register dying partially
3895		   overlaps the register set by this insn.  */
3896		if (regno < FIRST_PSEUDO_REGISTER
3897		    && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
3898		  {
3899		    int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3900		    while (--n >= 0)
3901		      some_needed |= dead_or_set_regno_p (insn, regno + n);
3902		  }
3903
3904		/* If none of the words in X is needed, make a REG_DEAD
3905		   note.  Otherwise, we must make partial REG_DEAD notes.  */
3906		if (! some_needed)
3907		  {
3908		    REG_NOTES (insn)
3909		      = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
3910		    REG_N_DEATHS (regno)++;
3911		  }
3912		else
3913		  {
3914		    int i;
3915
3916		    /* Don't make a REG_DEAD note for a part of a register
3917		       that is set in the insn.  */
3918
3919		    for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
3920			 i >= 0; i--)
3921		      if (!REGNO_REG_SET_P (needed, regno + i)
3922			  && ! dead_or_set_regno_p (insn, regno + i))
3923			REG_NOTES (insn)
3924			  = gen_rtx_EXPR_LIST (REG_DEAD,
3925					       gen_rtx_REG (reg_raw_mode[regno + i],
3926							    regno + i),
3927					       REG_NOTES (insn));
3928		  }
3929	      }
3930	  }
3931      }
3932      return;
3933
3934    case SET:
3935      {
3936	register rtx testreg = SET_DEST (x);
3937	int mark_dest = 0;
3938
3939	/* If storing into MEM, don't show it as being used.  But do
3940	   show the address as being used.  */
3941	if (GET_CODE (testreg) == MEM)
3942	  {
3943#ifdef AUTO_INC_DEC
3944	    if (final)
3945	      find_auto_inc (needed, testreg, insn);
3946#endif
3947	    mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
3948	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
3949	    return;
3950	  }
3951
3952	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG,
3954	   but in some other ways it is like using the reg.
3955
3956	   Storing in a SUBREG or a bit field is like storing the entire
3957	   register in that if the register's value is not used
3958	   then this SET is not needed.  */
3959	while (GET_CODE (testreg) == STRICT_LOW_PART
3960	       || GET_CODE (testreg) == ZERO_EXTRACT
3961	       || GET_CODE (testreg) == SIGN_EXTRACT
3962	       || GET_CODE (testreg) == SUBREG)
3963	  {
3964	    if (GET_CODE (testreg) == SUBREG
3965		&& GET_CODE (SUBREG_REG (testreg)) == REG
3966		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3967		&& (GET_MODE_SIZE (GET_MODE (testreg))
3968		    != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
3969	      REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
3970
3971	    /* Modifying a single register in an alternate mode
3972	       does not use any of the old value.  But these other
3973	       ways of storing in a register do use the old value.  */
3974	    if (GET_CODE (testreg) == SUBREG
3975		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
3976	      ;
3977	    else
3978	      mark_dest = 1;
3979
3980	    testreg = XEXP (testreg, 0);
3981	  }
3982
3983	/* If this is a store into a register,
3984	   recursively scan the value being stored.  */
3985
3986	if ((GET_CODE (testreg) == PARALLEL
3987	     && GET_MODE (testreg) == BLKmode)
3988	    || (GET_CODE (testreg) == REG
3989		&& (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
3990						&& (! reload_completed || frame_pointer_needed)))
3991#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3992		&& ! (regno == HARD_FRAME_POINTER_REGNUM
3993		      && (! reload_completed || frame_pointer_needed))
3994#endif
3995#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3996		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3997#endif
3998		))
3999	  /* We used to exclude global_regs here, but that seems wrong.
4000	     Storing in them is like storing in mem.  */
4001	  {
4002	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
4003	    if (mark_dest)
4004	      mark_used_regs (needed, live, SET_DEST (x), final, insn);
4005	    return;
4006	  }
4007      }
4008      break;
4009
4010    case RETURN:
4011      /* If exiting needs the right stack value, consider this insn as
4012	 using the stack pointer.  In any event, consider it as using
4013	 all global registers and all registers used by return.  */
4014      if (! EXIT_IGNORE_STACK
4015	  || (! FRAME_POINTER_REQUIRED
4016	      && ! current_function_calls_alloca
4017	      && flag_omit_frame_pointer)
4018	  || current_function_sp_is_unchanging)
4019	SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
4020
4021      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4022	if (global_regs[i]
4023#ifdef EPILOGUE_USES
4024	    || EPILOGUE_USES (i)
4025#endif
4026	    )
4027	  SET_REGNO_REG_SET (live, i);
4028      break;
4029
4030    case ASM_OPERANDS:
4031    case UNSPEC_VOLATILE:
4032    case TRAP_IF:
4033    case ASM_INPUT:
4034      {
4035	/* Traditional and volatile asm instructions must be considered to use
4036	   and clobber all hard registers, all pseudo-registers and all of
4037	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
4038
4039	   Consider for instance a volatile asm that changes the fpu rounding
4040	   mode.  An insn should not be moved across this even if it only uses
4041	   pseudo-regs because it might give an incorrectly rounded result.
4042
4043	   ?!? Unfortunately, marking all hard registers as live causes massive
4044	   problems for the register allocator and marking all pseudos as live
4045	   creates mountains of uninitialized variable warnings.
4046
4047	   So for now, just clear the memory set list and mark any regs
4048	   we can find in ASM_OPERANDS as used.  */
4049	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4050	  mem_set_list = NULL_RTX;
4051
	/* For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We cannot just fall through here, since then we would
	   be confused by the ASM_INPUT rtx inside ASM_OPERANDS, which does
	   not indicate a traditional asm, unlike its normal usage.  */
4056	if (code == ASM_OPERANDS)
4057	  {
4058	    int j;
4059
4060	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4061	      mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4062			      final, insn);
4063	  }
4064	break;
4065      }
4066
4067
4068    default:
4069      break;
4070    }
4071
4072  /* Recursively scan the operands of this expression.  */
4073
4074  {
4075    register char *fmt = GET_RTX_FORMAT (code);
4076    register int i;
4077
4078    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4079      {
4080	if (fmt[i] == 'e')
4081	  {
4082	    /* Tail recursive case: save a function call level.  */
4083	    if (i == 0)
4084	      {
4085		x = XEXP (x, 0);
4086		goto retry;
4087	      }
4088	    mark_used_regs (needed, live, XEXP (x, i), final, insn);
4089	  }
4090	else if (fmt[i] == 'E')
4091	  {
4092	    register int j;
4093	    for (j = 0; j < XVECLEN (x, i); j++)
4094	      mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
4095	  }
4096      }
4097  }
4098}
4099
4100#ifdef AUTO_INC_DEC
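
/* INSN's single set adds a constant to a register (or subtracts one,
   giving a negative AMOUNT below).  If the register's next use in the
   same basic block can absorb that adjustment as a pre-increment or
   pre-decrement, rewrite the use, turn INSN into a deleted note and
   return 1.  Otherwise return 0.  */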
4101
4102static int
4103try_pre_increment_1 (insn)
4104     rtx insn;
4105{
4106  /* Find the next use of this reg.  If in same basic block,
4107     make it do pre-increment or pre-decrement if appropriate.  */
4108  rtx x = single_set (insn);
4109  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4110		* INTVAL (XEXP (SET_SRC (x), 1)));
4111  int regno = REGNO (SET_DEST (x));
4112  rtx y = reg_next_use[regno];
4113  if (y != 0
4114      && BLOCK_NUM (y) == BLOCK_NUM (insn)
4115      /* Don't do this if the reg dies, or gets set in y; a standard addressing
4116	 mode would be better.  */
4117      && ! dead_or_set_p (y, SET_DEST (x))
4118      && try_pre_increment (y, SET_DEST (x), amount))
4119    {
4120      /* We have found a suitable auto-increment
4121	 and already changed insn Y to do it.
4122	 So flush this increment-instruction.  */
4123      PUT_CODE (insn, NOTE);
4124      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4125      NOTE_SOURCE_FILE (insn) = 0;
4126      /* Count a reference to this reg for the increment
	 insn we are deleting.  When a reg is incremented,
4128	 spilling it is worse, so we want to make that
4129	 less likely.  */
4130      if (regno >= FIRST_PSEUDO_REGISTER)
4131	{
4132	  REG_N_REFS (regno) += loop_depth;
4133	  REG_N_SETS (regno)++;
4134	}
4135      return 1;
4136    }
4137  return 0;
4138}
4139
4140/* Try to change INSN so that it does pre-increment or pre-decrement
4141   addressing on register REG in order to add AMOUNT to REG.
4142   AMOUNT is negative for pre-decrement.
4143   Returns 1 if the change could be made.
   This checks all aspects of the validity of the result of modifying INSN.  */
4145
4146static int
4147try_pre_increment (insn, reg, amount)
4148     rtx insn, reg;
4149     HOST_WIDE_INT amount;
4150{
4151  register rtx use;
4152
4153  /* Nonzero if we can try to make a pre-increment or pre-decrement.
4154     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4155  int pre_ok = 0;
4156  /* Nonzero if we can try to make a post-increment or post-decrement.
4157     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4158     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4159     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4160  int post_ok = 0;
4161
4162  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4163  int do_post = 0;
4164
4165  /* From the sign of increment, see which possibilities are conceivable
4166     on this target machine.  */
4167  if (HAVE_PRE_INCREMENT && amount > 0)
4168    pre_ok = 1;
4169  if (HAVE_POST_INCREMENT && amount > 0)
4170    post_ok = 1;
4171
4172  if (HAVE_PRE_DECREMENT && amount < 0)
4173    pre_ok = 1;
4174  if (HAVE_POST_DECREMENT && amount < 0)
4175    post_ok = 1;
4176
4177  if (! (pre_ok || post_ok))
4178    return 0;
4179
4180  /* It is not safe to add a side effect to a jump insn
4181     because if the incremented register is spilled and must be reloaded
4182     there would be no way to store the incremented value back in memory.  */
4183
4184  if (GET_CODE (insn) == JUMP_INSN)
4185    return 0;
4186
4187  use = 0;
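  /* Look first for a use of REG itself as an address; the pre-modify
     form can replace that directly.  Otherwise, if a post-modify form
     is available, look for a use of REG displaced by -AMOUNT, which the
     post-modify form reproduces while leaving REG adjusted by AMOUNT.  */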
4188  if (pre_ok)
4189    use = find_use_as_address (PATTERN (insn), reg, 0);
4190  if (post_ok && (use == 0 || use == (rtx) 1))
4191    {
4192      use = find_use_as_address (PATTERN (insn), reg, -amount);
4193      do_post = 1;
4194    }
4195
4196  if (use == 0 || use == (rtx) 1)
4197    return 0;
4198
4199  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4200    return 0;
4201
4202  /* See if this combination of instruction and addressing mode exists.  */
4203  if (! validate_change (insn, &XEXP (use, 0),
4204			 gen_rtx_fmt_e (amount > 0
4205					? (do_post ? POST_INC : PRE_INC)
4206					: (do_post ? POST_DEC : PRE_DEC),
4207					Pmode, reg), 0))
4208    return 0;
4209
4210  /* Record that this insn now has an implicit side effect on X.  */
4211  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4212  return 1;
4213}
4214
4215#endif /* AUTO_INC_DEC */
4216
4217/* Find the place in the rtx X where REG is used as a memory address.
4218   Return the MEM rtx that so uses it.
4219   If PLUSCONST is nonzero, search instead for a memory address equivalent to
4220   (plus REG (const_int PLUSCONST)).
4221
4222   If such an address does not appear, return 0.
4223   If REG appears more than once, or is used other than in such an address,
4224   return (rtx)1.  */
4225
4226rtx
4227find_use_as_address (x, reg, plusconst)
4228     register rtx x;
4229     rtx reg;
4230     HOST_WIDE_INT plusconst;
4231{
4232  enum rtx_code code = GET_CODE (x);
4233  char *fmt = GET_RTX_FORMAT (code);
4234  register int i;
4235  register rtx value = 0;
4236  register rtx tem;
4237
4238  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4239    return x;
4240
4241  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4242      && XEXP (XEXP (x, 0), 0) == reg
4243      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4244      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4245    return x;
4246
4247  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4248    {
4249      /* If REG occurs inside a MEM used in a bit-field reference,
4250	 that is unacceptable.  */
4251      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4252	return (rtx) (HOST_WIDE_INT) 1;
4253    }
4254
4255  if (x == reg)
4256    return (rtx) (HOST_WIDE_INT) 1;
4257
4258  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4259    {
4260      if (fmt[i] == 'e')
4261	{
4262	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4263	  if (value == 0)
4264	    value = tem;
4265	  else if (tem != 0)
4266	    return (rtx) (HOST_WIDE_INT) 1;
4267	}
4268      if (fmt[i] == 'E')
4269	{
4270	  register int j;
4271	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4272	    {
4273	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4274	      if (value == 0)
4275		value = tem;
4276	      else if (tem != 0)
4277		return (rtx) (HOST_WIDE_INT) 1;
4278	    }
4279	}
4280    }
4281
4282  return value;
4283}
4284
4285/* Write information about registers and basic blocks into FILE.
4286   This is part of making a debugging dump.  */
4287
4288void
4289dump_flow_info (file)
4290     FILE *file;
4291{
4292  register int i;
4293  static char *reg_class_names[] = REG_CLASS_NAMES;
4294
4295  fprintf (file, "%d registers.\n", max_regno);
4296  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4297    if (REG_N_REFS (i))
4298      {
4299	enum reg_class class, altclass;
4300	fprintf (file, "\nRegister %d used %d times across %d insns",
4301		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4302	if (REG_BASIC_BLOCK (i) >= 0)
4303	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4304	if (REG_N_SETS (i))
4305  	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
4306   		   (REG_N_SETS (i) == 1) ? "" : "s");
4307	if (REG_USERVAR_P (regno_reg_rtx[i]))
4308  	  fprintf (file, "; user var");
4309	if (REG_N_DEATHS (i) != 1)
4310	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4311	if (REG_N_CALLS_CROSSED (i) == 1)
4312	  fprintf (file, "; crosses 1 call");
4313	else if (REG_N_CALLS_CROSSED (i))
4314	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4315	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4316	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4317	class = reg_preferred_class (i);
4318	altclass = reg_alternate_class (i);
4319	if (class != GENERAL_REGS || altclass != ALL_REGS)
4320	  {
4321	    if (altclass == ALL_REGS || class == ALL_REGS)
4322	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
4323	    else if (altclass == NO_REGS)
4324	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
4325	    else
4326	      fprintf (file, "; pref %s, else %s",
4327		       reg_class_names[(int) class],
4328		       reg_class_names[(int) altclass]);
4329	  }
4330	if (REGNO_POINTER_FLAG (i))
4331	  fprintf (file, "; pointer");
4332	fprintf (file, ".\n");
4333      }
4334
4335  fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
4336  for (i = 0; i < n_basic_blocks; i++)
4337    {
4338      register basic_block bb = BASIC_BLOCK (i);
4339      register int regno;
4340      register edge e;
4341
4342      fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4343	       i, INSN_UID (bb->head), INSN_UID (bb->end));
4344
4345      fprintf (file, "Predecessors: ");
4346      for (e = bb->pred; e ; e = e->pred_next)
4347	dump_edge_info (file, e, 0);
4348
4349      fprintf (file, "\nSuccessors: ");
4350      for (e = bb->succ; e ; e = e->succ_next)
4351	dump_edge_info (file, e, 1);
4352
4353      fprintf (file, "\nRegisters live at start:");
4354      if (bb->global_live_at_start)
4355	{
4356          for (regno = 0; regno < max_regno; regno++)
4357	    if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4358	      fprintf (file, " %d", regno);
4359	}
4360      else
4361	fprintf (file, " n/a");
4362
4363      fprintf (file, "\nRegisters live at end:");
4364      if (bb->global_live_at_end)
4365	{
4366          for (regno = 0; regno < max_regno; regno++)
4367	    if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4368	      fprintf (file, " %d", regno);
4369	}
4370      else
4371	fprintf (file, " n/a");
4372
4373      putc('\n', file);
4374    }
4375
4376  putc('\n', file);
4377}
4378
4379static void
4380dump_edge_info (file, e, do_succ)
4381     FILE *file;
4382     edge e;
4383     int do_succ;
4384{
4385  basic_block side = (do_succ ? e->dest : e->src);
4386
4387  if (side == ENTRY_BLOCK_PTR)
4388    fputs (" ENTRY", file);
4389  else if (side == EXIT_BLOCK_PTR)
4390    fputs (" EXIT", file);
4391  else
4392    fprintf (file, " %d", side->index);
4393
4394  if (e->flags)
4395    {
4396      static char * bitnames[] = {
4397	"fallthru", "crit", "ab", "abcall", "eh", "fake"
4398      };
4399      int comma = 0;
4400      int i, flags = e->flags;
4401
4402      fputc (' ', file);
4403      fputc ('(', file);
4404      for (i = 0; flags; i++)
4405	if (flags & (1 << i))
4406	  {
4407	    flags &= ~(1 << i);
4408
4409	    if (comma)
4410	      fputc (',', file);
4411	    if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4412	      fputs (bitnames[i], file);
4413	    else
4414	      fprintf (file, "%d", i);
4415	    comma = 1;
4416	  }
4417      fputc (')', file);
4418    }
4419}
4420
4421
4422/* Like print_rtl, but also print out live information for the start of each
4423   basic block.  */
4424
4425void
4426print_rtl_with_bb (outf, rtx_first)
4427     FILE *outf;
4428     rtx rtx_first;
4429{
4430  register rtx tmp_rtx;
4431
4432  if (rtx_first == 0)
4433    fprintf (outf, "(nil)\n");
4434  else
4435    {
4436      int i;
4437      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
4438      int max_uid = get_max_uid ();
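      /* START and END map an insn uid to the basic block that begins or
	 ends with that insn; IN_BB_P records whether each insn appears in
	 no block, exactly one block, or more than one.  */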
4439      basic_block *start = (basic_block *)
4440	alloca (max_uid * sizeof (basic_block));
4441      basic_block *end = (basic_block *)
4442	alloca (max_uid * sizeof (basic_block));
4443      enum bb_state *in_bb_p = (enum bb_state *)
4444	alloca (max_uid * sizeof (enum bb_state));
4445
4446      memset (start, 0, max_uid * sizeof (basic_block));
4447      memset (end, 0, max_uid * sizeof (basic_block));
4448      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
4449
4450      for (i = n_basic_blocks - 1; i >= 0; i--)
4451	{
4452	  basic_block bb = BASIC_BLOCK (i);
4453	  rtx x;
4454
4455	  start[INSN_UID (bb->head)] = bb;
4456	  end[INSN_UID (bb->end)] = bb;
4457	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
4458	    {
4459	      enum bb_state state = IN_MULTIPLE_BB;
4460	      if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
4461		state = IN_ONE_BB;
4462	      in_bb_p[INSN_UID(x)] = state;
4463
4464	      if (x == bb->end)
4465		break;
4466	    }
4467	}
4468
4469      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
4470	{
4471	  int did_output;
4472	  basic_block bb;
4473
4474	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
4475	    {
4476	      fprintf (outf, ";; Start of basic block %d, registers live:",
4477		       bb->index);
4478
4479	      EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
4480					 {
4481					   fprintf (outf, " %d", i);
4482					   if (i < FIRST_PSEUDO_REGISTER)
4483					     fprintf (outf, " [%s]",
4484						      reg_names[i]);
4485					 });
4486	      putc ('\n', outf);
4487	    }
4488
4489	  if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
4490	      && GET_CODE (tmp_rtx) != NOTE
4491	      && GET_CODE (tmp_rtx) != BARRIER
4492	      && ! obey_regdecls)
4493	    fprintf (outf, ";; Insn is not within a basic block\n");
4494	  else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
4495	    fprintf (outf, ";; Insn is in multiple basic blocks\n");
4496
4497	  did_output = print_rtl_single (outf, tmp_rtx);
4498
4499	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
4500	    fprintf (outf, ";; End of basic block %d\n", bb->index);
4501
4502	  if (did_output)
4503	    putc ('\n', outf);
4504	}
4505    }
4506}
4507
4508
4509/* Integer list support.  */
4510
4511/* Allocate a node from list *HEAD_PTR.  */
4512
4513static int_list_ptr
4514alloc_int_list_node (head_ptr)
4515     int_list_block **head_ptr;
4516{
4517  struct int_list_block *first_blk = *head_ptr;
4518
4519  if (first_blk == NULL || first_blk->nodes_left <= 0)
4520    {
4521      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
4522      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
4523      first_blk->next = *head_ptr;
4524      *head_ptr = first_blk;
4525    }
4526
4527  first_blk->nodes_left--;
4528  return &first_blk->nodes[first_blk->nodes_left];
4529}
4530
4531/* Pointer to head of predecessor/successor block list.  */
4532static int_list_block *pred_int_list_blocks;
4533
4534/* Add a new node to integer list LIST with value VAL.
4535   LIST is a pointer to a list object to allow for different implementations.
4536   If *LIST is initially NULL, the list is empty.
4537   The caller must not care whether the element is added to the front or
4538   to the end of the list (to allow for different implementations).  */
4539
4540static int_list_ptr
4541add_int_list_node (blk_list, list, val)
4542     int_list_block **blk_list;
4543     int_list **list;
4544     int val;
4545{
4546  int_list_ptr p = alloc_int_list_node (blk_list);
4547
4548  p->val = val;
4549  p->next = *list;
4550  *list = p;
4551  return p;
4552}
4553
4554/* Free the blocks of lists at BLK_LIST.  */
4555
4556void
4557free_int_list (blk_list)
4558     int_list_block **blk_list;
4559{
4560  int_list_block *p, *next;
4561
4562  for (p = *blk_list; p != NULL; p = next)
4563    {
4564      next = p->next;
4565      free (p);
4566    }
4567
4568  /* Mark list as empty for the next function we compile.  */
4569  *blk_list = NULL;
4570}
4571
4572/* Predecessor/successor computation.  */
4573
/* Mark PRED_BB a predecessor of SUCC_BB,
4575   and conversely SUCC_BB a successor of PRED_BB.  */
4576
4577static void
4578add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
4579     int pred_bb;
4580     int succ_bb;
4581     int_list_ptr *s_preds;
4582     int_list_ptr *s_succs;
4583     int *num_preds;
4584     int *num_succs;
4585{
4586  if (succ_bb != EXIT_BLOCK)
4587    {
4588      add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
4589      num_preds[succ_bb]++;
4590    }
4591  if (pred_bb != ENTRY_BLOCK)
4592    {
4593      add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
4594      num_succs[pred_bb]++;
4595    }
4596}
4597
4598/* Convert edge lists into pred/succ lists for backward compatibility.  */
4599
4600void
4601compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
4602     int_list_ptr *s_preds;
4603     int_list_ptr *s_succs;
4604     int *num_preds;
4605     int *num_succs;
4606{
4607  int i, n = n_basic_blocks;
4608  edge e;
4609
4610  memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
4611  memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
4612  memset (num_preds, 0, n_basic_blocks * sizeof (int));
4613  memset (num_succs, 0, n_basic_blocks * sizeof (int));
4614
4615  for (i = 0; i < n; ++i)
4616    {
4617      basic_block bb = BASIC_BLOCK (i);
4618
4619      for (e = bb->succ; e ; e = e->succ_next)
4620	add_pred_succ (i, e->dest->index, s_preds, s_succs,
4621		       num_preds, num_succs);
4622    }
4623
4624  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
4625    add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
4626		   num_preds, num_succs);
4627}
4628
4629void
4630dump_bb_data (file, preds, succs, live_info)
4631     FILE *file;
4632     int_list_ptr *preds;
4633     int_list_ptr *succs;
4634     int live_info;
4635{
4636  int bb;
4637  int_list_ptr p;
4638
4639  fprintf (file, "BB data\n\n");
4640  for (bb = 0; bb < n_basic_blocks; bb++)
4641    {
4642      fprintf (file, "BB %d, start %d, end %d\n", bb,
4643	       INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
4644      fprintf (file, "  preds:");
4645      for (p = preds[bb]; p != NULL; p = p->next)
4646	{
4647	  int pred_bb = INT_LIST_VAL (p);
4648	  if (pred_bb == ENTRY_BLOCK)
4649	    fprintf (file, " entry");
4650	  else
4651	    fprintf (file, " %d", pred_bb);
4652	}
4653      fprintf (file, "\n");
4654      fprintf (file, "  succs:");
4655      for (p = succs[bb]; p != NULL; p = p->next)
4656	{
4657	  int succ_bb = INT_LIST_VAL (p);
4658	  if (succ_bb == EXIT_BLOCK)
4659	    fprintf (file, " exit");
4660	  else
4661	    fprintf (file, " %d", succ_bb);
4662	}
4663      if (live_info)
4664	{
4665	  int regno;
4666	  fprintf (file, "\nRegisters live at start:");
4667	  for (regno = 0; regno < max_regno; regno++)
4668	    if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
4669	      fprintf (file, " %d", regno);
4670	  fprintf (file, "\n");
4671	}
4672      fprintf (file, "\n");
4673    }
4674  fprintf (file, "\n");
4675}
4676
4677/* Free basic block data storage.  */
4678
4679void
4680free_bb_mem ()
4681{
4682  free_int_list (&pred_int_list_blocks);
4683}
4684
4685/* Compute dominator relationships.  */
4686void
4687compute_dominators (dominators, post_dominators, s_preds, s_succs)
4688     sbitmap *dominators;
4689     sbitmap *post_dominators;
4690     int_list_ptr *s_preds;
4691     int_list_ptr *s_succs;
4692{
4693  int bb, changed, passes;
4694  sbitmap *temp_bitmap;
4695
4696  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4697  sbitmap_vector_ones (dominators, n_basic_blocks);
4698  sbitmap_vector_ones (post_dominators, n_basic_blocks);
4699  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4700
4701  sbitmap_zero (dominators[0]);
4702  SET_BIT (dominators[0], 0);
4703
4704  sbitmap_zero (post_dominators[n_basic_blocks - 1]);
  SET_BIT (post_dominators[n_basic_blocks - 1], n_basic_blocks - 1);
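
  /* Iterate to a fixed point: a block's dominator set is itself plus
     the intersection of its predecessors' dominator sets, and its
     post-dominator set is itself plus the intersection of its
     successors' post-dominator sets.  */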
4706
4707  passes = 0;
4708  changed = 1;
4709  while (changed)
4710    {
4711      changed = 0;
4712      for (bb = 1; bb < n_basic_blocks; bb++)
4713	{
4714	  sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4715					     bb, s_preds);
4716	  SET_BIT (temp_bitmap[bb], bb);
4717	  changed |= sbitmap_a_and_b (dominators[bb],
4718				      dominators[bb],
4719				      temp_bitmap[bb]);
4720	  sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4721					   bb, s_succs);
4722	  SET_BIT (temp_bitmap[bb], bb);
4723	  changed |= sbitmap_a_and_b (post_dominators[bb],
4724				      post_dominators[bb],
4725				      temp_bitmap[bb]);
4726	}
4727      passes++;
4728    }
4729
4730  free (temp_bitmap);
4731}
4732
4733/* Given DOMINATORS, compute the immediate dominators into IDOM.  */
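
/* For instance, if dom (B) = {0, 2, 5, B}, with 0 dominating 2 and 2
   dominating 5, then removing B itself and then each remaining
   dominator's own strict dominators leaves only {5}, and 5 is indeed
   B's immediate dominator.  */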
4734
4735void
4736compute_immediate_dominators (idom, dominators)
4737     int *idom;
4738     sbitmap *dominators;
4739{
4740  sbitmap *tmp;
4741  int b;
4742
4743  tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4744
4745  /* Begin with tmp(n) = dom(n) - { n }.  */
4746  for (b = n_basic_blocks; --b >= 0; )
4747    {
4748      sbitmap_copy (tmp[b], dominators[b]);
4749      RESET_BIT (tmp[b], b);
4750    }
4751
  /* Subtract out all of our dominators' dominators.  */
4753  for (b = n_basic_blocks; --b >= 0; )
4754    {
4755      sbitmap tmp_b = tmp[b];
4756      int s;
4757
4758      for (s = n_basic_blocks; --s >= 0; )
4759	if (TEST_BIT (tmp_b, s))
4760	  sbitmap_difference (tmp_b, tmp_b, tmp[s]);
4761    }
4762
4763  /* Find the one bit set in the bitmap and put it in the output array.  */
4764  for (b = n_basic_blocks; --b >= 0; )
4765    {
4766      int t;
4767      EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
4768    }
4769
4770  sbitmap_vector_free (tmp);
4771}
4772
4773/* Count for a single SET rtx, X.  */
4774
4775static void
4776count_reg_sets_1 (x)
4777     rtx x;
4778{
4779  register int regno;
4780  register rtx reg = SET_DEST (x);
4781
4782  /* Find the register that's set/clobbered.  */
4783  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4784	 || GET_CODE (reg) == SIGN_EXTRACT
4785	 || GET_CODE (reg) == STRICT_LOW_PART)
4786    reg = XEXP (reg, 0);
4787
4788  if (GET_CODE (reg) == PARALLEL
4789      && GET_MODE (reg) == BLKmode)
4790    {
4791      register int i;
4792      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4793	count_reg_sets_1 (XVECEXP (reg, 0, i));
4794      return;
4795    }
4796
4797  if (GET_CODE (reg) == REG)
4798    {
4799      regno = REGNO (reg);
4800      if (regno >= FIRST_PSEUDO_REGISTER)
4801	{
4802	  /* Count (weighted) references, stores, etc.  This counts a
4803	     register twice if it is modified, but that is correct.  */
4804	  REG_N_SETS (regno)++;
4805
4806	  REG_N_REFS (regno) += loop_depth;
4807	}
4808    }
4809}
4810
4811/* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4812   REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */
4813
4814static void
4815count_reg_sets  (x)
4816     rtx x;
4817{
4818  register RTX_CODE code = GET_CODE (x);
4819
4820  if (code == SET || code == CLOBBER)
4821    count_reg_sets_1 (x);
4822  else if (code == PARALLEL)
4823    {
4824      register int i;
4825      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4826	{
4827	  code = GET_CODE (XVECEXP (x, 0, i));
4828	  if (code == SET || code == CLOBBER)
4829	    count_reg_sets_1 (XVECEXP (x, 0, i));
4830	}
4831    }
4832}
4833
/* Increment REG_N_REFS by the current loop depth for each register reference
4835   found in X.  */
4836
4837static void
4838count_reg_references (x)
4839     rtx x;
4840{
4841  register RTX_CODE code;
4842
4843 retry:
4844  code = GET_CODE (x);
4845  switch (code)
4846    {
4847    case LABEL_REF:
4848    case SYMBOL_REF:
4849    case CONST_INT:
4850    case CONST:
4851    case CONST_DOUBLE:
4852    case PC:
4853    case ADDR_VEC:
4854    case ADDR_DIFF_VEC:
4855    case ASM_INPUT:
4856      return;
4857
4858#ifdef HAVE_cc0
4859    case CC0:
4860      return;
4861#endif
4862
4863    case CLOBBER:
4864      /* If we are clobbering a MEM, mark any registers inside the address
4865	 as being used.  */
4866      if (GET_CODE (XEXP (x, 0)) == MEM)
4867	count_reg_references (XEXP (XEXP (x, 0), 0));
4868      return;
4869
4870    case SUBREG:
4871      /* While we're here, optimize this case.  */
4872      x = SUBREG_REG (x);
4873
      /* In case the SUBREG is not of a register, don't optimize.  */
4875      if (GET_CODE (x) != REG)
4876	{
4877	  count_reg_references (x);
4878	  return;
4879	}
4880
4881      /* ... fall through ...  */
4882
4883    case REG:
4884      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4885	REG_N_REFS (REGNO (x)) += loop_depth;
4886      return;
4887
4888    case SET:
4889      {
4890	register rtx testreg = SET_DEST (x);
4891	int mark_dest = 0;
4892
4893	/* If storing into MEM, don't show it as being used.  But do
4894	   show the address as being used.  */
4895	if (GET_CODE (testreg) == MEM)
4896	  {
4897	    count_reg_references (XEXP (testreg, 0));
4898	    count_reg_references (SET_SRC (x));
4899	    return;
4900	  }
4901
4902	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG,
4904	   but in some other ways it is like using the reg.
4905
4906	   Storing in a SUBREG or a bit field is like storing the entire
4907	   register in that if the register's value is not used
4908	   then this SET is not needed.  */
4909	while (GET_CODE (testreg) == STRICT_LOW_PART
4910	       || GET_CODE (testreg) == ZERO_EXTRACT
4911	       || GET_CODE (testreg) == SIGN_EXTRACT
4912	       || GET_CODE (testreg) == SUBREG)
4913	  {
4914	    /* Modifying a single register in an alternate mode
4915	       does not use any of the old value.  But these other
4916	       ways of storing in a register do use the old value.  */
4917	    if (GET_CODE (testreg) == SUBREG
4918		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4919	      ;
4920	    else
4921	      mark_dest = 1;
4922
4923	    testreg = XEXP (testreg, 0);
4924	  }
4925
4926	/* If this is a store into a register,
4927	   recursively scan the value being stored.  */
4928
4929	if ((GET_CODE (testreg) == PARALLEL
4930	     && GET_MODE (testreg) == BLKmode)
4931	    || GET_CODE (testreg) == REG)
4932	  {
4933	    count_reg_references (SET_SRC (x));
4934	    if (mark_dest)
4935	      count_reg_references (SET_DEST (x));
4936	    return;
4937	  }
4938      }
4939      break;
4940
4941    default:
4942      break;
4943    }
4944
4945  /* Recursively scan the operands of this expression.  */
4946
4947  {
4948    register char *fmt = GET_RTX_FORMAT (code);
4949    register int i;
4950
4951    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4952      {
4953	if (fmt[i] == 'e')
4954	  {
4955	    /* Tail recursive case: save a function call level.  */
4956	    if (i == 0)
4957	      {
4958		x = XEXP (x, 0);
4959		goto retry;
4960	      }
4961	    count_reg_references (XEXP (x, i));
4962	  }
4963	else if (fmt[i] == 'E')
4964	  {
4965	    register int j;
4966	    for (j = 0; j < XVECLEN (x, i); j++)
4967	      count_reg_references (XVECEXP (x, i, j));
4968	  }
4969      }
4970  }
4971}
4972
4973/* Recompute register set/reference counts immediately prior to register
4974   allocation.
4975
4976   This avoids problems with set/reference counts changing to/from values
4977   which have special meanings to the register allocators.
4978
4979   Additionally, the reference counts are the primary component used by the
4980   register allocators to prioritize pseudos for allocation to hard regs.
4981   More accurate reference counts generally lead to better register allocation.
4982
4983   F is the first insn to be scanned.
4984   LOOP_STEP denotes how much loop_depth should be incremented per
4985   loop nesting level in order to increase the ref count more for references
4986   in a loop.
4987
4988   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4989   possibly other information which is used by the register allocators.  */
4990
4991void
4992recompute_reg_usage (f, loop_step)
4993     rtx f;
4994     int loop_step;
4995{
4996  rtx insn;
4997  int i, max_reg;
4998
4999  /* Clear out the old data.  */
5000  max_reg = max_reg_num ();
5001  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5002    {
5003      REG_N_SETS (i) = 0;
5004      REG_N_REFS (i) = 0;
5005    }
5006
5007  /* Scan each insn in the chain and count how many times each register is
5008     set/used.  */
5009  loop_depth = 1;
5010  for (insn = f; insn; insn = NEXT_INSN (insn))
5011    {
5012      /* Keep track of loop depth.  */
5013      if (GET_CODE (insn) == NOTE)
5014	{
5015	  /* Look for loop boundaries.  */
5016	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5017	    loop_depth -= loop_step;
5018	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5019	    loop_depth += loop_step;
5020
5021	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5022	     Abort now rather than setting register status incorrectly.  */
5023	  if (loop_depth == 0)
5024	    abort ();
5025	}
5026      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5027	{
5028	  rtx links;
5029
5030	  /* This call will increment REG_N_SETS for each SET or CLOBBER
5031	     of a register in INSN.  It will also increment REG_N_REFS
5032	     by the loop depth for each set of a register in INSN.  */
5033	  count_reg_sets (PATTERN (insn));
5034
5035	  /* count_reg_sets does not detect autoincrement address modes, so
5036	     detect them here by looking at the notes attached to INSN.  */
5037	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5038	    {
5039	      if (REG_NOTE_KIND (links) == REG_INC)
5040		/* Count (weighted) references, stores, etc.  This counts a
5041		   register twice if it is modified, but that is correct.  */
5042		REG_N_SETS (REGNO (XEXP (links, 0)))++;
5043	    }
5044
5045	  /* This call will increment REG_N_REFS by the current loop depth for
5046	     each reference to a register in INSN.  */
5047	  count_reg_references (PATTERN (insn));
5048
5049	  /* count_reg_references will not include counts for arguments to
5050	     function calls, so detect them here by examining the
5051	     CALL_INSN_FUNCTION_USAGE data.  */
5052	  if (GET_CODE (insn) == CALL_INSN)
5053	    {
5054	      rtx note;
5055
5056	      for (note = CALL_INSN_FUNCTION_USAGE (insn);
5057		   note;
5058		   note = XEXP (note, 1))
5059		if (GET_CODE (XEXP (note, 0)) == USE)
5060		  count_reg_references (SET_DEST (XEXP (note, 0)));
5061	    }
5062	}
5063    }
5064}
5065
5066/* Record INSN's block as BB.  */
5067
5068void
5069set_block_for_insn (insn, bb)
5070     rtx insn;
5071     basic_block bb;
5072{
5073  size_t uid = INSN_UID (insn);
5074  if (uid >= basic_block_for_insn->num_elements)
5075    {
5076      int new_size;
5077
5078      /* Add one-eighth the size so we don't keep calling xrealloc.  */
5079      new_size = uid + (uid + 7) / 8;
5080
5081      VARRAY_GROW (basic_block_for_insn, new_size);
5082    }
5083  VARRAY_BB (basic_block_for_insn, uid) = bb;
5084}
5085
5086/* Record INSN's block number as BB.  */
5087/* ??? This has got to go.  */
5088
5089void
5090set_block_num (insn, bb)
5091     rtx insn;
5092     int bb;
5093{
5094  set_block_for_insn (insn, BASIC_BLOCK (bb));
5095}
5096
/* Verify the consistency of the CFG.  This function checks some CFG
   invariants and aborts when something is wrong.  The hope is that it
   will help convert many optimization passes to ones that keep the CFG
   consistent.

   Currently it performs the following checks:
5102
5103   - test head/end pointers
5104   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that the boundary is necessary)
   - scans the body of each basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - checks that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
5112
   In the future it can be extended to check a lot of other stuff as well
5114   (reachability of basic blocks, life information, etc. etc.).  */
5115
5116void
5117verify_flow_info ()
5118{
5119  const int max_uid = get_max_uid ();
5120  const rtx rtx_first = get_insns ();
5121  basic_block *bb_info;
5122  rtx x;
5123  int i;
5124
5125  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5126  memset (bb_info, 0, max_uid * sizeof (basic_block));
5127
5128  /* First pass check head/end pointers and set bb_info array used by
5129     later passes.  */
5130  for (i = n_basic_blocks - 1; i >= 0; i--)
5131    {
5132      basic_block bb = BASIC_BLOCK (i);
5133
5134      /* Check the head pointer and make sure that it is pointing into
5135         insn list.  */
5136      for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5137	if (x == bb->head)
5138	  break;
5139      if (!x)
5140	{
5141	  fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
5142		 INSN_UID (bb->head), bb->index);
5143	}
5144
5145      /* Check the end pointer and make sure that it is pointing into
5146         insn list.  */
5147      for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5148	{
5149	  if (bb_info[INSN_UID (x)] != NULL)
5150	    {
5151	      fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
5152		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5153	    }
5154	  bb_info[INSN_UID (x)] = bb;
5155
5156	  if (x == bb->end)
5157	    break;
5158	}
5159      if (!x)
5160	{
5161	  fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
5162		 INSN_UID (bb->end), bb->index);
5163	}
5164    }
5165
5166  /* Now check the basic blocks (boundaries etc.) */
5167  for (i = n_basic_blocks - 1; i >= 0; i--)
5168    {
5169      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists.  */
5171      edge e;
5172
5173      e = bb->succ;
5174      while (e)
5175	{
5176	  if (e->src != bb)
5177	    {
5178	      fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5179		       bb->index);
5180	      fprintf (stderr, "Predecessor: ");
5181	      dump_edge_info (stderr, e, 0);
5182	      fprintf (stderr, "\nSuccessor: ");
5183	      dump_edge_info (stderr, e, 1);
5184	      fflush (stderr);
5185	      abort ();
5186	    }
5187	  if (e->dest != EXIT_BLOCK_PTR)
5188	    {
5189	      edge e2 = e->dest->pred;
5190	      while (e2 && e2 != e)
5191		e2 = e2->pred_next;
5192	      if (!e2)
5193		{
5194		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5195			 bb->index);
5196		}
5197	    }
5198	  e = e->succ_next;
5199	}
5200
5201      e = bb->pred;
5202      while (e)
5203	{
5204	  if (e->dest != bb)
5205	    {
5206	      fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
5207		       bb->index);
5208	      fprintf (stderr, "Predecessor: ");
5209	      dump_edge_info (stderr, e, 0);
5210	      fprintf (stderr, "\nSuccessor: ");
5211	      dump_edge_info (stderr, e, 1);
5212	      fflush (stderr);
5213	      abort ();
5214	    }
5215	  if (e->src != ENTRY_BLOCK_PTR)
5216	    {
5217	      edge e2 = e->src->succ;
5218	      while (e2 && e2 != e)
5219		e2 = e2->succ_next;
5220	      if (!e2)
5221		{
5222		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5223			 bb->index);
5224		}
5225	    }
5226	  e = e->pred_next;
5227	}
5228
      /* OK, the pointers are correct.  Now check the header of the
	 basic block.  It ought to contain an optional CODE_LABEL
	 followed by a NOTE_INSN_BASIC_BLOCK note.  */
5232      x = bb->head;
5233      if (GET_CODE (x) == CODE_LABEL)
5234	{
5235	  if (bb->end == x)
5236	    {
5237	      fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
5238	    }
5239	  x = NEXT_INSN (x);
5240	}
5241      if (GET_CODE (x) != NOTE
5242	  || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5243	  || NOTE_BASIC_BLOCK (x) != bb)
5244	{
5245	  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5246		 bb->index);
5247	}
5248
5249      if (bb->end == x)
5250	{
5251	  /* Do checks for empty blocks here */
5252	}
5253      else
5254	{
5255	  x = NEXT_INSN (x);
5256	  while (x)
5257	    {
5258	      if (GET_CODE (x) == NOTE
5259		  && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5260		{
5261		  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
5262			 INSN_UID (x), bb->index);
5263		}
5264
5265	      if (x == bb->end)
5266		break;
5267
5268	      if (GET_CODE (x) == JUMP_INSN
5269		  || GET_CODE (x) == CODE_LABEL
5270		  || GET_CODE (x) == BARRIER)
5271		{
5272		  fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
5273			      x, bb->index);
5274		}
5275
5276	      x = NEXT_INSN (x);
5277	    }
5278	}
5279    }
5280
5281  x = rtx_first;
5282  while (x)
5283    {
5284      if (!bb_info[INSN_UID (x)])
5285	{
5286	  switch (GET_CODE (x))
5287	    {
5288	    case BARRIER:
5289	    case NOTE:
5290	      break;
5291
5292	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
5294	      if (NEXT_INSN (x)
5295		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5296		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5297		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5298		{
5299		  x = NEXT_INSN (x);
5300		}
5301
5302	      /* But in any case, non-deletable labels can appear anywhere.  */
5303	      break;
5304
5305	    default:
5306	      fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
5307	    }
5308	}
5309
5310      x = NEXT_INSN (x);
5311    }
5312}
5313