flow.c revision 52284
1/* Data flow analysis for GNU compiler.
2   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA.  */
20
21
22/* This file contains the data flow analysis pass of the compiler.  It
23   computes data flow information which tells combine_instructions
24   which insns to consider combining and controls register allocation.
25
26   Additional data flow information that is too bulky to record is
27   generated during the analysis, and is used at that time to create
28   autoincrement and autodecrement addressing.
29
30   The first step is dividing the function into basic blocks.
31   find_basic_blocks does this.  Then life_analysis determines
32   where each register is live and where it is dead.
33
34   ** find_basic_blocks **
35
36   find_basic_blocks divides the current function's rtl into basic
37   blocks and constructs the CFG.  The blocks are recorded in the
38   basic_block_info array; the CFG exists in the edge structures
39   referenced by the blocks.
40
41   find_basic_blocks also finds any unreachable loops and deletes them.
42
43   ** life_analysis **
44
45   life_analysis is called immediately after find_basic_blocks.
46   It uses the basic block information to determine where each
47   hard or pseudo register is live.
48
49   ** live-register info **
50
51   The information about where each register is live is in two parts:
52   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54   basic_block->global_live_at_start has an element for each basic
55   block, and the element is a bit-vector with a bit for each hard or
56   pseudo register.  The bit is 1 if the register is live at the
57   beginning of the basic block.
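
   For example (purely as an illustration), a later pass can test whether
   register REGNO is live on entry to basic block number I with

	REGNO_REG_SET_P (BASIC_BLOCK (I)->global_live_at_start, REGNO)

   using the regset macros.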
58
59   Two types of elements can be added to an insn's REG_NOTES.
60   A REG_DEAD note is added to an insn's REG_NOTES for any register
61   that meets both of two conditions:  The value in the register is not
62   needed in subsequent insns and the insn does not replace the value in
63   the register (in the case of multi-word hard registers, the value in
64   each register must be replaced by the insn to avoid a REG_DEAD note).
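
   As an illustration (not an excerpt from a real dump), an insn that adds
   pseudo 57 into pseudo 58, where pseudo 57 is not needed afterward,
   might look like

	(insn 12 11 13 (set (reg:SI 58)
		(plus:SI (reg:SI 58) (reg:SI 57))) ...
	   (expr_list:REG_DEAD (reg:SI 57) (nil)))

   recording that the value in pseudo 57 dies in insn 12.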
65
66   In the vast majority of cases, an object in a REG_DEAD note will be
67   used somewhere in the insn.  The (rare) exception to this is if an
68   insn uses a multi-word hard register and only some of the registers are
69   needed in subsequent insns.  In that case, REG_DEAD notes will be
70   provided for those hard registers that are not subsequently needed.
71   Partial REG_DEAD notes of this type do not occur when an insn sets
72   only some of the hard registers used in such a multi-word operand;
73   omitting REG_DEAD notes for objects stored in an insn is optional and
74   the desire to do so does not justify the complexity of the partial
75   REG_DEAD notes.
76
77   REG_UNUSED notes are added for each register that is set by the insn
78   but is unused subsequently (if every register set by the insn is unused
79   and the insn does not reference memory or have some other side-effect,
80   the insn is deleted instead).  If only part of a multi-word hard
81   register is used in a subsequent insn, REG_UNUSED notes are made for
82   the parts that will not be used.
83
84   To determine which registers are live after any insn, one can
85   start from the beginning of the basic block and scan insns, noting
86   which registers are set by each insn and which die there.
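
   A rough sketch of that forward scan (purely illustrative; this file
   itself computes liveness with a backward scan in propagate_block):

	live = copy of bb->global_live_at_start;
	for each insn in the block, in order:
	  remove from live every register named in a REG_DEAD note;
	  add to live every register set by the insn;
	  remove from live every register named in a REG_UNUSED note;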
87
88   ** Other actions of life_analysis **
89
90   life_analysis sets up the LOG_LINKS fields of insns because the
91   information needed to do so is readily available.
92
93   life_analysis deletes insns whose only effect is to store a value
94   that is never used.
95
96   life_analysis notices cases where a reference to a register as
97   a memory address can be combined with a preceding or following
98   incrementation or decrementation of the register.  The separate
99   instruction to increment or decrement is deleted and the address
100   is changed to a POST_INC or similar rtx.
101
102   Each time an incrementing or decrementing address is created,
103   a REG_INC element is added to the insn's REG_NOTES list.
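
   For example (schematically), the pair

	(set (mem:SI (reg:SI 60)) (reg:SI 61))
	(set (reg:SI 60) (plus:SI (reg:SI 60) (const_int 4)))

   can become a single store through (mem:SI (post_inc:SI (reg:SI 60))),
   with the increment insn deleted and a REG_INC note for register 60
   added to the surviving insn.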
104
105   life_analysis fills in certain vectors containing information about
106   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107   reg_n_calls_crossed and reg_basic_block.
108
109   life_analysis sets current_function_sp_is_unchanging if the function
110   doesn't modify the stack pointer.  */
111
112/* TODO:
113
114   Split out from life_analysis:
115	- local property discovery (bb->local_live, bb->local_set)
116	- global property computation
117	- log links creation
118	- pre/post modify transformation
119*/
120
121#include "config.h"
122#include "system.h"
123#include "rtl.h"
124#include "basic-block.h"
125#include "insn-config.h"
126#include "regs.h"
127#include "hard-reg-set.h"
128#include "flags.h"
129#include "output.h"
130#include "except.h"
131#include "toplev.h"
132#include "recog.h"
133#include "insn-flags.h"
134
135#include "obstack.h"
136#define obstack_chunk_alloc xmalloc
137#define obstack_chunk_free free
138
139
140/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
141   the stack pointer does not matter.  The value is tested only in
142   functions that have frame pointers.
143   No definition is equivalent to always zero.  */
144#ifndef EXIT_IGNORE_STACK
145#define EXIT_IGNORE_STACK 0
146#endif
147
148
149/* The contents of the current function definition are allocated
150   in this obstack, and all are freed at the end of the function.
151   For top-level functions, this is temporary_obstack.
152   Separate obstacks are made for nested functions.  */
153
154extern struct obstack *function_obstack;
155
156/* List of labels that must never be deleted.  */
157extern rtx forced_labels;
158
159/* Number of basic blocks in the current function.  */
160
161int n_basic_blocks;
162
163/* The basic block array.  */
164
165varray_type basic_block_info;
166
167/* The special entry and exit blocks.  */
168
169struct basic_block_def entry_exit_blocks[2] =
170{
171  {
172    NULL,			/* head */
173    NULL,			/* end */
174    NULL,			/* pred */
175    NULL,			/* succ */
176    NULL,			/* local_set */
177    NULL,			/* global_live_at_start */
178    NULL,			/* global_live_at_end */
179    NULL,			/* aux */
180    ENTRY_BLOCK,		/* index */
181    0				/* loop_depth */
182  },
183  {
184    NULL,			/* head */
185    NULL,			/* end */
186    NULL,			/* pred */
187    NULL,			/* succ */
188    NULL,			/* local_set */
189    NULL,			/* global_live_at_start */
190    NULL,			/* global_live_at_end */
191    NULL,			/* aux */
192    EXIT_BLOCK,			/* index */
193    0				/* loop_depth */
194  }
195};
196
197/* Nonzero if the second flow pass has completed.  */
198int flow2_completed;
199
200/* Maximum register number used in this function, plus one.  */
201
202int max_regno;
203
204/* Indexed by n, giving various register information.  */
205
206varray_type reg_n_info;
207
208/* Size of the reg_n_info table.  */
209
210unsigned int reg_n_max;
211
212/* Element N is the next insn that uses (hard or pseudo) register number N
213   within the current basic block; or zero, if there is no such insn.
214   This is valid only during the final backward scan in propagate_block.  */
215
216static rtx *reg_next_use;
217
218/* Size of a regset for the current function,
219   in (1) bytes and (2) elements.  */
220
221int regset_bytes;
222int regset_size;
223
224/* Regset of regs live when calls to `setjmp'-like functions happen.  */
225/* ??? Does this exist only for the setjmp-clobbered warning message?  */
226
227regset regs_live_at_setjmp;
228
229/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
230   that have to go in the same hard reg.
231   The first two regs in the list are a pair, and the next two
232   are another pair, etc.  */
233rtx regs_may_share;
234
235/* Depth within loops of basic block being scanned for lifetime analysis,
236   plus one.  This is the weight attached to references to registers.  */
237
238static int loop_depth;
239
240/* During propagate_block, this is non-zero if the value of CC0 is live.  */
241
242static int cc0_live;
243
244/* During propagate_block, this contains a list of all the MEMs we are
245   tracking for dead store elimination.
246
247   ?!? Note we leak memory by not freeing items on this list.  We need to
248   write some generic routines to operate on memory lists since cse, gcse,
249   loop, sched, flow and possibly other passes all need to do basically the
250   same operations on these lists.  */
251
252static rtx mem_set_list;
253
254/* Set of registers that may be eliminable.  These are handled specially
255   in updating regs_ever_live.  */
256
257static HARD_REG_SET elim_reg_set;
258
259/* The basic block structure for every insn, indexed by uid.  */
260
261varray_type basic_block_for_insn;
262
263/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
264/* ??? Should probably be using LABEL_NUSES instead.  It would take a
265   bit of surgery to be able to use or co-opt the routines in jump.  */
266
267static rtx label_value_list;
268
269/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */
270
271#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
272#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
273static bitmap uid_volatile;
274
275/* Forward declarations */
276static int count_basic_blocks		PROTO((rtx));
277static rtx find_basic_blocks_1		PROTO((rtx, rtx*));
278static void create_basic_block		PROTO((int, rtx, rtx, rtx));
279static void compute_bb_for_insn		PROTO((varray_type, int));
280static void clear_edges			PROTO((void));
281static void make_edges			PROTO((rtx, rtx*));
282static void make_edge			PROTO((basic_block, basic_block, int));
283static void make_label_edge		PROTO((basic_block, rtx, int));
284static void mark_critical_edges		PROTO((void));
285
286static void commit_one_edge_insertion	PROTO((edge));
287
288static void delete_unreachable_blocks	PROTO((void));
289static void delete_eh_regions		PROTO((void));
290static int can_delete_note_p		PROTO((rtx));
291static void delete_insn_chain		PROTO((rtx, rtx));
292static int delete_block			PROTO((basic_block));
293static void expunge_block		PROTO((basic_block));
294static rtx flow_delete_insn		PROTO((rtx));
295static int can_delete_label_p		PROTO((rtx));
296static void merge_blocks_nomove		PROTO((basic_block, basic_block));
297static int merge_blocks			PROTO((edge,basic_block,basic_block));
298static void tidy_fallthru_edge		PROTO((edge,basic_block,basic_block));
299static void calculate_loop_depth	PROTO((rtx));
300
301static int set_noop_p			PROTO((rtx));
302static int noop_move_p			PROTO((rtx));
303static void notice_stack_pointer_modification PROTO ((rtx, rtx));
304static void record_volatile_insns	PROTO((rtx));
305static void mark_regs_live_at_end	PROTO((regset));
306static void life_analysis_1		PROTO((rtx, int, int));
307static void init_regset_vector		PROTO ((regset *, int,
308						struct obstack *));
309static void propagate_block		PROTO((regset, rtx, rtx, int,
310					       regset, int, int));
311static int insn_dead_p			PROTO((rtx, regset, int, rtx));
312static int libcall_dead_p		PROTO((rtx, regset, rtx, rtx));
313static void mark_set_regs		PROTO((regset, regset, rtx,
314					       rtx, regset));
315static void mark_set_1			PROTO((regset, regset, rtx,
316					       rtx, regset));
317#ifdef AUTO_INC_DEC
318static void find_auto_inc		PROTO((regset, rtx, rtx));
319static int try_pre_increment_1		PROTO((rtx));
320static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
321#endif
322static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
323void dump_flow_info			PROTO((FILE *));
324static void dump_edge_info		PROTO((FILE *, edge, int));
325
326static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
327static int_list_ptr add_int_list_node   PROTO ((int_list_block **,
328						int_list **, int));
329
330static void add_pred_succ		PROTO ((int, int, int_list_ptr *,
331						int_list_ptr *, int *, int *));
332
333static void count_reg_sets_1		PROTO ((rtx));
334static void count_reg_sets		PROTO ((rtx));
335static void count_reg_references	PROTO ((rtx));
337static void invalidate_mems_from_autoinc	PROTO ((rtx));
338void verify_flow_info			PROTO ((void));
339
340/* Find basic blocks of the current function.
341   F is the first insn of the function and NREGS the number of register
342   numbers in use.  */
343
344void
345find_basic_blocks (f, nregs, file, do_cleanup)
346     rtx f;
347     int nregs ATTRIBUTE_UNUSED;
348     FILE *file ATTRIBUTE_UNUSED;
349     int do_cleanup;
350{
351  rtx *bb_eh_end;
352  int max_uid;
353
354  /* Flush out existing data.  */
355  if (basic_block_info != NULL)
356    {
357      int i;
358
359      clear_edges ();
360
361      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
362	 tag for reuse during create_basic_block, just in case some pass
363	 copies around basic block notes improperly.  */
364      for (i = 0; i < n_basic_blocks; ++i)
365	BASIC_BLOCK (i)->aux = NULL;
366
367      VARRAY_FREE (basic_block_info);
368    }
369
370  n_basic_blocks = count_basic_blocks (f);
371
372  /* Size the basic block table.  The actual structures will be allocated
373     by find_basic_blocks_1, since we want to keep the structure pointers
374     stable across calls to find_basic_blocks.  */
375  /* ??? This whole issue would be much simpler if we called find_basic_blocks
376     exactly once, and thereafter we don't have a single long chain of
377     instructions at all until close to the end of compilation when we
378     actually lay them out.  */
379
380  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
381
382  /* An array to record the active exception region at the end of each
383     basic block.  It is filled in by find_basic_blocks_1 for make_edges.  */
384  bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));
385
386  label_value_list = find_basic_blocks_1 (f, bb_eh_end);
387
388  /* Record the block to which an insn belongs.  */
389  /* ??? This should be done another way, by which (perhaps) a label is
390     tagged directly with the basic block that it starts.  It is used for
391     more than that currently, but IMO that is the only valid use.  */
392
393  max_uid = get_max_uid ();
394#ifdef AUTO_INC_DEC
395  /* Leave space for insns life_analysis makes in some cases for auto-inc.
396     These cases are rare, so we don't need too much space.  */
397  max_uid += max_uid / 10;
398#endif
399
400  VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
401  compute_bb_for_insn (basic_block_for_insn, max_uid);
402
403  /* Discover the edges of our cfg.  */
404
405  make_edges (label_value_list, bb_eh_end);
406
407  /* Delete unreachable blocks.  */
408
409  if (do_cleanup)
410    delete_unreachable_blocks ();
411
412  /* Mark critical edges.  */
413
414  mark_critical_edges ();
415
416  /* Discover the loop depth at the start of each basic block to aid
417     register allocation.  */
418  calculate_loop_depth (f);
419
420  /* Kill the data we won't maintain.  */
421  label_value_list = 0;
422
423#ifdef ENABLE_CHECKING
424  verify_flow_info ();
425#endif
426}
427
428/* Count the basic blocks of the function.  */
429
430static int
431count_basic_blocks (f)
432     rtx f;
433{
434  register rtx insn;
435  register RTX_CODE prev_code;
436  register int count = 0;
437  int eh_region = 0;
438  int call_had_abnormal_edge = 0;
439  rtx prev_call = NULL_RTX;
440
441  prev_code = JUMP_INSN;
442  for (insn = f; insn; insn = NEXT_INSN (insn))
443    {
444      register RTX_CODE code = GET_CODE (insn);
445
446      if (code == CODE_LABEL
447	  || (GET_RTX_CLASS (code) == 'i'
448	      && (prev_code == JUMP_INSN
449		  || prev_code == BARRIER
450		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
451	{
452	  count++;
453
454	  /* If the previous insn was a call that did not create an
455	     abnormal edge, we want to add a nop so that the CALL_INSN
456	     itself is not at basic_block_end.  This allows us to
457	     easily distinguish between normal calls and those which
458	     create abnormal edges in the flow graph.  */
459
460	  if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
461	    {
462	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
463	      emit_insn_after (nop, prev_call);
464	    }
465	}
466
467      /* Record whether this call created an edge.  */
468      if (code == CALL_INSN)
469	{
470	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
471	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
472	  prev_call = insn;
473	  call_had_abnormal_edge = 0;
474
475	  /* If there is a specified EH region, we have an edge.  */
476	  if (eh_region && region > 0)
477	    call_had_abnormal_edge = 1;
478	  else
479	    {
480	      /* If there is a nonlocal goto label and the specified
481		 region number isn't -1, we have an edge. (0 means
482		 no throw, but might have a nonlocal goto).  */
483	      if (nonlocal_goto_handler_labels && region >= 0)
484		call_had_abnormal_edge = 1;
485	    }
486	}
487      else if (code != NOTE)
488	prev_call = NULL_RTX;
489
490      if (code != NOTE)
491	prev_code = code;
492      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
493	++eh_region;
494      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
495	--eh_region;
496
497    }
498
499  /* The rest of the compiler works a bit more smoothly when we don't have to
500     check for the edge case of do-nothing functions with no basic blocks.  */
501  if (count == 0)
502    {
503      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
504      count = 1;
505    }
506
507  return count;
508}
509
510/* Find all basic blocks of the function whose first insn is F.
511   Store the correct data in the tables that describe the basic blocks,
512   and set up the chains of references for each CODE_LABEL.  Deleting
513   unreachable blocks is left to delete_unreachable_blocks.
514
515   Return a list of the labels mentioned in non-jump rtl, for later
516   use by make_edges.
517
518   BB_EH_END is an array in which we record the list of exception regions
519   active at the end of every basic block.  */
520
521static rtx
522find_basic_blocks_1 (f, bb_eh_end)
523     rtx f;
524     rtx *bb_eh_end;
525{
526  register rtx insn, next;
527  int call_has_abnormal_edge = 0;
528  int i = 0;
529  rtx bb_note = NULL_RTX;
530  rtx eh_list = NULL_RTX;
531  rtx label_value_list = NULL_RTX;
532  rtx head = NULL_RTX;
533  rtx end = NULL_RTX;
534
535  /* We process the instructions in a slightly different way than we did
536     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
537     closed out the previous block, so that it gets attached at the proper
538     place.  Since this form should be equivalent to the previous,
539     find_basic_blocks_0 continues to use the old form as a check.  */
540
541  for (insn = f; insn; insn = next)
542    {
543      enum rtx_code code = GET_CODE (insn);
544
545      next = NEXT_INSN (insn);
546
547      if (code == CALL_INSN)
548	{
549	  /* Record whether this call created an edge.  */
550	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
551	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
552	  call_has_abnormal_edge = 0;
553
554	  /* If there is an EH region, we have an edge.  */
555	  if (eh_list && region > 0)
556	    call_has_abnormal_edge = 1;
557	  else
558	    {
559	      /* If there is a nonlocal goto label and the specified
560		 region number isn't -1, we have an edge. (0 means
561		 no throw, but might have a nonlocal goto).  */
562	      if (nonlocal_goto_handler_labels && region >= 0)
563		call_has_abnormal_edge = 1;
564	    }
565	}
566
567      switch (code)
568	{
569	case NOTE:
570	  {
571	    int kind = NOTE_LINE_NUMBER (insn);
572
573	    /* Keep a LIFO list of the currently active exception notes.  */
574	    if (kind == NOTE_INSN_EH_REGION_BEG)
575	      eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
576	    else if (kind == NOTE_INSN_EH_REGION_END)
577	      eh_list = XEXP (eh_list, 1);
578
579	    /* Look for basic block notes with which to keep the
580	       basic_block_info pointers stable.  Unthread the note now;
581	       we'll put it back at the right place in create_basic_block.
582	       Or not at all if we've already found a note in this block.  */
583	    else if (kind == NOTE_INSN_BASIC_BLOCK)
584	      {
585		if (bb_note == NULL_RTX)
586		  bb_note = insn;
587		next = flow_delete_insn (insn);
588	      }
589
590	    break;
591	  }
592
593	case CODE_LABEL:
594	  /* A basic block starts at a label.  If we've closed one off due
595	     to a barrier or some such, no need to do it again.  */
596	  if (head != NULL_RTX)
597	    {
598	      /* While we now have edge lists with which other portions of
599		 the compiler might determine a call ending a basic block
600		 does not imply an abnormal edge, it will be a bit before
601		 everything can be updated.  So continue to emit a noop at
602		 the end of such a block.  */
603	      if (GET_CODE (end) == CALL_INSN)
604		{
605		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
606		  end = emit_insn_after (nop, end);
607		}
608
609	      bb_eh_end[i] = eh_list;
610	      create_basic_block (i++, head, end, bb_note);
611	      bb_note = NULL_RTX;
612	    }
613	  head = end = insn;
614	  break;
615
616	case JUMP_INSN:
617	  /* A basic block ends at a jump.  */
618	  if (head == NULL_RTX)
619	    head = insn;
620	  else
621	    {
622	      /* ??? Make a special check for table jumps.  The way this
623		 happens is truly and amazingly gross.  We are about to
624		 create a basic block that contains just a code label and
625		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
626		 its own natural loop.
627
628		 Prevent this bit of brain damage, pasting things together
629		 correctly in make_edges.
630
631		 The correct solution involves emitting the table directly
632		 on the tablejump instruction as a note, or JUMP_LABEL.  */
633
634	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
635		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
636		{
637		  head = end = NULL;
638		  n_basic_blocks--;
639		  break;
640		}
641	    }
642	  end = insn;
643	  goto new_bb_inclusive;
644
645	case BARRIER:
646	  /* A basic block ends at a barrier.  It may be that an unconditional
647	     jump already closed the basic block -- no need to do it again.  */
648	  if (head == NULL_RTX)
649	    break;
650
651	  /* While we now have edge lists with which other portions of the
652	     compiler might determine a call ending a basic block does not
653	     imply an abnormal edge, it will be a bit before everything can
654	     be updated.  So continue to emit a noop at the end of such a
655	     block.  */
656	  if (GET_CODE (end) == CALL_INSN)
657	    {
658	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
659	      end = emit_insn_after (nop, end);
660	    }
661	  goto new_bb_exclusive;
662
663	case CALL_INSN:
664	  /* A basic block ends at a call that can either throw or
665	     do a non-local goto.  */
666	  if (call_has_abnormal_edge)
667	    {
668	    new_bb_inclusive:
669	      if (head == NULL_RTX)
670		head = insn;
671	      end = insn;
672
673	    new_bb_exclusive:
674	      bb_eh_end[i] = eh_list;
675	      create_basic_block (i++, head, end, bb_note);
676	      head = end = NULL_RTX;
677	      bb_note = NULL_RTX;
678	      break;
679	    }
680	  /* FALLTHRU */
681
682	default:
683	  if (GET_RTX_CLASS (code) == 'i')
684	    {
685	      if (head == NULL_RTX)
686		head = insn;
687	      end = insn;
688	    }
689	  break;
690	}
691
692      if (GET_RTX_CLASS (code) == 'i')
693	{
694	  rtx note;
695
696	  /* Make a list of all labels referred to other than by jumps
697	     (which just don't have the REG_LABEL notes).
698
699	     Make a special exception for labels followed by an ADDR*VEC,
700	     as this would be a part of the tablejump setup code.
701
702	     Make a special exception for the eh_return_stub_label, which
703	     we know isn't part of any otherwise visible control flow.  */
704
705	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
706	    if (REG_NOTE_KIND (note) == REG_LABEL)
707	      {
708	        rtx lab = XEXP (note, 0), next;
709
710		if (lab == eh_return_stub_label)
711		  ;
712		else if ((next = next_nonnote_insn (lab)) != NULL
713			 && GET_CODE (next) == JUMP_INSN
714			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
715			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
716		  ;
717		else
718		  label_value_list
719		    = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
720				         label_value_list);
721	      }
722	}
723    }
724
725  if (head != NULL_RTX)
726    {
727      bb_eh_end[i] = eh_list;
728      create_basic_block (i++, head, end, bb_note);
729    }
730
731  if (i != n_basic_blocks)
732    abort ();
733
734  return label_value_list;
735}
736
737/* Create a new basic block consisting of the instructions between
738   HEAD and END inclusive.  Reuses the note and basic block struct
739   in BB_NOTE, if any.  */
740
741static void
742create_basic_block (index, head, end, bb_note)
743     int index;
744     rtx head, end, bb_note;
745{
746  basic_block bb;
747
748  if (bb_note
749      && ! RTX_INTEGRATED_P (bb_note)
750      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
751      && bb->aux == NULL)
752    {
753      /* If we found an existing note, thread it back onto the chain.  */
754
755      if (GET_CODE (head) == CODE_LABEL)
756	add_insn_after (bb_note, head);
757      else
758	{
759	  add_insn_before (bb_note, head);
760	  head = bb_note;
761	}
762    }
763  else
764    {
765      /* Otherwise we must create a note and a basic block structure.
766	 Since we allow basic block structs in rtl, give the struct
767	 the same lifetime by allocating it off the function obstack
768	 rather than using malloc.  */
769
770      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
771      memset (bb, 0, sizeof (*bb));
772
773      if (GET_CODE (head) == CODE_LABEL)
774	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
775      else
776	{
777	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
778	  head = bb_note;
779	}
780      NOTE_BASIC_BLOCK (bb_note) = bb;
781    }
782
783  /* Always include the bb note in the block.  */
784  if (NEXT_INSN (end) == bb_note)
785    end = bb_note;
786
787  bb->head = head;
788  bb->end = end;
789  bb->index = index;
790  BASIC_BLOCK (index) = bb;
791
792  /* Tag the block so that we know it has been used when considering
793     other basic block notes.  */
794  bb->aux = bb;
795}
796
797/* Records the basic block struct in BB_FOR_INSN, for every instruction
798   indexed by INSN_UID.  MAX is the size of the array.  */
799
800static void
801compute_bb_for_insn (bb_for_insn, max)
802     varray_type bb_for_insn;
803     int max;
804{
805  int i;
806
807  for (i = 0; i < n_basic_blocks; ++i)
808    {
809      basic_block bb = BASIC_BLOCK (i);
810      rtx insn, end;
811
812      end = bb->end;
813      insn = bb->head;
814      while (1)
815	{
816	  int uid = INSN_UID (insn);
817	  if (uid < max)
818	    VARRAY_BB (bb_for_insn, uid) = bb;
819	  if (insn == end)
820	    break;
821	  insn = NEXT_INSN (insn);
822	}
823    }
824}
825
826/* Free the memory associated with the edge structures.  */
827
828static void
829clear_edges ()
830{
831  int i;
832  edge n, e;
833
834  for (i = 0; i < n_basic_blocks; ++i)
835    {
836      basic_block bb = BASIC_BLOCK (i);
837
838      for (e = bb->succ; e ; e = n)
839	{
840	  n = e->succ_next;
841	  free (e);
842	}
843
844      bb->succ = 0;
845      bb->pred = 0;
846    }
847
848  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
849    {
850      n = e->succ_next;
851      free (e);
852    }
853
854  ENTRY_BLOCK_PTR->succ = 0;
855  EXIT_BLOCK_PTR->pred = 0;
856}
857
858/* Identify the edges between basic blocks.
859
860   LABEL_VALUE_LIST is a list of the labels mentioned in non-jump rtl, as
861   collected by find_basic_blocks_1; a computed jump may reach any of them.
862
863   BB_EH_END is an array indexed by basic block number in which we record
864   the list of exception regions active at the end of the basic block.  */
865
866static void
867make_edges (label_value_list, bb_eh_end)
868     rtx label_value_list;
869     rtx *bb_eh_end;
870{
871  int i;
872
873  /* Assume no computed jump; revise as we create edges.  */
874  current_function_has_computed_jump = 0;
875
876  /* By nature of the way these get numbered, block 0 is always the entry.  */
877  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
878
879  for (i = 0; i < n_basic_blocks; ++i)
880    {
881      basic_block bb = BASIC_BLOCK (i);
882      rtx insn, x, eh_list;
883      enum rtx_code code;
884      int force_fallthru = 0;
885
886      /* If we have asynchronous exceptions, scan the notes for all exception
887	 regions active in the block.  In the normal case, we only need the
888	 one active at the end of the block, which is bb_eh_end[i].  */
889
890      eh_list = bb_eh_end[i];
891      if (asynchronous_exceptions)
892	{
893	  for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
894	    {
895	      if (GET_CODE (insn) == NOTE
896		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
897		eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
898	    }
899	}
900
901      /* Now examine the last instruction of the block, and discover the
902	 ways we can leave the block.  */
903
904      insn = bb->end;
905      code = GET_CODE (insn);
906
907      /* A branch.  */
908      if (code == JUMP_INSN)
909	{
910	  rtx tmp;
911
912	  /* ??? Recognize a tablejump and do the right thing.  */
913	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
914	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
915	      && GET_CODE (tmp) == JUMP_INSN
916	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
917		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
918	    {
919	      rtvec vec;
920	      int j;
921
922	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
923		vec = XVEC (PATTERN (tmp), 0);
924	      else
925		vec = XVEC (PATTERN (tmp), 1);
926
927	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
928		make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0);
929	      /* Some targets (e.g., ARM) emit a conditional jump that also
930	      /* Some targets (eg, ARM) emit a conditional jump that also
931		 contains the out-of-range target.  Scan for these and
932		 add an edge if necessary.  */
933	      if ((tmp = single_set (insn)) != NULL
934		  && SET_DEST (tmp) == pc_rtx
935		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
936		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
937		make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
938
939#ifdef CASE_DROPS_THROUGH
940	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
941		 us naturally detecting fallthru into the next block.  */
942	      force_fallthru = 1;
943#endif
944	    }
945
946	  /* If this is a computed jump, then mark it as reaching
947	     everything on the label_value_list and forced_labels list.  */
948	  else if (computed_jump_p (insn))
949	    {
950	      current_function_has_computed_jump = 1;
951
952	      for (x = label_value_list; x; x = XEXP (x, 1))
953		make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
954
955	      for (x = forced_labels; x; x = XEXP (x, 1))
956		make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
957	    }
958
959	  /* A return insn creates an edge to the exit block.  */
960	  else if (returnjump_p (insn))
961	    make_edge (bb, EXIT_BLOCK_PTR, 0);
962
963	  /* Otherwise, we have a plain conditional or unconditional jump.  */
964	  else
965	    {
966	      if (! JUMP_LABEL (insn))
967		abort ();
968	      make_label_edge (bb, JUMP_LABEL (insn), 0);
969	    }
970	}
971
972      /* If this is a CALL_INSN, then mark it as reaching the active EH
973	 handler for this CALL_INSN.  If we're handling asynchronous
974	 exceptions then any insn can reach any of the active handlers.
975
976	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
977
978      if (code == CALL_INSN || asynchronous_exceptions)
979	{
980	  int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
981	  handler_info *ptr;
982
983	  /* Use REG_EH_RETHROW and REG_EH_REGION if available.  */
984	  /* ??? REG_EH_REGION is not generated presently.  Is it
985	     intended that there be multiple notes for the regions?
986	     Or is my eh_list collection redundant with handler linking?  */
987
988	  x = find_reg_note (insn, REG_EH_RETHROW, 0);
989	  if (!x)
990	    x = find_reg_note (insn, REG_EH_REGION, 0);
991	  if (x)
992	    {
993	      if (XINT (XEXP (x, 0), 0) > 0)
994		{
995		  ptr = get_first_handler (XINT (XEXP (x, 0), 0));
996		  while (ptr)
997		    {
998		      make_label_edge (bb, ptr->handler_label,
999				       EDGE_ABNORMAL | EDGE_EH | is_call);
1000		      ptr = ptr->next;
1001		    }
1002		}
1003	    }
1004	  else
1005	    {
1006	      for (x = eh_list; x; x = XEXP (x, 1))
1007		{
1008		  ptr = get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x, 0)));
1009		  while (ptr)
1010		    {
1011		      make_label_edge (bb, ptr->handler_label,
1012				       EDGE_ABNORMAL | EDGE_EH | is_call);
1013		      ptr = ptr->next;
1014		    }
1015		}
1016	    }
1017
1018	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
1019	    {
1020	      /* ??? This could be made smarter: in some cases it's possible
1021		 to tell that certain calls will not do a nonlocal goto.
1022
1023		 For example, if the nested functions that do the nonlocal
1024		 gotos do not have their addresses taken, then only calls to
1025		 those functions or to other nested functions that use them
1026		 could possibly do nonlocal gotos.  */
1027
1028	      for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1029	        make_label_edge (bb, XEXP (x, 0),
1030			         EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1031	    }
1032	}
1033
1034      /* We know something about the structure of the function __throw in
1035	 libgcc2.c.  It is the only function that ever contains eh_stub
1036	 labels.  It modifies its return address so that the last block
1037	 returns to one of the eh_stub labels within it.  So we have to
1038	 make additional edges in the flow graph.  */
1039      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1040	make_label_edge (bb, eh_return_stub_label, EDGE_EH);
1041
1042      /* Find out if we can drop through to the next block.  */
1043      insn = next_nonnote_insn (insn);
1044      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1045	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1046      else if (i + 1 < n_basic_blocks)
1047	{
1048	  rtx tmp = BLOCK_HEAD (i + 1);
1049	  if (GET_CODE (tmp) == NOTE)
1050	    tmp = next_nonnote_insn (tmp);
1051	  if (force_fallthru || insn == tmp)
1052	    make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1053	}
1054    }
1055}
1056
1057/* Create an edge between two basic blocks.  FLAGS are auxiliary information
1058   about the edge that is accumulated between calls.  */
1059
1060static void
1061make_edge (src, dst, flags)
1062     basic_block src, dst;
1063     int flags;
1064{
1065  edge e;
1066
1067  /* Make sure we don't add duplicate edges.  */
1068
1069  for (e = src->succ; e ; e = e->succ_next)
1070    if (e->dest == dst)
1071      {
1072	e->flags |= flags;
1073	return;
1074      }
1075
1076  e = (edge) xcalloc (1, sizeof (*e));
1077
1078  e->succ_next = src->succ;
1079  e->pred_next = dst->pred;
1080  e->src = src;
1081  e->dest = dst;
1082  e->flags = flags;
1083
1084  src->succ = e;
1085  dst->pred = e;
1086}
1087
1088/* Create an edge from a basic block to a label.  */
1089
1090static void
1091make_label_edge (src, label, flags)
1092     basic_block src;
1093     rtx label;
1094     int flags;
1095{
1096  if (GET_CODE (label) != CODE_LABEL)
1097    abort ();
1098
1099  /* If the label was never emitted, this insn is junk, but avoid a
1100     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
1101     as a result of a syntax error and a diagnostic has already been
1102     printed.  */
1103
1104  if (INSN_UID (label) == 0)
1105    return;
1106
1107  make_edge (src, BLOCK_FOR_INSN (label), flags);
1108}
1109
1110/* Identify critical edges and set the bits appropriately.  */
1111static void
1112mark_critical_edges ()
1113{
1114  int i, n = n_basic_blocks;
1115  basic_block bb;
1116
1117  /* We begin with the entry block.  This is not terribly important now,
1118     but could be if a front end (Fortran) implemented alternate entry
1119     points.  */
1120  bb = ENTRY_BLOCK_PTR;
1121  i = -1;
1122
1123  while (1)
1124    {
1125      edge e;
1126
1127      /* (1) Critical edges must have a source with multiple successors.  */
1128      if (bb->succ && bb->succ->succ_next)
1129	{
1130	  for (e = bb->succ; e ; e = e->succ_next)
1131	    {
1132	      /* (2) Critical edges must have a destination with multiple
1133		 predecessors.  Note that we know there is at least one
1134		 predecessor -- the edge we followed to get here.  */
1135	      if (e->dest->pred->pred_next)
1136		e->flags |= EDGE_CRITICAL;
1137	      else
1138		e->flags &= ~EDGE_CRITICAL;
1139	    }
1140	}
1141      else
1142	{
1143	  for (e = bb->succ; e ; e = e->succ_next)
1144	    e->flags &= ~EDGE_CRITICAL;
1145	}
1146
1147      if (++i >= n)
1148	break;
1149      bb = BASIC_BLOCK (i);
1150    }
1151}
1152
1153/* Split a (typically critical) edge.  Return the new block.
1154   Abort on abnormal edges.
1155
1156   ??? The code generally expects to be called on critical edges.
1157   The case of a block ending in an unconditional jump to a
1158   block with multiple predecessors is not handled optimally.  */
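/* For instance (illustrative only; no such loop exists in this file), a
   pass that wants all splittable critical edges broken could walk each
   block's successor edges E and do

	if ((e->flags & (EDGE_CRITICAL | EDGE_ABNORMAL)) == EDGE_CRITICAL)
	  split_edge (e);

   taking care to skip abnormal edges, which split_edge rejects.  */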
1159
1160basic_block
1161split_edge (edge_in)
1162     edge edge_in;
1163{
1164  basic_block old_pred, bb, old_succ;
1165  edge edge_out;
1166  rtx bb_note;
1167  int i;
1168
1169  /* Abnormal edges cannot be split.  */
1170  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1171    abort ();
1172
1173  old_pred = edge_in->src;
1174  old_succ = edge_in->dest;
1175
1176  /* Remove the existing edge from the destination's pred list.  */
1177  {
1178    edge *pp;
1179    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1180      continue;
1181    *pp = edge_in->pred_next;
1182    edge_in->pred_next = NULL;
1183  }
1184
1185  /* Create the new structures.  */
1186  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1187  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1188
1189  memset (bb, 0, sizeof (*bb));
1190  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
1191  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1192  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1193
1194  /* ??? This info is likely going to be out of date very soon.  */
1195  CLEAR_REG_SET (bb->local_set);
1196  if (old_succ->global_live_at_start)
1197    {
1198      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1199      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1200    }
1201  else
1202    {
1203      CLEAR_REG_SET (bb->global_live_at_start);
1204      CLEAR_REG_SET (bb->global_live_at_end);
1205    }
1206
1207  /* Wire them up.  */
1208  bb->pred = edge_in;
1209  bb->succ = edge_out;
1210
1211  edge_in->dest = bb;
1212  edge_in->flags &= ~EDGE_CRITICAL;
1213
1214  edge_out->pred_next = old_succ->pred;
1215  edge_out->succ_next = NULL;
1216  edge_out->src = bb;
1217  edge_out->dest = old_succ;
1218  edge_out->flags = EDGE_FALLTHRU;
1219  edge_out->probability = REG_BR_PROB_BASE;
1220
1221  old_succ->pred = edge_out;
1222
1223  /* Tricky case -- if there existed a fallthru into the successor
1224     (and we're not it) we must add a new unconditional jump around
1225     the new block we're actually interested in.
1226
1227     Further, if that edge is critical, this means a second new basic
1228     block must be created to hold it.  In order to simplify correct
1229     insn placement, do this before we touch the existing basic block
1230     ordering for the block we were really wanting.  */
1231  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1232    {
1233      edge e;
1234      for (e = edge_out->pred_next; e ; e = e->pred_next)
1235	if (e->flags & EDGE_FALLTHRU)
1236	  break;
1237
1238      if (e)
1239	{
1240	  basic_block jump_block;
1241	  rtx pos;
1242
1243	  if ((e->flags & EDGE_CRITICAL) == 0)
1244	    {
1245	      /* Non-critical -- we can simply add a jump to the end
1246		 of the existing predecessor.  */
1247	      jump_block = e->src;
1248	    }
1249	  else
1250	    {
1251	      /* We need a new block to hold the jump.  The simplest
1252	         way to do the bulk of the work here is to recursively
1253	         call ourselves.  */
1254	      jump_block = split_edge (e);
1255	      e = jump_block->succ;
1256	    }
1257
1258	  /* Now add the jump insn ...  */
1259	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
1260				      jump_block->end);
1261	  jump_block->end = pos;
1262	  emit_barrier_after (pos);
1263
1264	  /* ... let jump know that label is in use, ...  */
1265	  ++LABEL_NUSES (old_succ->head);
1266
1267	  /* ... and clear fallthru on the outgoing edge.  */
1268	  e->flags &= ~EDGE_FALLTHRU;
1269
1270	  /* Continue splitting the interesting edge.  */
1271	}
1272    }
1273
1274  /* Place the new block just in front of the successor.  */
1275  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1276  for (i = n_basic_blocks - 1; i > old_succ->index; --i)
1277    {
1278      basic_block tmp = BASIC_BLOCK (i - 1);
1279      BASIC_BLOCK (i) = tmp;
1280      tmp->index = i;
1281    }
1282  BASIC_BLOCK (i) = bb;
1283  bb->index = i;
1284
1285  /* Create the basic block note.  */
1286  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1287  NOTE_BASIC_BLOCK (bb_note) = bb;
1288  bb->head = bb->end = bb_note;
1289
1290  /* Not quite simple -- for non-fallthru edges, we must adjust the
1291     predecessor's jump instruction to target our new block.  */
1292  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1293    {
1294      rtx tmp, insn = old_pred->end;
1295      rtx old_label = old_succ->head;
1296      rtx new_label = gen_label_rtx ();
1297
1298      if (GET_CODE (insn) != JUMP_INSN)
1299	abort ();
1300
1301      /* ??? Recognize a tablejump and adjust all matching cases.  */
1302      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1303	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1304	  && GET_CODE (tmp) == JUMP_INSN
1305	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1306	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1307	{
1308	  rtvec vec;
1309	  int j;
1310
1311	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1312	    vec = XVEC (PATTERN (tmp), 0);
1313	  else
1314	    vec = XVEC (PATTERN (tmp), 1);
1315
1316	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1317	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1318	      {
1319	        RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1320		--LABEL_NUSES (old_label);
1321		++LABEL_NUSES (new_label);
1322	      }
1323	}
1324      else
1325	{
1326	  /* This would have indicated an abnormal edge.  */
1327	  if (computed_jump_p (insn))
1328	    abort ();
1329
1330	  /* A return instruction can't be redirected.  */
1331	  if (returnjump_p (insn))
1332	    abort ();
1333
1334	  /* If the insn doesn't go where we think, we're confused.  */
1335	  if (JUMP_LABEL (insn) != old_label)
1336	    abort ();
1337
1338	  redirect_jump (insn, new_label);
1339	}
1340
1341      emit_label_before (new_label, bb_note);
1342      bb->head = new_label;
1343    }
1344
1345  return bb;
1346}
1347
1348/* Queue instructions for insertion on an edge between two basic blocks.
1349   The new instructions and basic blocks (if any) will not appear in the
1350   CFG until commit_edge_insertions is called.  */
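/* For instance (a hypothetical caller, not code in this file), a pass
   that wants a register copy executed only along edge E can do

	insert_insn_on_edge (gen_move_insn (new_reg, old_reg), e);

   for as many edges as it likes, then call commit_edge_insertions once
   to splice all of the queued sequences into the insn stream.  */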
1351
1352void
1353insert_insn_on_edge (pattern, e)
1354     rtx pattern;
1355     edge e;
1356{
1357  /* We cannot insert instructions on an abnormal critical edge.
1358     It will be easier to find the culprit if we die now.  */
1359  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1360      == (EDGE_ABNORMAL|EDGE_CRITICAL))
1361    abort ();
1362
1363  if (e->insns == NULL_RTX)
1364    start_sequence ();
1365  else
1366    push_to_sequence (e->insns);
1367
1368  emit_insn (pattern);
1369
1370  e->insns = get_insns ();
1371  end_sequence ();
1372}
1373
1374/* Update the CFG for the instructions queued on edge E.  */
1375
1376static void
1377commit_one_edge_insertion (e)
1378     edge e;
1379{
1380  rtx before = NULL_RTX, after = NULL_RTX, tmp;
1381  basic_block bb;
1382
1383  /* Figure out where to put these things.  If the destination has
1384     one predecessor, insert there.  Except for the exit block.  */
1385  if (e->dest->pred->pred_next == NULL
1386      && e->dest != EXIT_BLOCK_PTR)
1387    {
1388      bb = e->dest;
1389
1390      /* Get the location correct wrt a code label, and "nice" wrt
1391	 a basic block note, and before everything else.  */
1392      tmp = bb->head;
1393      if (GET_CODE (tmp) == CODE_LABEL)
1394	tmp = NEXT_INSN (tmp);
1395      if (GET_CODE (tmp) == NOTE
1396	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1397	tmp = NEXT_INSN (tmp);
1398      if (tmp == bb->head)
1399	before = tmp;
1400      else
1401	after = PREV_INSN (tmp);
1402    }
1403
1404  /* If the source has one successor and the edge is not abnormal,
1405     insert there.  Except for the entry block.  */
1406  else if ((e->flags & EDGE_ABNORMAL) == 0
1407	   && e->src->succ->succ_next == NULL
1408	   && e->src != ENTRY_BLOCK_PTR)
1409    {
1410      bb = e->src;
1411      if (GET_CODE (bb->end) == JUMP_INSN)
1412	{
1413	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
1414	     a jump with delay slots already filled?  */
1415	  if (! simplejump_p (bb->end))
1416	    abort ();
1417
1418	  before = bb->end;
1419	}
1420      else
1421	{
1422	  /* We'd better be fallthru, or we've lost track of what's what.  */
1423	  if ((e->flags & EDGE_FALLTHRU) == 0)
1424	    abort ();
1425
1426	  after = bb->end;
1427	}
1428    }
1429
1430  /* Otherwise we must split the edge.  */
1431  else
1432    {
1433      bb = split_edge (e);
1434      after = bb->end;
1435    }
1436
1437  /* Now that we've found the spot, do the insertion.  */
1438  tmp = e->insns;
1439  e->insns = NULL_RTX;
1440  if (before)
1441    {
1442      emit_insns_before (tmp, before);
1443      if (before == bb->head)
1444	bb->head = before;
1445    }
1446  else
1447    {
1448      tmp = emit_insns_after (tmp, after);
1449      if (after == bb->end)
1450	bb->end = tmp;
1451    }
1452}
1453
1454/* Update the CFG for all queued instructions.  */
1455
1456void
1457commit_edge_insertions ()
1458{
1459  int i;
1460  basic_block bb;
1461
1462  i = -1;
1463  bb = ENTRY_BLOCK_PTR;
1464  while (1)
1465    {
1466      edge e, next;
1467
1468      for (e = bb->succ; e ; e = next)
1469	{
1470	  next = e->succ_next;
1471	  if (e->insns)
1472	    commit_one_edge_insertion (e);
1473	}
1474
1475      if (++i >= n_basic_blocks)
1476	break;
1477      bb = BASIC_BLOCK (i);
1478    }
1479}
1480
1481/* Delete all unreachable basic blocks.   */
1482
1483static void
1484delete_unreachable_blocks ()
1485{
1486  basic_block *worklist, *tos;
1487  int deleted_handler;
1488  edge e;
1489  int i, n;
1490
1491  n = n_basic_blocks;
1492  tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1493
1494  /* Use basic_block->aux as a marker.  Clear them all.  */
1495
1496  for (i = 0; i < n; ++i)
1497    BASIC_BLOCK (i)->aux = NULL;
1498
1499  /* Add our starting points to the worklist.  Almost always there will
1500     be only one.  It isn't inconceivable that we might one day directly
1501     support Fortran alternate entry points.  */
1502
1503  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1504    {
1505      *tos++ = e->dest;
1506
1507      /* Mark the block with a handy non-null value.  */
1508      e->dest->aux = e;
1509    }
1510
1511  /* Iterate: find everything reachable from what we've already seen.  */
1512
1513  while (tos != worklist)
1514    {
1515      basic_block b = *--tos;
1516
1517      for (e = b->succ; e ; e = e->succ_next)
1518	if (!e->dest->aux)
1519	  {
1520	    *tos++ = e->dest;
1521	    e->dest->aux = e;
1522	  }
1523    }
1524
1525  /* Delete all unreachable basic blocks.  Count down so that we don't
1526     interfere with the block renumbering that happens in delete_block.  */
1527
1528  deleted_handler = 0;
1529
1530  for (i = n - 1; i >= 0; --i)
1531    {
1532      basic_block b = BASIC_BLOCK (i);
1533
1534      if (b->aux != NULL)
1535	/* This block was found.  Tidy up the mark.  */
1536	b->aux = NULL;
1537      else
1538	deleted_handler |= delete_block (b);
1539    }
1540
1541  /* Fix up edges that now fall through, or rather should now fall through
1542     but previously required a jump around now deleted blocks.  Simplify
1543     the search by only examining blocks numerically adjacent, since this
1544     is how find_basic_blocks created them.  */
1545
1546  for (i = 1; i < n_basic_blocks; ++i)
1547    {
1548      basic_block b = BASIC_BLOCK (i - 1);
1549      basic_block c = BASIC_BLOCK (i);
1550      edge s;
1551
1552      /* We care about simple conditional or unconditional jumps with
1553	 a single successor.
1554
1555	 If we had a conditional branch to the next instruction when
1556	 find_basic_blocks was called, then there will only be one
1557	 out edge for the block which ended with the conditional
1558	 branch (since we do not create duplicate edges).
1559
1560	 Furthermore, the edge will be marked as a fallthru because we
1561	 merge the flags for the duplicate edges.  So we do not want to
1562	 check that the edge is not a FALLTHRU edge.  */
1563      if ((s = b->succ) != NULL
1564	  && s->succ_next == NULL
1565	  && s->dest == c
1566	  /* If the last insn is not a normal conditional jump
1567	     (or an unconditional jump), then we can not tidy the
1568	     fallthru edge because we can not delete the jump.  */
1569	  && GET_CODE (b->end) == JUMP_INSN
1570	  && condjump_p (b->end))
1571	tidy_fallthru_edge (s, b, c);
1572    }
1573
1574  /* Attempt to merge blocks as made possible by edge removal.  If a block
1575     has only one successor, and the successor has only one predecessor,
1576     they may be combined.  */
1577
1578  for (i = 0; i < n_basic_blocks; )
1579    {
1580      basic_block c, b = BASIC_BLOCK (i);
1581      edge s;
1582
1583      /* A loop because chains of blocks might be combinable.  */
1584      while ((s = b->succ) != NULL
1585	     && s->succ_next == NULL
1586	     && (s->flags & EDGE_EH) == 0
1587	     && (c = s->dest) != EXIT_BLOCK_PTR
1588	     && c->pred->pred_next == NULL
1589	     /* If the last insn is not a normal conditional jump
1590		(or an unconditional jump), then we can not merge
1591		the blocks because we can not delete the jump.  */
1592	     && GET_CODE (b->end) == JUMP_INSN
1593	     && condjump_p (b->end)
1594	     && merge_blocks (s, b, c))
1595	continue;
1596
1597      /* Don't get confused by the index shift caused by deleting blocks.  */
1598      i = b->index + 1;
1599    }
1600
1601  /* If we deleted an exception handler, we may have EH region begin/end
1602     notes to remove as well.  */
1603  if (deleted_handler)
1604    delete_eh_regions ();
1605}
1606
1607/* Find EH regions for which there is no longer a handler, and delete them.  */
1608
1609static void
1610delete_eh_regions ()
1611{
1612  rtx insn;
1613
1614  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1615    if (GET_CODE (insn) == NOTE)
1616      {
1617	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1618	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1619	  {
1620	    int num = CODE_LABEL_NUMBER (insn);
1621	    /* A NULL handler indicates a region is no longer needed.  */
1622	    if (get_first_handler (num) == NULL)
1623	      {
1624		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1625		NOTE_SOURCE_FILE (insn) = 0;
1626	      }
1627	  }
1628      }
1629}
1630
1631/* Return true if NOTE is not one of the ones that must be kept paired,
1632   so that we may simply delete them.  */
1633
1634static int
1635can_delete_note_p (note)
1636     rtx note;
1637{
1638  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1639	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1640}
1641
1642/* Unlink a chain of insns between START and FINISH, leaving notes
1643   that must be paired.  */
1644
1645static void
1646delete_insn_chain (start, finish)
1647     rtx start, finish;
1648{
1649  /* Unchain the insns one by one.  It would be quicker to delete all
1650     of these with a single unchaining, rather than one at a time, but
1651     we need to keep the NOTEs.  */
1652
1653  rtx next;
1654
1655  while (1)
1656    {
1657      next = NEXT_INSN (start);
1658      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1659	;
1660      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1661	;
1662      else
1663	next = flow_delete_insn (start);
1664
1665      if (start == finish)
1666	break;
1667      start = next;
1668    }
1669}
1670
1671/* Delete the insns in a (non-live) block.  We physically delete every
1672   non-deleted-note insn, and update the flow graph appropriately.
1673
1674   Return nonzero if we deleted an exception handler.  */
1675
1676/* ??? Preserving all such notes strikes me as wrong.  It would be nice
1677   to post-process the stream to remove empty blocks, loops, ranges, etc.  */
1678
1679static int
1680delete_block (b)
1681     basic_block b;
1682{
1683  int deleted_handler = 0;
1684  rtx insn, end;
1685
1686  /* If the head of this block is a CODE_LABEL, then it might be the
1687     label for an exception handler which can't be reached.
1688
1689     We need to remove the label from the exception_handler_label list
1690     and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
1691     notes.  */
1692
1693  insn = b->head;
1694
1695  if (GET_CODE (insn) == CODE_LABEL)
1696    {
1697      rtx x, *prev = &exception_handler_labels;
1698
1699      for (x = exception_handler_labels; x; x = XEXP (x, 1))
1700	{
1701	  if (XEXP (x, 0) == insn)
1702	    {
1703	      /* Found a match, splice this label out of the EH label list.  */
1704	      *prev = XEXP (x, 1);
1705	      XEXP (x, 1) = NULL_RTX;
1706	      XEXP (x, 0) = NULL_RTX;
1707
1708	      /* Remove the handler from all regions.  */
1709	      remove_handler (insn);
1710	      deleted_handler = 1;
1711	      break;
1712	    }
1713	  prev = &XEXP (x, 1);
1714	}
1715
1716      /* This label may be referenced by code solely for its value, or
1717	 referenced by static data, or something.  We have determined
1718	 that it is not reachable, but cannot delete the label itself.
1719	 Save code space and continue to delete the balance of the block,
1720	 along with properly updating the cfg.  */
1721      if (!can_delete_label_p (insn))
1722	{
1723	  /* If we've only got one of these, skip the whole deleting
1724	     insns thing.  */
1725	  if (insn == b->end)
1726	    goto no_delete_insns;
1727	  insn = NEXT_INSN (insn);
1728	}
1729    }
1730
1731  /* Selectively unlink the insn chain.  Include any BARRIER that may
1732     follow the basic block.  */
1733  end = next_nonnote_insn (b->end);
1734  if (!end || GET_CODE (end) != BARRIER)
1735    end = b->end;
1736  delete_insn_chain (insn, end);
1737
1738no_delete_insns:
1739
1740  /* Remove the edges into and out of this block.  Note that there may
1741     indeed be edges in, if we are removing an unreachable loop.  */
1742  {
1743    edge e, next, *q;
1744
1745    for (e = b->pred; e ; e = next)
1746      {
1747	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1748	  continue;
1749	*q = e->succ_next;
1750	next = e->pred_next;
1751	free (e);
1752      }
1753    for (e = b->succ; e ; e = next)
1754      {
1755	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1756	  continue;
1757	*q = e->pred_next;
1758	next = e->succ_next;
1759	free (e);
1760      }
1761
1762    b->pred = NULL;
1763    b->succ = NULL;
1764  }
1765
1766  /* Remove the basic block from the array, and compact behind it.  */
1767  expunge_block (b);
1768
1769  return deleted_handler;
1770}
1771
1772/* Remove block B from the basic block array and compact behind it.  */
1773
1774static void
1775expunge_block (b)
1776     basic_block b;
1777{
1778  int i, n = n_basic_blocks;
1779
1780  for (i = b->index; i + 1 < n; ++i)
1781    {
1782      basic_block x = BASIC_BLOCK (i + 1);
1783      BASIC_BLOCK (i) = x;
1784      x->index = i;
1785    }
1786
1787  basic_block_info->num_elements--;
1788  n_basic_blocks--;
1789}
1790
1791/* Delete INSN by patching it out.  Return the next insn.  */
1792
1793static rtx
1794flow_delete_insn (insn)
1795     rtx insn;
1796{
1797  rtx prev = PREV_INSN (insn);
1798  rtx next = NEXT_INSN (insn);
1799
1800  PREV_INSN (insn) = NULL_RTX;
1801  NEXT_INSN (insn) = NULL_RTX;
1802
1803  if (prev)
1804    NEXT_INSN (prev) = next;
1805  if (next)
1806    PREV_INSN (next) = prev;
1807  else
1808    set_last_insn (prev);
1809
1810  if (GET_CODE (insn) == CODE_LABEL)
1811    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1812
1813  /* If deleting a jump, decrement the use count of the label.  Deleting
1814     the label itself should happen in the normal course of block merging.  */
1815  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1816    LABEL_NUSES (JUMP_LABEL (insn))--;
1817
1818  return next;
1819}
1820
1821/* True if a given label can be deleted.  */
1822
1823static int
1824can_delete_label_p (label)
1825     rtx label;
1826{
1827  rtx x;
1828
1829  if (LABEL_PRESERVE_P (label))
1830    return 0;
1831
1832  for (x = forced_labels; x ; x = XEXP (x, 1))
1833    if (label == XEXP (x, 0))
1834      return 0;
1835  for (x = label_value_list; x ; x = XEXP (x, 1))
1836    if (label == XEXP (x, 0))
1837      return 0;
1838  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1839    if (label == XEXP (x, 0))
1840      return 0;
1841
1842  /* User declared labels must be preserved.  */
1843  if (LABEL_NAME (label) != 0)
1844    return 0;
1845
1846  return 1;
1847}
1848
1849/* Blocks A and B are to be merged into a single block.  The insns
1850   are already contiguous, hence `nomove'.  */
1851
1852static void
1853merge_blocks_nomove (a, b)
1854     basic_block a, b;
1855{
1856  edge e;
1857  rtx b_head, b_end, a_end;
1858  int b_empty = 0;
1859
1860  /* If there was a CODE_LABEL beginning B, delete it.  */
1861  b_head = b->head;
1862  b_end = b->end;
1863  if (GET_CODE (b_head) == CODE_LABEL)
1864    {
1865      /* Detect basic blocks with nothing but a label.  This can happen
1866	 in particular at the end of a function.  */
1867      if (b_head == b_end)
1868	b_empty = 1;
1869      b_head = flow_delete_insn (b_head);
1870    }
1871
1872  /* Delete the basic block note.  */
1873  if (GET_CODE (b_head) == NOTE
1874      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
1875    {
1876      if (b_head == b_end)
1877	b_empty = 1;
1878      b_head = flow_delete_insn (b_head);
1879    }
1880
1881  /* If there was a jump out of A, delete it.  */
1882  a_end = a->end;
1883  if (GET_CODE (a_end) == JUMP_INSN)
1884    {
1885      rtx prev;
1886
1887      prev = prev_nonnote_insn (a_end);
1888      if (!prev)
1889	prev = a->head;
1890
1891#ifdef HAVE_cc0
1892      /* If this was a conditional jump, we need to also delete
1893	 the insn that set cc0.  */
1894
1895      if (prev && sets_cc0_p (prev))
1896	{
1897          rtx tmp = prev;
1898	  prev = prev_nonnote_insn (prev);
1899	  if (!prev)
1900	    prev = a->head;
1901	  flow_delete_insn (tmp);
1902	}
1903#endif
1904
1905      /* Note that a->head != a->end, since we should have at least a
1906	 bb note plus the jump, so prev != a_end.  */
1907      flow_delete_insn (a_end);
1908      a_end = prev;
1909    }
1910
1911  /* By definition, there should only be one successor of A, and that is
1912     B.  Free that edge struct.  */
1913  free (a->succ);
1914
1915  /* Adjust the edges out of B for the new owner.  */
1916  for (e = b->succ; e ; e = e->succ_next)
1917    e->src = a;
1918  a->succ = b->succ;
1919
1920  /* Reassociate the insns of B with A.  */
1921  if (!b_empty)
1922    {
1923      BLOCK_FOR_INSN (b_head) = a;
1924      while (b_head != b_end)
1925	{
1926	  b_head = NEXT_INSN (b_head);
1927	  BLOCK_FOR_INSN (b_head) = a;
1928	}
1929      a_end = b_head;
1930    }
1931  a->end = a_end;
1932
1933  /* Compact the basic block array.  */
1934  expunge_block (b);
1935}
1936
1937/* Attempt to merge basic blocks that are potentially non-adjacent.
1938   Return true iff the attempt succeeded.  */
1939
1940static int
1941merge_blocks (e, b, c)
1942     edge e;
1943     basic_block b, c;
1944{
1945  /* If B has a fallthru edge to C, no need to move anything.  */
1946  if (!(e->flags & EDGE_FALLTHRU))
1947    {
1948      /* ??? From here on out we must make sure to not munge nesting
1949	 of exception regions and lexical blocks.  Need to think about
1950	 these cases before this gets implemented.  */
1951      return 0;
1952
1953      /* If C has an outgoing fallthru, and B does not have an incoming
1954	 fallthru, move B before C.  The latter clause is somewhat arbitrary,
1955	 but avoids modifying blocks other than the two we've been given.  */
1956
1957      /* Otherwise, move C after B.  If C had a fallthru, which doesn't
1958	 happen to be the physical successor to B, insert an unconditional
1959	 branch.  If C already ended with a conditional branch, the new
1960	 jump must go in a new basic block D.  */
1961    }
1962
1963  /* If a label still appears somewhere and we cannot delete the label,
1964     then we cannot merge the blocks.  The edge was tidied already.  */
1965  {
1966    rtx insn, stop = NEXT_INSN (c->head);
1967    for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
1968      if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
1969	return 0;
1970  }
1971
1972  merge_blocks_nomove (b, c);
1973  return 1;
1974}
1975
1976/* The given edge should potentially be a fallthru edge.  If that is in
1977   fact true, delete the unconditional jump and barriers that are in
1978   the way.  */
1979
1980static void
1981tidy_fallthru_edge (e, b, c)
1982     edge e;
1983     basic_block b, c;
1984{
1985  rtx q;
1986
1987  /* ??? In a late-running flow pass, other folks may have deleted basic
1988     blocks by nopping out blocks, leaving multiple BARRIERs between here
1989     and the target label. They ought to be chastized and fixed.
1990
1991     We can also wind up with a sequence of undeletable labels between
1992     one block and the next.
1993
1994     So search through a sequence of barriers, labels, and notes for
1995     the head of block C and assert that we really do fall through.  */
1996
1997  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
1998    return;
1999
2000  /* Remove what will soon cease being the jump insn from the source block.
2001     If block B consisted only of this single jump, turn it into a deleted
2002     note.  */
2003  q = b->end;
2004  if (GET_CODE (q) == JUMP_INSN)
2005    {
2006#ifdef HAVE_cc0
2007      /* If this was a conditional jump, we need to also delete
2008	 the insn that set cc0.  */
2009      if (! simplejump_p (q) && condjump_p (q))
2010	q = PREV_INSN (q);
2011#endif
2012
2013      if (b->head == q)
2014	{
2015	  PUT_CODE (q, NOTE);
2016	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2017	  NOTE_SOURCE_FILE (q) = 0;
2018	}
2019      else
2020	b->end = q = PREV_INSN (q);
2021    }
2022
2023  /* Selectively unlink the sequence.  */
2024  if (q != PREV_INSN (c->head))
2025    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2026
2027  e->flags |= EDGE_FALLTHRU;
2028}
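
/* An editor's illustration (not part of the pass) of what
   tidy_fallthru_edge does.  Suppose block B ends in an unconditional
   jump to the label heading block C, and C is the next block in the
   insn stream:

	(jump_insn (set (pc) (label_ref L1)))
	(barrier)
	(code_label L1)			<- head of block C

   The jump and the barrier are unlinked (or the jump becomes a
   NOTE_INSN_DELETED if it was the whole of B), and the B->C edge is
   marked EDGE_FALLTHRU.  */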
2029
2030/* Discover and record the loop depth at the head of each basic block.  */
2031
2032static void
2033calculate_loop_depth (insns)
2034     rtx insns;
2035{
2036  basic_block bb;
2037  rtx insn;
2038  int i = 0, depth = 1;
2039
2040  bb = BASIC_BLOCK (i);
2041  for (insn = insns; insn ; insn = NEXT_INSN (insn))
2042    {
2043      if (insn == bb->head)
2044	{
2045	  bb->loop_depth = depth;
2046	  if (++i >= n_basic_blocks)
2047	    break;
2048	  bb = BASIC_BLOCK (i);
2049	}
2050
2051      if (GET_CODE (insn) == NOTE)
2052	{
2053	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2054	    depth++;
2055	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2056	    depth--;
2057
2058	  /* If the depth drops to 0, there has been a bookkeeping error.  */
2059	  if (depth == 0)
2060	    abort ();
2061	}
2062    }
2063}
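
/* For illustration (editor's sketch, not part of the pass): with two
   nested loops, the note stream drives DEPTH like this, and each block
   head records the value current at that point:

	NOTE_INSN_LOOP_BEG		depth 1 -> 2
	  block A head			A->loop_depth = 2
	  NOTE_INSN_LOOP_BEG		depth 2 -> 3
	    block B head		B->loop_depth = 3
	  NOTE_INSN_LOOP_END		depth 3 -> 2
	NOTE_INSN_LOOP_END		depth 2 -> 1

   Blocks outside any loop keep the initial depth of 1.  */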
2064
2065/* Perform data flow analysis.
2066   F is the first insn of the function and NREGS the number of register numbers
2067   in use.  */
2068
2069void
2070life_analysis (f, nregs, file, remove_dead_code)
2071     rtx f;
2072     int nregs;
2073     FILE *file;
2074     int remove_dead_code;
2075{
2076#ifdef ELIMINABLE_REGS
2077  register size_t i;
2078  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2079#endif
2080
2081  /* Record which registers will be eliminated.  We use this in
2082     mark_used_regs.  */
2083
2084  CLEAR_HARD_REG_SET (elim_reg_set);
2085
2086#ifdef ELIMINABLE_REGS
2087  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2088    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2089#else
2090  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2091#endif
2092
2093  /* Allocate a bitmap to be filled in by record_volatile_insns.  */
2094  uid_volatile = BITMAP_ALLOCA ();
2095
2096  /* We want alias analysis information for local dead store elimination.  */
2097  init_alias_analysis ();
2098  life_analysis_1 (f, nregs, remove_dead_code);
2099  end_alias_analysis ();
2100
2101  if (file)
2102    dump_flow_info (file);
2103
2104  BITMAP_FREE (uid_volatile);
2105  free_basic_block_vars (1);
2106}
2107
2108/* Free the variables allocated by find_basic_blocks.
2109
2110   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */
2111
2112void
2113free_basic_block_vars (keep_head_end_p)
2114     int keep_head_end_p;
2115{
2116  if (basic_block_for_insn)
2117    {
2118      VARRAY_FREE (basic_block_for_insn);
2119      basic_block_for_insn = NULL;
2120    }
2121
2122  if (! keep_head_end_p)
2123    {
2124      clear_edges ();
2125      VARRAY_FREE (basic_block_info);
2126      n_basic_blocks = 0;
2127
2128      ENTRY_BLOCK_PTR->aux = NULL;
2129      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2130      EXIT_BLOCK_PTR->aux = NULL;
2131      EXIT_BLOCK_PTR->global_live_at_start = NULL;
2132    }
2133}
2134
2135/* Return nonzero if the destination of SET equals the source.  */
2136static int
2137set_noop_p (set)
2138     rtx set;
2139{
2140  rtx src = SET_SRC (set);
2141  rtx dst = SET_DEST (set);
2142  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2143      && REGNO (src) == REGNO (dst))
2144    return 1;
2145  if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2146      || SUBREG_WORD (src) != SUBREG_WORD (dst))
2147    return 0;
2148  src = SUBREG_REG (src);
2149  dst = SUBREG_REG (dst);
2150  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2151      && REGNO (src) == REGNO (dst))
2152    return 1;
2153  return 0;
2154}
2155
2156/* Return nonzero if an insn consists only of SETs, each of which only sets a
2157   value to itself.  */
2158static int
2159noop_move_p (insn)
2160     rtx insn;
2161{
2162  rtx pat = PATTERN (insn);
2163
2164  /* Insns carrying these notes are useful later on.  */
2165  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2166    return 0;
2167
2168  if (GET_CODE (pat) == SET && set_noop_p (pat))
2169    return 1;
2170
2171  if (GET_CODE (pat) == PARALLEL)
2172    {
2173      int i;
2174      /* If nothing but SETs of registers to themselves,
2175	 this insn can also be deleted.  */
2176      for (i = 0; i < XVECLEN (pat, 0); i++)
2177	{
2178	  rtx tem = XVECEXP (pat, 0, i);
2179
2180	  if (GET_CODE (tem) == USE
2181	      || GET_CODE (tem) == CLOBBER)
2182	    continue;
2183
2184	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2185	    return 0;
2186	}
2187
2188      return 1;
2189    }
2190  return 0;
2191}
2192
2193static void
2194notice_stack_pointer_modification (x, pat)
2195     rtx x;
2196     rtx pat ATTRIBUTE_UNUSED;
2197{
2198  if (x == stack_pointer_rtx
2199      /* The stack pointer is only modified indirectly as the result
2200	 of a push until later in flow.  See the comments in rtl.texi
2201	 regarding Embedded Side-Effects on Addresses.  */
2202      || (GET_CODE (x) == MEM
2203	  && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2204	      || GET_CODE (XEXP (x, 0)) == PRE_INC
2205	      || GET_CODE (XEXP (x, 0)) == POST_DEC
2206	      || GET_CODE (XEXP (x, 0)) == POST_INC)
2207	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2208    current_function_sp_is_unchanging = 0;
2209}
2210
2211/* Record which insns refer to any volatile memory
2212   or for any reason can't be deleted just because they are dead stores.
2213   Also, delete any insns that copy a register to itself.
2214   And see if the stack pointer is modified.  */
2215static void
2216record_volatile_insns (f)
2217     rtx f;
2218{
2219  rtx insn;
2220  for (insn = f; insn; insn = NEXT_INSN (insn))
2221    {
2222      enum rtx_code code1 = GET_CODE (insn);
2223      if (code1 == CALL_INSN)
2224	SET_INSN_VOLATILE (insn);
2225      else if (code1 == INSN || code1 == JUMP_INSN)
2226	{
2227	  if (GET_CODE (PATTERN (insn)) != USE
2228	      && volatile_refs_p (PATTERN (insn)))
2229	    SET_INSN_VOLATILE (insn);
2230
2231	  /* A SET that makes space on the stack cannot be dead.
2232	     (Such SETs occur only for allocating variable-size data,
2233	     so they will always have a PLUS or MINUS according to the
2234	     direction of stack growth.)
2235	     Even if this function never uses this stack pointer value,
2236	     signal handlers do!  */
2237	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2238		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2239#ifdef STACK_GROWS_DOWNWARD
2240		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2241#else
2242		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2243#endif
2244		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2245	    SET_INSN_VOLATILE (insn);
2246
2247	  /* Delete (in effect) any obvious no-op moves.  */
2248	  else if (noop_move_p (insn))
2249	    {
2250	      PUT_CODE (insn, NOTE);
2251	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2252	      NOTE_SOURCE_FILE (insn) = 0;
2253	    }
2254	}
2255
2256      /* Check if insn modifies the stack pointer.  */
2257      if ( current_function_sp_is_unchanging
2258	   && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2259	note_stores (PATTERN (insn), notice_stack_pointer_modification);
2260    }
2261}
2262
2263/* Mark those regs which are needed at the end of the function as live
2264   at the end of the last basic block.  */
2265static void
2266mark_regs_live_at_end (set)
2267     regset set;
2268{
2269  int i;
2270
2271  /* If exiting needs the right stack value, consider the stack pointer
2272     live at the end of the function.  */
2273  if (! EXIT_IGNORE_STACK
2274      || (! FRAME_POINTER_REQUIRED
2275	  && ! current_function_calls_alloca
2276	  && flag_omit_frame_pointer)
2277      || current_function_sp_is_unchanging)
2278    {
2279      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2280    }
2281
2282  /* Mark the frame pointer if needed at the end of the function.  If
2283     we end up eliminating it, it will be removed from the live list
2284     of each basic block by reload.  */
2285
2286  if (! reload_completed || frame_pointer_needed)
2287    {
2288      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2289#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2290      /* If they are different, also mark the hard frame pointer as live */
2291      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2292#endif
2293    }
2294
2295  /* Mark all global registers, and all registers used by the epilogue
2296     as being live at the end of the function since they may be
2297     referenced by our caller.  */
2298  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2299    if (global_regs[i]
2300#ifdef EPILOGUE_USES
2301	|| EPILOGUE_USES (i)
2302#endif
2303	)
2304      SET_REGNO_REG_SET (set, i);
2305
2306  /* ??? Mark function return value here rather than as uses.  */
2307}
2308
2309/* Determine which registers are live at the start of each
2310   basic block of the function whose first insn is F.
2311   NREGS is the number of registers used in F.
2312   We allocate the vector basic_block_live_at_start
2313   and the regsets that it points to, and fill them with the data.
2314   regset_size and regset_bytes are also set here.  */
2315
2316static void
2317life_analysis_1 (f, nregs, remove_dead_code)
2318     rtx f;
2319     int nregs;
2320     int remove_dead_code;
2321{
2322  int first_pass;
2323  int changed;
2324  register int i;
2325  char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2326  regset *new_live_at_end;
2327
2328  struct obstack flow_obstack;
2329
2330  gcc_obstack_init (&flow_obstack);
2331
2332  max_regno = nregs;
2333
2334  /* Allocate and zero out many data structures
2335     that will record the data from lifetime analysis.  */
2336
2337  allocate_reg_life_data ();
2338  allocate_bb_life_data ();
2339
2340  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2341  memset (reg_next_use, 0, nregs * sizeof (rtx));
2342
2343  /* Set up regset-vectors used internally within this function.
2344     Their meanings are documented above, with their declarations.  */
2345
2346  new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
2347  init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);
2348
2349  /* Stick these vectors into the AUX field of the basic block, so that
2350     we don't have to keep going through the index.  */
2351
2352  for (i = 0; i < n_basic_blocks; ++i)
2353    BASIC_BLOCK (i)->aux = new_live_at_end[i];
2354  ENTRY_BLOCK_PTR->aux = new_live_at_end[i];
2355
2356  /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2357     This will be cleared by record_volatile_insns if it encounters an insn
2358     which modifies the stack pointer.  */
2359  current_function_sp_is_unchanging = !current_function_calls_alloca;
2360
2361  record_volatile_insns (f);
2362
2363  if (n_basic_blocks > 0)
2364    {
2365      regset theend;
2366      register edge e;
2367
2368      theend = EXIT_BLOCK_PTR->global_live_at_start;
2369      mark_regs_live_at_end (theend);
2370
2371      /* Propagate this exit data to each of EXIT's predecessors.  */
2372      for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2373	{
2374	  COPY_REG_SET (e->src->global_live_at_end, theend);
2375	  COPY_REG_SET ((regset) e->src->aux, theend);
2376	}
2377    }
2378
2379  /* The post-reload life analysis must have (on a global basis) the same
2380     registers live as were computed by reload itself.
2381
2382     Otherwise elimination offsets and such may be incorrect.
2383
2384     Reload will mark some registers as live even though they do not appear
2385     in the rtl.  */
2386  if (reload_completed)
2387    memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2388  memset (regs_ever_live, 0, sizeof regs_ever_live);
2389
2390  /* Propagate life info through the basic blocks
2391     around the graph of basic blocks.
2392
2393     This is a relaxation process: each time a new register
2394     is live at the end of the basic block, we must scan the block
2395     to determine which registers are, as a consequence, live at the beginning
2396     of that block.  These registers must then be marked live at the ends
2397     of all the blocks that can transfer control to that block.
2398     The process continues until it reaches a fixed point.  */
2399
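/* In equation form (an editor's summary of the loop that follows), the
   iteration maintains for each block B

	new_live_at_end (B) = union over successors S of live_at_start (S)
	live_at_start (B)   = use (B) union (new_live_at_end (B) - def (B))

   scanning the blocks from last to first, and stops when no
   live_at_start set changes.  The sets only ever grow, so the
   iteration must terminate.  */
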
2400  first_pass = 1;
2401  changed = 1;
2402  while (changed)
2403    {
2404      changed = 0;
2405      for (i = n_basic_blocks - 1; i >= 0; i--)
2406	{
2407	  basic_block bb = BASIC_BLOCK (i);
2408	  int consider = first_pass;
2409	  int must_rescan = first_pass;
2410	  register int j;
2411
2412	  if (!first_pass)
2413	    {
2414	      /* Set CONSIDER if this block needs thinking about at all
2415		 (that is, if the regs live now at the end of it
2416		 are not the same as were live at the end of it when
2417		 we last thought about it).
2418		 Set must_rescan if it needs to be thought about
2419		 instruction by instruction (that is, if any additional
2420		 reg that is live at the end now but was not live there before
2421		 is one of the significant regs of this basic block).  */
2422
2423	      EXECUTE_IF_AND_COMPL_IN_REG_SET
2424		((regset) bb->aux, bb->global_live_at_end, 0, j,
2425		 {
2426		   consider = 1;
2427		   if (REGNO_REG_SET_P (bb->local_set, j))
2428		     {
2429		       must_rescan = 1;
2430		       goto done;
2431		     }
2432		 });
2433	    done:
2434	      if (! consider)
2435		continue;
2436	    }
2437
2438	  /* The live_at_start of this block may be changing,
2439	     so another pass will be required after this one.  */
2440	  changed = 1;
2441
2442	  if (! must_rescan)
2443	    {
2444	      /* No complete rescan needed;
2445		 just record those variables newly known live at end
2446		 as live at start as well.  */
2447	      IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
2448				     (regset) bb->aux,
2449				     bb->global_live_at_end);
2450
2451	      IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
2452				     (regset) bb->aux,
2453				     bb->global_live_at_end);
2454	    }
2455	  else
2456	    {
2457	      /* Update the basic_block_live_at_start
2458		 by propagation backwards through the block.  */
2459	      COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
2460	      COPY_REG_SET (bb->global_live_at_start,
2461			    bb->global_live_at_end);
2462	      propagate_block (bb->global_live_at_start,
2463			       bb->head, bb->end, 0,
2464			       first_pass ? bb->local_set : (regset) 0,
2465			       i, remove_dead_code);
2466	    }
2467
2468	  /* Update the new_live_at_end's of the block's predecessors.  */
2469	  {
2470	    register edge e;
2471
2472	    for (e = bb->pred; e ; e = e->pred_next)
2473	      IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
2474	  }
2475
2476#ifdef USE_C_ALLOCA
2477	  alloca (0);
2478#endif
2479	}
2480      first_pass = 0;
2481    }
2482
2483  /* The only pseudos that are live at the beginning of the function are
2484     those that were not set anywhere in the function.  local-alloc doesn't
2485     know how to handle these correctly, so mark them as not local to any
2486     one basic block.  */
2487
2488  if (n_basic_blocks > 0)
2489    EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
2490			       FIRST_PSEUDO_REGISTER, i,
2491			       {
2492				 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2493			       });
2494
2495  /* Now the life information is accurate.  Make one more pass over each
2496     basic block to delete dead stores, create autoincrement addressing
2497     and record how many times each register is used, is set, or dies.  */
2498
2499  for (i = 0; i < n_basic_blocks; i++)
2500    {
2501      basic_block bb = BASIC_BLOCK (i);
2502
2503      /* We start with global_live_at_end to determine which stores are
2504	 dead.  This process is destructive, and we wish to preserve the
2505	 contents of global_live_at_end for posterity.  Fortunately,
2506	 new_live_at_end, due to the way we converged on a solution,
2507	 contains a duplicate of global_live_at_end that we can kill.  */
2508      propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);
2509
2510#ifdef USE_C_ALLOCA
2511      alloca (0);
2512#endif
2513    }
2514
2515  /* We have a problem with any pseudoreg that lives across the setjmp.
2516     ANSI says that if a user variable does not change in value between
2517     the setjmp and the longjmp, then the longjmp preserves it.  This
2518     includes longjmp from a place where the pseudo appears dead.
2519     (In principle, the value still exists if it is in scope.)
2520     If the pseudo goes in a hard reg, some other value may occupy
2521     that hard reg where this pseudo is dead, thus clobbering the pseudo.
2522     Conclusion: such a pseudo must not go in a hard reg.  */
2523  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2524			     FIRST_PSEUDO_REGISTER, i,
2525			     {
2526			       if (regno_reg_rtx[i] != 0)
2527				 {
2528				   REG_LIVE_LENGTH (i) = -1;
2529				   REG_BASIC_BLOCK (i) = -1;
2530				 }
2531			     });
2532
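/* A user-level illustration (editor's sketch) of the setjmp problem
   handled just above:

	jmp_buf buf;
	{
	  int x = f ();			/* lives in a pseudo */
	  if (setjmp (buf) == 0)
	    g ();			/* may longjmp (buf, 1) later */
	  return x;			/* must still see f ()'s value */
	}

   Since x is not changed between the setjmp and any longjmp, ANSI C
   requires it to survive the longjmp, even across points where the
   pseudo appears dead.  Flagging the pseudo above keeps the register
   allocators from putting it in a hard register that some other value
   might occupy at such a point.  */
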
2533  /* Restore regs_ever_live that was provided by reload.  */
2534  if (reload_completed)
2535    memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2536
2537  free_regset_vector (new_live_at_end, n_basic_blocks);
2538  obstack_free (&flow_obstack, NULL_PTR);
2539
2540  for (i = 0; i < n_basic_blocks; ++i)
2541    BASIC_BLOCK (i)->aux = NULL;
2542  ENTRY_BLOCK_PTR->aux = NULL;
2543}
2544
2545/* Subroutines of life analysis.  */
2546
2547/* Allocate the permanent data structures that represent the results
2548   of life analysis.  Not static since used also for stupid life analysis.  */
2549
2550void
2551allocate_bb_life_data ()
2552{
2553  register int i;
2554
2555  for (i = 0; i < n_basic_blocks; i++)
2556    {
2557      basic_block bb = BASIC_BLOCK (i);
2558
2559      bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
2560      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
2561      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
2562    }
2563
2564  ENTRY_BLOCK_PTR->global_live_at_end
2565    = OBSTACK_ALLOC_REG_SET (function_obstack);
2566  EXIT_BLOCK_PTR->global_live_at_start
2567    = OBSTACK_ALLOC_REG_SET (function_obstack);
2568
2569  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
2570}
2571
2572void
2573allocate_reg_life_data ()
2574{
2575  int i;
2576
2577  /* Recalculate the register space, in case it has grown.  Old style
2578     vector oriented regsets would set regset_{size,bytes} here also.  */
2579  allocate_reg_info (max_regno, FALSE, FALSE);
2580
2581  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2582     information, explicitly reset it here.  The allocation should have
2583     already happened on the previous reg_scan pass.  Make sure in case
2584     some more registers were allocated.  */
2585  for (i = 0; i < max_regno; i++)
2586    REG_N_SETS (i) = 0;
2587}
2588
2589/* Make each element of VECTOR point at a regset.  The vector has
2590   NELTS elements, and space is allocated from the ALLOC_OBSTACK
2591   obstack.  */
2592
2593static void
2594init_regset_vector (vector, nelts, alloc_obstack)
2595     regset *vector;
2596     int nelts;
2597     struct obstack *alloc_obstack;
2598{
2599  register int i;
2600
2601  for (i = 0; i < nelts; i++)
2602    {
2603      vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
2604      CLEAR_REG_SET (vector[i]);
2605    }
2606}
2607
2608/* Release any additional space allocated for each element of VECTOR point
2609   other than the regset header itself.  The vector has NELTS elements.  */
2610
2611void
2612free_regset_vector (vector, nelts)
2613     regset *vector;
2614     int nelts;
2615{
2616  register int i;
2617
2618  for (i = 0; i < nelts; i++)
2619    FREE_REG_SET (vector[i]);
2620}
2621
2622/* Compute the registers live at the beginning of a basic block
2623   from those live at the end.
2624
2625   When called, OLD contains those live at the end.
2626   On return, it contains those live at the beginning.
2627   FIRST and LAST are the first and last insns of the basic block.
2628
2629   FINAL is nonzero if we are doing the final pass which is not
2630   for computing the life info (since that has already been done)
2631   but for acting on it.  On this pass, we delete dead stores,
2632   set up the logical links and dead-variables lists of instructions,
2633   and merge instructions for autoincrement and autodecrement addresses.
2634
2635   SIGNIFICANT is nonzero only on the first pass for each basic block.
2636   If it is nonzero, it points to a regset in which we store
2637   a 1 for each register that is set within the block.
2638
2639   BNUM is the number of the basic block.  */
2640
2641static void
2642propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
2643     register regset old;
2644     rtx first;
2645     rtx last;
2646     int final;
2647     regset significant;
2648     int bnum;
2649     int remove_dead_code;
2650{
2651  register rtx insn;
2652  rtx prev;
2653  regset live;
2654  regset dead;
2655
2656  /* Find the loop depth for this block.  Ignore loop level changes in the
2657     middle of the basic block -- for register allocation purposes, the
2658     important uses will be in the blocks wholly contained within the loop,
2659     not in the loop pre-header or post-trailer.  */
2660  loop_depth = BASIC_BLOCK (bnum)->loop_depth;
2661
2662  dead = ALLOCA_REG_SET ();
2663  live = ALLOCA_REG_SET ();
2664
2665  cc0_live = 0;
2666  mem_set_list = NULL_RTX;
2667
2668  if (final)
2669    {
2670      register int i;
2671
2672      /* Process the regs live at the end of the block.
2673	 Mark them as not local to any one basic block. */
2674      EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2675				 {
2676				   REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2677				 });
2678    }
2679
2680  /* Scan the block an insn at a time from end to beginning.  */
2681
2682  for (insn = last; ; insn = prev)
2683    {
2684      prev = PREV_INSN (insn);
2685
2686      if (GET_CODE (insn) == NOTE)
2687	{
2688	  /* If this is a call to `setjmp' et al,
2689	     warn if any non-volatile datum is live.  */
2690
2691	  if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2692	    IOR_REG_SET (regs_live_at_setjmp, old);
2693	}
2694
2695      /* Update the life-status of regs for this insn.
2696	 First DEAD gets which regs are set in this insn
2697	 then LIVE gets which regs are used in this insn.
2698	 Then the regs live before the insn
2699	 are those live after, with DEAD regs turned off,
2700	 and then LIVE regs turned on.  */
2701
2702      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2703	{
2704	  register int i;
2705	  rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
2706	  int insn_is_dead = 0;
2707	  int libcall_is_dead = 0;
2708
2709	  if (remove_dead_code)
2710	    {
2711	      insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
2712	                      /* Don't delete something that refers to volatile storage!  */
2713	                      && ! INSN_VOLATILE (insn));
2714	      libcall_is_dead = (insn_is_dead && note != 0
2715	                         && libcall_dead_p (PATTERN (insn), old, note, insn));
2716	    }
2717
2718	  /* If an instruction consists of just dead store(s) on final pass,
2719	     "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2720	     We could really delete it with delete_insn, but that
2721	     can cause trouble for first or last insn in a basic block.  */
2722	  if (final && insn_is_dead)
2723	    {
2724	      PUT_CODE (insn, NOTE);
2725	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2726	      NOTE_SOURCE_FILE (insn) = 0;
2727
2728	      /* CC0 is now known to be dead.  Either this insn used it,
2729		 in which case it doesn't anymore, or clobbered it,
2730		 so the next insn can't use it.  */
2731	      cc0_live = 0;
2732
2733	      /* If this insn is copying the return value from a library call,
2734		 delete the entire library call.  */
2735	      if (libcall_is_dead)
2736		{
2737		  rtx first = XEXP (note, 0);
2738		  rtx p = insn;
2739		  while (INSN_DELETED_P (first))
2740		    first = NEXT_INSN (first);
2741		  while (p != first)
2742		    {
2743		      p = PREV_INSN (p);
2744		      PUT_CODE (p, NOTE);
2745		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
2746		      NOTE_SOURCE_FILE (p) = 0;
2747		    }
2748		}
2749	      goto flushed;
2750	    }
2751
2752	  CLEAR_REG_SET (dead);
2753	  CLEAR_REG_SET (live);
2754
2755	  /* See if this is an increment or decrement that can be
2756	     merged into a following memory address.  */
2757#ifdef AUTO_INC_DEC
2758	  {
2759	    register rtx x = single_set (insn);
2760
2761	    /* Does this instruction increment or decrement a register?  */
2762	    if (!reload_completed
2763		&& final && x != 0
2764		&& GET_CODE (SET_DEST (x)) == REG
2765		&& (GET_CODE (SET_SRC (x)) == PLUS
2766		    || GET_CODE (SET_SRC (x)) == MINUS)
2767		&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
2768		&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2769		/* Ok, look for a following memory ref we can combine with.
2770		   If one is found, change the memory ref to a PRE_INC
2771		   or PRE_DEC, cancel this insn, and return 1.
2772		   Return 0 if nothing has been done.  */
2773		&& try_pre_increment_1 (insn))
2774	      goto flushed;
2775	  }
2776#endif /* AUTO_INC_DEC */
2777
2778	  /* If this is not the final pass, and this insn is copying the
2779	     value of a library call and it's dead, don't scan the
2780	     insns that perform the library call, so that the call's
2781	     arguments are not marked live.  */
2782	  if (libcall_is_dead)
2783	    {
2784	      /* Mark the dest reg as `significant'.  */
2785	      mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
2786
2787	      insn = XEXP (note, 0);
2788	      prev = PREV_INSN (insn);
2789	    }
2790	  else if (GET_CODE (PATTERN (insn)) == SET
2791		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2792		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2793		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
2794		   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
2795	    /* We have an insn to pop a constant amount off the stack.
2796	       (Such insns use PLUS regardless of the direction of the stack,
2797	       and any insn to adjust the stack by a constant is always a pop.)
2798	       These insns, if not dead stores, have no effect on life.  */
2799	    ;
2800	  else
2801	    {
2802	      /* Any regs live at the time of a call instruction
2803		 must not go in a register clobbered by calls.
2804		 Find all regs now live and record this for them.  */
2805
2806	      if (GET_CODE (insn) == CALL_INSN && final)
2807		EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2808					   {
2809					     REG_N_CALLS_CROSSED (i)++;
2810					   });
2811
2812	      /* LIVE gets the regs used in INSN;
2813		 DEAD gets those set by it.  Dead insns don't make anything
2814		 live.  */
2815
2816	      mark_set_regs (old, dead, PATTERN (insn),
2817			     final ? insn : NULL_RTX, significant);
2818
2819	      /* If an insn doesn't use CC0, it becomes dead since we
2820		 assume that every insn clobbers it.  So show it dead here;
2821		 mark_used_regs will set it live if it is referenced.  */
2822	      cc0_live = 0;
2823
2824	      if (! insn_is_dead)
2825		mark_used_regs (old, live, PATTERN (insn), final, insn);
2826
2827	      /* Sometimes we may have inserted something before INSN (such as
2828		 a move) when we make an auto-inc.  So ensure we will scan
2829		 those insns.  */
2830#ifdef AUTO_INC_DEC
2831	      prev = PREV_INSN (insn);
2832#endif
2833
2834	      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
2835		{
2836		  register int i;
2837
2838		  rtx note;
2839
2840	          for (note = CALL_INSN_FUNCTION_USAGE (insn);
2841		       note;
2842		       note = XEXP (note, 1))
2843		    if (GET_CODE (XEXP (note, 0)) == USE)
2844		      mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
2845				      final, insn);
2846
2847		  /* Each call clobbers all call-clobbered regs that are not
2848		     global or fixed.  Note that the function-value reg is a
2849		     call-clobbered reg, and mark_set_regs has already had
2850		     a chance to handle it.  */
2851
2852		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2853		    if (call_used_regs[i] && ! global_regs[i]
2854			&& ! fixed_regs[i])
2855		      SET_REGNO_REG_SET (dead, i);
2856
2857		  /* The stack ptr is used (honorarily) by a CALL insn.  */
2858		  SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2859
2860		  /* Calls may also reference any of the global registers,
2861		     so they are made live.  */
2862		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2863		    if (global_regs[i])
2864		      mark_used_regs (old, live,
2865				      gen_rtx_REG (reg_raw_mode[i], i),
2866				      final, insn);
2867
2868		  /* Calls also clobber memory.  */
2869		  mem_set_list = NULL_RTX;
2870		}
2871
2872	      /* Update OLD for the registers used or set.  */
2873	      AND_COMPL_REG_SET (old, dead);
2874	      IOR_REG_SET (old, live);
2875
2876	    }
2877
2878	  /* On final pass, update counts of how many insns each reg is live
2879	     at.  */
2880	  if (final)
2881	    EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2882				       { REG_LIVE_LENGTH (i)++; });
2883	}
2884    flushed: ;
2885      if (insn == first)
2886	break;
2887    }
2888
2889  FREE_REG_SET (dead);
2890  FREE_REG_SET (live);
2891}
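
/* The per-insn update above can be summarized (editor's note) as

	live_before = (live_after - DEAD) union LIVE

   where DEAD holds the registers the insn sets and LIVE the registers
   it uses; OLD holds live_after on entry to each iteration of the scan
   and live_before on exit from it.  */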
2892
2893/* Return 1 if X (the body of an insn, or part of it) is just dead stores
2894   (SET expressions whose destinations are registers dead after the insn).
2895   NEEDED is the regset that says which regs are alive after the insn.
2896
2897   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2898
2899   If X is the entire body of an insn, NOTES contains the reg notes
2900   pertaining to the insn.  */
2901
2902static int
2903insn_dead_p (x, needed, call_ok, notes)
2904     rtx x;
2905     regset needed;
2906     int call_ok;
2907     rtx notes ATTRIBUTE_UNUSED;
2908{
2909  enum rtx_code code = GET_CODE (x);
2910
2911#ifdef AUTO_INC_DEC
2912  /* If flow is invoked after reload, we must take existing AUTO_INC
2913     expressions into account.  */
2914  if (reload_completed)
2915    {
2916      for ( ; notes; notes = XEXP (notes, 1))
2917	{
2918	  if (REG_NOTE_KIND (notes) == REG_INC)
2919	    {
2920	      int regno = REGNO (XEXP (notes, 0));
2921
2922	      /* Don't delete insns to set global regs.  */
2923	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2924		  || REGNO_REG_SET_P (needed, regno))
2925		return 0;
2926	    }
2927	}
2928    }
2929#endif
2930
2931  /* If setting something that's a reg or part of one,
2932     see if that register's altered value will be live.  */
2933
2934  if (code == SET)
2935    {
2936      rtx r = SET_DEST (x);
2937
2938      /* A SET that is a subroutine call cannot be dead.  */
2939      if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2940	return 0;
2941
2942#ifdef HAVE_cc0
2943      if (GET_CODE (r) == CC0)
2944	return ! cc0_live;
2945#endif
2946
2947      if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
2948	{
2949	  rtx temp;
2950	  /* Walk the set of memory locations we are currently tracking
2951	     and see if one is an identical match to this memory location.
2952	     If so, this memory write is dead (remember, we're walking
2953	     backwards from the end of the block to the start).  */
2954	  temp = mem_set_list;
2955	  while (temp)
2956	    {
2957	      if (rtx_equal_p (XEXP (temp, 0), r))
2958		return 1;
2959	      temp = XEXP (temp, 1);
2960	    }
2961	}
2962
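      /* For example (editor's illustration): if a block contains

		(set (mem:SI (reg:SI 100)) (reg:SI 101))
		...
		(set (mem:SI (reg:SI 100)) (reg:SI 102))

	 with nothing in between that reads or may alias that location,
	 then, scanning backwards, the later store is put on mem_set_list
	 first and the earlier, identical store is recognized here as a
	 dead store.  */
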
2963      while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2964	     || GET_CODE (r) == ZERO_EXTRACT)
2965	r = SUBREG_REG (r);
2966
2967      if (GET_CODE (r) == REG)
2968	{
2969	  int regno = REGNO (r);
2970
2971	  /* Don't delete insns to set global regs.  */
2972	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2973	      /* Make sure insns to set frame pointer aren't deleted.  */
2974	      || (regno == FRAME_POINTER_REGNUM
2975		  && (! reload_completed || frame_pointer_needed))
2976#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2977	      || (regno == HARD_FRAME_POINTER_REGNUM
2978		  && (! reload_completed || frame_pointer_needed))
2979#endif
2980#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2981	      /* Make sure insns to set arg pointer are never deleted
2982		 (if the arg pointer isn't fixed, there will be a USE for
2983		 it, so we can treat it normally).  */
2984	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2985#endif
2986	      || REGNO_REG_SET_P (needed, regno))
2987	    return 0;
2988
2989	  /* If this is a hard register, verify that subsequent words are
2990	     not needed.  */
2991	  if (regno < FIRST_PSEUDO_REGISTER)
2992	    {
2993	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2994
2995	      while (--n > 0)
2996		if (REGNO_REG_SET_P (needed, regno+n))
2997		  return 0;
2998	    }
2999
3000	  return 1;
3001	}
3002    }
3003
3004  /* If performing several activities,
3005     insn is dead if each activity is individually dead.
3006     Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3007     that's inside a PARALLEL doesn't make the insn worth keeping.  */
3008  else if (code == PARALLEL)
3009    {
3010      int i = XVECLEN (x, 0);
3011
3012      for (i--; i >= 0; i--)
3013	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3014	    && GET_CODE (XVECEXP (x, 0, i)) != USE
3015	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3016	  return 0;
3017
3018      return 1;
3019    }
3020
3021  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
3022     is not necessarily true for hard registers.  */
3023  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3024	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3025	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3026    return 1;
3027
3028  /* We do not check other CLOBBER or USE here.  An insn consisting of just
3029     a CLOBBER or just a USE should not be deleted.  */
3030  return 0;
3031}
3032
3033/* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3034   return 1 if the entire library call is dead.
3035   This is true if X copies a register (hard or pseudo)
3036   and if the hard return reg of the call insn is dead.
3037   (The caller should have tested the destination of X already for death.)
3038
3039   If this insn doesn't just copy a register, then we don't
3040   have an ordinary libcall.  In that case, cse could not have
3041   managed to substitute the source for the dest later on,
3042   so we can assume the libcall is dead.
3043
3044   NEEDED is the bit vector of pseudoregs live before this insn.
3045   NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */
3046
3047static int
3048libcall_dead_p (x, needed, note, insn)
3049     rtx x;
3050     regset needed;
3051     rtx note;
3052     rtx insn;
3053{
3054  register RTX_CODE code = GET_CODE (x);
3055
3056  if (code == SET)
3057    {
3058      register rtx r = SET_SRC (x);
3059      if (GET_CODE (r) == REG)
3060	{
3061	  rtx call = XEXP (note, 0);
3062	  rtx call_pat;
3063	  register int i;
3064
3065	  /* Find the call insn.  */
3066	  while (call != insn && GET_CODE (call) != CALL_INSN)
3067	    call = NEXT_INSN (call);
3068
3069	  /* If there is none, do nothing special,
3070	     since ordinary death handling can understand these insns.  */
3071	  if (call == insn)
3072	    return 0;
3073
3074	  /* See if the hard reg holding the value is dead.
3075	     If this is a PARALLEL, find the call within it.  */
3076	  call_pat = PATTERN (call);
3077	  if (GET_CODE (call_pat) == PARALLEL)
3078	    {
3079	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3080		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3081		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3082		  break;
3083
3084	      /* This may be a library call that is returning a value
3085		 via invisible pointer.  Do nothing special, since
3086		 ordinary death handling can understand these insns.  */
3087	      if (i < 0)
3088		return 0;
3089
3090	      call_pat = XVECEXP (call_pat, 0, i);
3091	    }
3092
3093	  return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3094	}
3095    }
3096  return 1;
3097}
3098
3099/* Return 1 if register REGNO was used before it was set, i.e. if it is
3100   live at function entry.  Don't count global register variables, variables
3101   in registers that can be used for function arg passing, or variables in
3102   fixed hard registers.  */
3103
3104int
3105regno_uninitialized (regno)
3106     int regno;
3107{
3108  if (n_basic_blocks == 0
3109      || (regno < FIRST_PSEUDO_REGISTER
3110	  && (global_regs[regno]
3111	      || fixed_regs[regno]
3112	      || FUNCTION_ARG_REGNO_P (regno))))
3113    return 0;
3114
3115  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3116}
3117
3118/* 1 if register REGNO was alive at a place where `setjmp' was called
3119   and was set more than once or is an argument.
3120   Such regs may be clobbered by `longjmp'.  */
3121
3122int
3123regno_clobbered_at_setjmp (regno)
3124     int regno;
3125{
3126  if (n_basic_blocks == 0)
3127    return 0;
3128
3129  return ((REG_N_SETS (regno) > 1
3130	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3131	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3132}
3133
3134/* INSN references memory, possibly using autoincrement addressing modes.
3135   Find any entries on the mem_set_list that need to be invalidated due
3136   to an address change.  */
3137static void
3138invalidate_mems_from_autoinc (insn)
3139     rtx insn;
3140{
3141  rtx note = REG_NOTES (insn);
3142  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3143    {
3144      if (REG_NOTE_KIND (note) == REG_INC)
3145        {
3146          rtx temp = mem_set_list;
3147          rtx prev = NULL_RTX;
3148
3149          while (temp)
3150	    {
3151	      if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3152	        {
3153	          /* Splice temp out of list.  */
3154	          if (prev)
3155	            XEXP (prev, 1) = XEXP (temp, 1);
3156	          else
3157	            mem_set_list = XEXP (temp, 1);
3158	        }
3159	      else
3160	        prev = temp;
3161              temp = XEXP (temp, 1);
3162	    }
3163	}
3164    }
3165}
3166
3167/* Process the registers that are set within X.
3168   Their bits are set to 1 in the regset DEAD,
3169   because they are dead prior to this insn.
3170
3171   If INSN is nonzero, it is the insn being processed
3172   and the fact that it is nonzero implies this is the FINAL pass
3173   in propagate_block.  In this case, various info about register
3174   usage is stored, LOG_LINKS fields of insns are set up.  */
3175
3176static void
3177mark_set_regs (needed, dead, x, insn, significant)
3178     regset needed;
3179     regset dead;
3180     rtx x;
3181     rtx insn;
3182     regset significant;
3183{
3184  register RTX_CODE code = GET_CODE (x);
3185
3186  if (code == SET || code == CLOBBER)
3187    mark_set_1 (needed, dead, x, insn, significant);
3188  else if (code == PARALLEL)
3189    {
3190      register int i;
3191      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3192	{
3193	  code = GET_CODE (XVECEXP (x, 0, i));
3194	  if (code == SET || code == CLOBBER)
3195	    mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
3196	}
3197    }
3198}
3199
3200/* Process a single SET rtx, X.  */
3201
3202static void
3203mark_set_1 (needed, dead, x, insn, significant)
3204     regset needed;
3205     regset dead;
3206     rtx x;
3207     rtx insn;
3208     regset significant;
3209{
3210  register int regno;
3211  register rtx reg = SET_DEST (x);
3212
3213  /* Some targets place small structures in registers for
3214     return values of functions.  We have to detect this
3215     case specially here to get correct flow information.  */
3216  if (GET_CODE (reg) == PARALLEL
3217      && GET_MODE (reg) == BLKmode)
3218    {
3219      register int i;
3220
3221      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3222	  mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
3223      return;
3224    }
3225
3226  /* Modifying just one hardware register of a multi-reg value
3227     or just a byte field of a register
3228     does not mean the value from before this insn is now dead.
3229     But it does mean liveness of that register at the end of the block
3230     is significant.
3231
3232     Within mark_set_1, however, we treat it as if the register is
3233     indeed modified.  mark_used_regs will, however, also treat this
3234     register as being used.  Thus, we treat these insns as setting a
3235     new value for the register as a function of its old value.  This
3236     causes LOG_LINKS to be made appropriately, and this will help combine.  */
3237
3238  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3239	 || GET_CODE (reg) == SIGN_EXTRACT
3240	 || GET_CODE (reg) == STRICT_LOW_PART)
3241    reg = XEXP (reg, 0);
3242
3243  /* If this set is a MEM, then it kills any aliased writes.
3244     If this set is a REG, then it kills any MEMs which use the reg.  */
3245  if (GET_CODE (reg) == MEM
3246      || GET_CODE (reg) == REG)
3247    {
3248      rtx temp = mem_set_list;
3249      rtx prev = NULL_RTX;
3250
3251      while (temp)
3252	{
3253	  if ((GET_CODE (reg) == MEM
3254	       && output_dependence (XEXP (temp, 0), reg))
3255	      || (GET_CODE (reg) == REG
3256		  && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3257	    {
3258	      /* Splice this entry out of the list.  */
3259	      if (prev)
3260		XEXP (prev, 1) = XEXP (temp, 1);
3261	      else
3262		mem_set_list = XEXP (temp, 1);
3263	    }
3264	  else
3265	    prev = temp;
3266	  temp = XEXP (temp, 1);
3267	}
3268    }
3269
3270  /* If the memory reference had embedded side effects (autoincrement
3271     address modes), then we may need to kill some entries on the
3272     memory set list.  */
3273  if (insn && GET_CODE (reg) == MEM)
3274    invalidate_mems_from_autoinc (insn);
3275
3276  if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3277      /* We do not know the size of BLKmode stores, so we do not track
3278	 them for redundant store elimination.  */
3279      && GET_MODE (reg) != BLKmode
3280      /* There are no REG_INC notes for SP, so we can't assume we'll see
3281	 everything that invalidates it.  To be safe, don't eliminate any
3282	 stores through SP; none of them should be redundant anyway.  */
3283      && ! reg_mentioned_p (stack_pointer_rtx, reg))
3284    mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list);
3285
3286  if (GET_CODE (reg) == REG
3287      && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM
3288				  && (! reload_completed || frame_pointer_needed)))
3289#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3290      && ! (regno == HARD_FRAME_POINTER_REGNUM
3291	    && (! reload_completed || frame_pointer_needed))
3292#endif
3293#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3294      && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3295#endif
3296      && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3297    /* && regno != STACK_POINTER_REGNUM) -- let's try without this.  */
3298    {
3299      int some_needed = REGNO_REG_SET_P (needed, regno);
3300      int some_not_needed = ! some_needed;
3301
3302      /* Mark it as a significant register for this basic block.  */
3303      if (significant)
3304	SET_REGNO_REG_SET (significant, regno);
3305
3306      /* Mark it as dead before this insn.  */
3307      SET_REGNO_REG_SET (dead, regno);
3308
3309      /* A hard reg in a wide mode may really be multiple registers.
3310	 If so, mark all of them just like the first.  */
3311      if (regno < FIRST_PSEUDO_REGISTER)
3312	{
3313	  int n;
3314
3315	  /* Nothing below is needed for the stack pointer; get out asap.
3316	     Eg, log links aren't needed, since combine won't use them.  */
3317	     E.g., log links aren't needed, since combine won't use them.  */
3318	    return;
3319
3320	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3321	  while (--n > 0)
3322	    {
3323	      int regno_n = regno + n;
3324	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3325	      if (significant)
3326		SET_REGNO_REG_SET (significant, regno_n);
3327
3328	      SET_REGNO_REG_SET (dead, regno_n);
3329	      some_needed |= needed_regno;
3330	      some_not_needed |= ! needed_regno;
3331	    }
3332	}
3333      /* Additional data to record if this is the final pass.  */
3334      if (insn)
3335	{
3336	  register rtx y = reg_next_use[regno];
3337	  register int blocknum = BLOCK_NUM (insn);
3338
3339	  /* If this is a hard reg, record this function uses the reg.  */
3340
3341	  if (regno < FIRST_PSEUDO_REGISTER)
3342	    {
3343	      register int i;
3344	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3345
3346	      for (i = regno; i < endregno; i++)
3347		{
3348		  /* The next use is no longer "next", since a store
3349		     intervenes.  */
3350		  reg_next_use[i] = 0;
3351
3352		  regs_ever_live[i] = 1;
3353		  REG_N_SETS (i)++;
3354		}
3355	    }
3356	  else
3357	    {
3358	      /* The next use is no longer "next", since a store
3359		 intervenes.  */
3360	      reg_next_use[regno] = 0;
3361
3362	      /* Keep track of which basic blocks each reg appears in.  */
3363
3364	      if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3365		REG_BASIC_BLOCK (regno) = blocknum;
3366	      else if (REG_BASIC_BLOCK (regno) != blocknum)
3367		REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3368
3369	      /* Count (weighted) references, stores, etc.  This counts a
3370		 register twice if it is modified, but that is correct.  */
3371	      REG_N_SETS (regno)++;
3372
3373	      REG_N_REFS (regno) += loop_depth;
3374
3375	      /* The insns where a reg is live are normally counted
3376		 elsewhere, but we want the count to include the insn
3377		 where the reg is set, and the normal counting mechanism
3378		 would not count it.  */
3379	      REG_LIVE_LENGTH (regno)++;
3380	    }
3381
3382	  if (! some_not_needed)
3383	    {
3384	      /* Make a logical link from the next following insn
3385		 that uses this register, back to this insn.
3386		 The following insns have already been processed.
3387
3388		 We don't build a LOG_LINK for hard registers contained
3389		 in ASM_OPERANDs.  If these registers get replaced,
3390		 we might wind up changing the semantics of the insn,
3391		 even if reload can make what appear to be valid assignments
3392		 later.  */
3393	      if (y && (BLOCK_NUM (y) == blocknum)
3394		  && (regno >= FIRST_PSEUDO_REGISTER
3395		      || asm_noperands (PATTERN (y)) < 0))
3396		LOG_LINKS (y)
3397		  = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
3398	    }
3399	  else if (! some_needed)
3400	    {
3401	      /* Note that dead stores have already been deleted when possible.
3402		 If we get here, we have found a dead store that cannot
3403		 be eliminated (because the same insn does something useful).
3404		 Indicate this by marking the reg being set as dying here.  */
3405	      REG_NOTES (insn)
3406		= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3407	      REG_N_DEATHS (REGNO (reg))++;
3408	    }
3409	  else
3410	    {
3411	      /* This is a case where we have a multi-word hard register
3412		 and some, but not all, of the words of the register are
3413		 needed in subsequent insns.  Write REG_UNUSED notes
3414		 for those parts that were not needed.  This case should
3415		 be rare.  */
3416
3417	      int i;
3418
3419	      for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
3420		   i >= 0; i--)
3421		if (!REGNO_REG_SET_P (needed, regno + i))
3422		  REG_NOTES (insn)
3423		    = gen_rtx_EXPR_LIST (REG_UNUSED,
3424					 gen_rtx_REG (reg_raw_mode[regno + i],
3425						      regno + i),
3426					 REG_NOTES (insn));
3427	    }
3428	}
3429    }
3430  else if (GET_CODE (reg) == REG)
3431    reg_next_use[regno] = 0;
3432
3433  /* If this is the last pass and this is a SCRATCH, show it will be dying
3434     here and count it.  */
3435  else if (GET_CODE (reg) == SCRATCH && insn != 0)
3436    {
3437      REG_NOTES (insn)
3438	= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3439    }
3440}
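
/* An illustration (editor's sketch, on a hypothetical 32-bit target
   where (reg:DI 0) occupies hard registers 0 and 1) of the partial
   REG_UNUSED case above: if an insn sets (reg:DI 0) and only one of
   the two words is needed afterwards, the word that is not needed
   gets its own note, e.g.

	(expr_list:REG_UNUSED (reg:SI 1) (nil))

   even though the insn as a whole is kept.  */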
3441
3442#ifdef AUTO_INC_DEC
3443
3444/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
3445   reference.  */
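
/* For illustration only (register numbers invented, no particular target
   assumed): given a use

	... (mem:SI (reg:SI 100)) ...

   in INSN, whose next use of (reg 100) is the increment

	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   in the same block, and where (reg 100) dies in that increment, the MEM
   can become (mem:SI (post_inc:SI (reg:SI 100))) and the increment insn
   turned into a deleted note, provided the target has POST_INC addressing
   and validate_change accepts the rewritten insn.  */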
3446
3447static void
3448find_auto_inc (needed, x, insn)
3449     regset needed;
3450     rtx x;
3451     rtx insn;
3452{
3453  rtx addr = XEXP (x, 0);
3454  HOST_WIDE_INT offset = 0;
3455  rtx set;
3456
3457  /* Here we detect use of an index register which might be good for
3458     postincrement, postdecrement, preincrement, or predecrement.  */
3459
3460  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3461    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3462
3463  if (GET_CODE (addr) == REG)
3464    {
3465      register rtx y;
3466      register int size = GET_MODE_SIZE (GET_MODE (x));
3467      rtx use;
3468      rtx incr;
3469      int regno = REGNO (addr);
3470
3471      /* Is the next use an increment that might make auto-increment? */
3472      if ((incr = reg_next_use[regno]) != 0
3473	  && (set = single_set (incr)) != 0
3474	  && GET_CODE (set) == SET
3475	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
3476	  /* Can't add side effects to jumps; if reg is spilled and
3477	     reloaded, there's no way to store back the altered value.  */
3478	  && GET_CODE (insn) != JUMP_INSN
3479	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
3480	  && XEXP (y, 0) == addr
3481	  && GET_CODE (XEXP (y, 1)) == CONST_INT
3482	  && ((HAVE_POST_INCREMENT
3483	       && (INTVAL (XEXP (y, 1)) == size && offset == 0))
3484	      || (HAVE_POST_DECREMENT
3485		  && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
3486	      || (HAVE_PRE_INCREMENT
3487		  && (INTVAL (XEXP (y, 1)) == size && offset == size))
3488	      || (HAVE_PRE_DECREMENT
3489		  && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
3490	  /* Make sure this reg appears only once in this insn.  */
3491	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
3492	      use != 0 && use != (rtx) 1))
3493	{
3494	  rtx q = SET_DEST (set);
3495	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
3496				    ? (offset ? PRE_INC : POST_INC)
3497				    : (offset ? PRE_DEC : POST_DEC));
3498
3499	  if (dead_or_set_p (incr, addr))
3500	    {
3501	      /* This is the simple case.  Try to make the auto-inc.  If
3502		 we can't, we are done.  Otherwise, we will do any
3503		 needed updates below.  */
3504	      if (! validate_change (insn, &XEXP (x, 0),
3505				     gen_rtx_fmt_e (inc_code, Pmode, addr),
3506				     0))
3507		return;
3508	    }
3509	  else if (GET_CODE (q) == REG
3510		   /* PREV_INSN used here to check the semi-open interval
3511		      [insn,incr).  */
3512		   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
3513		   /* We must also check for sets of q as q may be
3514		      a call clobbered hard register and there may
3515		      be a call between PREV_INSN (insn) and incr.  */
3516		   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
3517	    {
3518	      /* We have *p followed sometime later by q = p+size.
3519		 Both p and q must be live afterward,
3520		 and q is not used between INSN and its assignment.
3521		 Change it to q = p, ...*q..., q = q+size.
3522		 Then fall into the usual case.  */
3523	      rtx insns, temp;
3524	      basic_block bb;
3525
3526	      start_sequence ();
3527	      emit_move_insn (q, addr);
3528	      insns = get_insns ();
3529	      end_sequence ();
3530
3531	      bb = BLOCK_FOR_INSN (insn);
3532	      for (temp = insns; temp; temp = NEXT_INSN (temp))
3533		set_block_for_insn (temp, bb);
3534
3535	      /* If we can't make the auto-inc, or can't make the
3536		 replacement into Y, exit.  There's no point in making
3537		 the change below if we can't do the auto-inc and doing
3538		 so is not correct in the pre-inc case.  */
3539
3540	      validate_change (insn, &XEXP (x, 0),
3541			       gen_rtx_fmt_e (inc_code, Pmode, q),
3542			       1);
3543	      validate_change (incr, &XEXP (y, 0), q, 1);
3544	      if (! apply_change_group ())
3545		return;
3546
3547	      /* We now know we'll be doing this change, so emit the
3548		 new insn(s) and do the updates.  */
3549	      emit_insns_before (insns, insn);
3550
3551	      if (BLOCK_FOR_INSN (insn)->head == insn)
3552		BLOCK_FOR_INSN (insn)->head = insns;
3553
3554	      /* INCR will become a NOTE and INSN won't contain a
3555		 use of ADDR.  If a use of ADDR was just placed in
3556		 the insn before INSN, make that the next use.
3557		 Otherwise, invalidate it.  */
3558	      if (GET_CODE (PREV_INSN (insn)) == INSN
3559		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3560		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
3561		reg_next_use[regno] = PREV_INSN (insn);
3562	      else
3563		reg_next_use[regno] = 0;
3564
3565	      addr = q;
3566	      regno = REGNO (q);
3567
3568	      /* REGNO is now used in INCR which is below INSN, but
3569		 it previously wasn't live here.  If we don't mark
3570		 it as needed, we'll put a REG_DEAD note for it
3571		 on this insn, which is incorrect.  */
3572	      SET_REGNO_REG_SET (needed, regno);
3573
3574	      /* If there are any calls between INSN and INCR, show
3575		 that REGNO now crosses them.  */
3576	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3577		if (GET_CODE (temp) == CALL_INSN)
3578		  REG_N_CALLS_CROSSED (regno)++;
3579	    }
3580	  else
3581	    return;
3582
3583	  /* If we haven't returned, it means we were able to make the
3584	     auto-inc, so update the status.  First, record that this insn
3585	     has an implicit side effect.  */
3586
3587	  REG_NOTES (insn)
3588	    = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
3589
3590	  /* Modify the old increment-insn to simply copy
3591	     the already-incremented value of our register.  */
3592	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
3593	    abort ();
3594
3595	  /* If that makes it a no-op (copying the register into itself) delete
3596	     it so it won't appear to be a "use" and a "set" of this
3597	     register.  */
3598	  if (SET_DEST (set) == addr)
3599	    {
3600	      PUT_CODE (incr, NOTE);
3601	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3602	      NOTE_SOURCE_FILE (incr) = 0;
3603	    }
3604
3605	  if (regno >= FIRST_PSEUDO_REGISTER)
3606	    {
3607	      /* Count an extra reference to the reg.  When a reg is
3608		 incremented, spilling it is worse, so we want to make
3609		 that less likely.  */
3610	      REG_N_REFS (regno) += loop_depth;
3611
3612	      /* Count the increment as a setting of the register,
3613		 even though it isn't a SET in rtl.  */
3614	      REG_N_SETS (regno)++;
3615	    }
3616	}
3617    }
3618}
3619#endif /* AUTO_INC_DEC */
3620
3621/* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3622   This is done assuming the registers needed from X
3623   are those that have 1-bits in NEEDED.
3624
3625   On the final pass, FINAL is 1.  This means try for autoincrement
3626   and count the uses and deaths of each pseudo-reg.
3627
3628   INSN is the containing instruction.  If INSN is dead, this function is not
3629   called.  */
3630
3631static void
3632mark_used_regs (needed, live, x, final, insn)
3633     regset needed;
3634     regset live;
3635     rtx x;
3636     int final;
3637     rtx insn;
3638{
3639  register RTX_CODE code;
3640  register int regno;
3641  int i;
3642
3643 retry:
3644  code = GET_CODE (x);
3645  switch (code)
3646    {
3647    case LABEL_REF:
3648    case SYMBOL_REF:
3649    case CONST_INT:
3650    case CONST:
3651    case CONST_DOUBLE:
3652    case PC:
3653    case ADDR_VEC:
3654    case ADDR_DIFF_VEC:
3655      return;
3656
3657#ifdef HAVE_cc0
3658    case CC0:
3659      cc0_live = 1;
3660      return;
3661#endif
3662
3663    case CLOBBER:
3664      /* If we are clobbering a MEM, mark any registers inside the address
3665	 as being used.  */
3666      if (GET_CODE (XEXP (x, 0)) == MEM)
3667	mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
3668      return;
3669
3670    case MEM:
3671      /* Invalidate the data for the last MEM stored, but only if MEM is
3672	 something that can be stored into.  */
3673      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3674	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3675	; /* needn't clear the memory set list */
3676      else
3677	{
3678	  rtx temp = mem_set_list;
3679	  rtx prev = NULL_RTX;
3680
3681	  while (temp)
3682	    {
3683	      if (anti_dependence (XEXP (temp, 0), x))
3684		{
3685		  /* Splice temp out of the list.  */
3686		  if (prev)
3687		    XEXP (prev, 1) = XEXP (temp, 1);
3688		  else
3689		    mem_set_list = XEXP (temp, 1);
3690		}
3691	      else
3692		prev = temp;
3693	      temp = XEXP (temp, 1);
3694	    }
3695	}
3696
3697      /* If the memory reference had embedded side effects (autoincrement
3698	 address modes), then we may need to kill some entries on the
3699	 memory set list.  */
3700      if (insn)
3701	invalidate_mems_from_autoinc (insn);
3702
3703#ifdef AUTO_INC_DEC
3704      if (final)
3705	find_auto_inc (needed, x, insn);
3706#endif
3707      break;
3708
3709    case SUBREG:
3710      if (GET_CODE (SUBREG_REG (x)) == REG
3711	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3712	  && (GET_MODE_SIZE (GET_MODE (x))
3713	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3714	REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
3715
3716      /* While we're here, optimize this case.  */
3717      x = SUBREG_REG (x);
3718
3719      /* In case the SUBREG is not of a register, don't optimize */
3720      if (GET_CODE (x) != REG)
3721	{
3722	  mark_used_regs (needed, live, x, final, insn);
3723	  return;
3724	}
3725
3726      /* ... fall through ...  */
3727
3728    case REG:
3729      /* See a register being used (other than being set)
3730	 => mark it as needed.  */
3731
3732      regno = REGNO (x);
3733      {
3734	int some_needed = REGNO_REG_SET_P (needed, regno);
3735	int some_not_needed = ! some_needed;
3736
3737	SET_REGNO_REG_SET (live, regno);
3738
3739	/* A hard reg in a wide mode may really be multiple registers.
3740	   If so, mark all of them just like the first.  */
3741	if (regno < FIRST_PSEUDO_REGISTER)
3742	  {
3743	    int n;
3744
3745	    /* For stack ptr or fixed arg pointer,
3746	       nothing below can be necessary, so waste no more time.  */
3747	    if (regno == STACK_POINTER_REGNUM
3748#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3749		|| (regno == HARD_FRAME_POINTER_REGNUM
3750		    && (! reload_completed || frame_pointer_needed))
3751#endif
3752#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3753		|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3754#endif
3755		|| (regno == FRAME_POINTER_REGNUM
3756		    && (! reload_completed || frame_pointer_needed)))
3757	      {
3758		/* If this is a register we are going to try to eliminate,
3759		   don't mark it live here.  If we are successful in
3760		   eliminating it, it need not be live unless it is used for
3761		   pseudos, in which case it will have been set live when
3762		   it was allocated to the pseudos.  If the register will not
3763		   be eliminated, reload will set it live at that point.  */
3764
3765		if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
3766		  regs_ever_live[regno] = 1;
3767		return;
3768	      }
3769	    /* No death notes for global register variables;
3770	       their values are live after this function exits.  */
3771	    if (global_regs[regno])
3772	      {
3773		if (final)
3774		  reg_next_use[regno] = insn;
3775		return;
3776	      }
3777
3778	    n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3779	    while (--n > 0)
3780	      {
3781		int regno_n = regno + n;
3782		int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3783
3784		SET_REGNO_REG_SET (live, regno_n);
3785		some_needed |= needed_regno;
3786		some_not_needed |= ! needed_regno;
3787	      }
3788	  }
3789	if (final)
3790	  {
3791	    /* Record where each reg is used, so when the reg
3792	       is set we know the next insn that uses it.  */
3793
3794	    reg_next_use[regno] = insn;
3795
3796	    if (regno < FIRST_PSEUDO_REGISTER)
3797	      {
3798		/* If a hard reg is being used,
3799		   record that this function does use it.  */
3800
3801		i = HARD_REGNO_NREGS (regno, GET_MODE (x));
3802		if (i == 0)
3803		  i = 1;
3804		do
3805		  regs_ever_live[regno + --i] = 1;
3806		while (i > 0);
3807	      }
3808	    else
3809	      {
3810		/* Keep track of which basic block each reg appears in.  */
3811
3812		register int blocknum = BLOCK_NUM (insn);
3813
3814		if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3815		  REG_BASIC_BLOCK (regno) = blocknum;
3816		else if (REG_BASIC_BLOCK (regno) != blocknum)
3817		  REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3818
3819		/* Count (weighted) number of uses of each reg.  */
3820
3821		REG_N_REFS (regno) += loop_depth;
3822	      }
3823
3824	    /* Record and count the insns in which a reg dies.
3825	       If it is used in this insn and was dead below the insn
3826	       then it dies in this insn.  If it was set in this insn,
3827	       we do not make a REG_DEAD note; likewise if we already
3828	       made such a note.  */
3829
3830	    if (some_not_needed
3831		&& ! dead_or_set_p (insn, x)
3832#if 0
3833		&& (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
3834#endif
3835		)
3836	      {
3837		/* Check for the case where the register dying partially
3838		   overlaps the register set by this insn.  */
3839		if (regno < FIRST_PSEUDO_REGISTER
3840		    && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
3841		  {
3842		    int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3843		    while (--n >= 0)
3844		      some_needed |= dead_or_set_regno_p (insn, regno + n);
3845		  }
3846
3847		/* If none of the words in X is needed, make a REG_DEAD
3848		   note.  Otherwise, we must make partial REG_DEAD notes.  */
3849		if (! some_needed)
3850		  {
3851		    REG_NOTES (insn)
3852		      = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
3853		    REG_N_DEATHS (regno)++;
3854		  }
3855		else
3856		  {
3857		    int i;
3858
3859		    /* Don't make a REG_DEAD note for a part of a register
3860		       that is set in the insn.  */
3861
3862		    for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
3863			 i >= 0; i--)
3864		      if (!REGNO_REG_SET_P (needed, regno + i)
3865			  && ! dead_or_set_regno_p (insn, regno + i))
3866			REG_NOTES (insn)
3867			  = gen_rtx_EXPR_LIST (REG_DEAD,
3868					       gen_rtx_REG (reg_raw_mode[regno + i],
3869							    regno + i),
3870					       REG_NOTES (insn));
3871		  }
3872	      }
3873	  }
3874      }
3875      return;
3876
3877    case SET:
3878      {
3879	register rtx testreg = SET_DEST (x);
3880	int mark_dest = 0;
3881
3882	/* If storing into MEM, don't show it as being used.  But do
3883	   show the address as being used.  */
3884	if (GET_CODE (testreg) == MEM)
3885	  {
3886#ifdef AUTO_INC_DEC
3887	    if (final)
3888	      find_auto_inc (needed, testreg, insn);
3889#endif
3890	    mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
3891	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
3892	    return;
3893	  }
3894
3895	/* Storing in STRICT_LOW_PART is like storing in a reg
3896	   in that this SET might be dead, so ignore it in TESTREG,
3897	   but in some other ways it is like using the reg.
3898
3899	   Storing in a SUBREG or a bit field is like storing the entire
3900	   register in that if the register's value is not used
3901	   then this SET is not needed.  */
3902	while (GET_CODE (testreg) == STRICT_LOW_PART
3903	       || GET_CODE (testreg) == ZERO_EXTRACT
3904	       || GET_CODE (testreg) == SIGN_EXTRACT
3905	       || GET_CODE (testreg) == SUBREG)
3906	  {
3907	    if (GET_CODE (testreg) == SUBREG
3908		&& GET_CODE (SUBREG_REG (testreg)) == REG
3909		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3910		&& (GET_MODE_SIZE (GET_MODE (testreg))
3911		    != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
3912	      REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
3913
3914	    /* Modifying a single register in an alternate mode
3915	       does not use any of the old value.  But these other
3916	       ways of storing in a register do use the old value.  */
3917	    if (GET_CODE (testreg) == SUBREG
3918		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
3919	      ;
3920	    else
3921	      mark_dest = 1;
3922
3923	    testreg = XEXP (testreg, 0);
3924	  }
3925
3926	/* If this is a store into a register,
3927	   recursively scan the value being stored.  */
3928
3929	if ((GET_CODE (testreg) == PARALLEL
3930	     && GET_MODE (testreg) == BLKmode)
3931	    || (GET_CODE (testreg) == REG
3932		&& (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
3933						&& (! reload_completed || frame_pointer_needed)))
3934#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3935		&& ! (regno == HARD_FRAME_POINTER_REGNUM
3936		      && (! reload_completed || frame_pointer_needed))
3937#endif
3938#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3939		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3940#endif
3941		))
3942	  /* We used to exclude global_regs here, but that seems wrong.
3943	     Storing in them is like storing in mem.  */
3944	  {
3945	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
3946	    if (mark_dest)
3947	      mark_used_regs (needed, live, SET_DEST (x), final, insn);
3948	    return;
3949	  }
3950      }
3951      break;
3952
3953    case RETURN:
3954      /* If exiting needs the right stack value, consider this insn as
3955	 using the stack pointer.  In any event, consider it as using
3956	 all global registers and all registers used by return.  */
3957      if (! EXIT_IGNORE_STACK
3958	  || (! FRAME_POINTER_REQUIRED
3959	      && ! current_function_calls_alloca
3960	      && flag_omit_frame_pointer)
3961	  || current_function_sp_is_unchanging)
3962	SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3963
3964      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3965	if (global_regs[i]
3966#ifdef EPILOGUE_USES
3967	    || EPILOGUE_USES (i)
3968#endif
3969	    )
3970	  SET_REGNO_REG_SET (live, i);
3971      break;
3972
3973    case ASM_OPERANDS:
3974    case UNSPEC_VOLATILE:
3975    case TRAP_IF:
3976    case ASM_INPUT:
3977      {
3978	/* Traditional and volatile asm instructions must be considered to use
3979	   and clobber all hard registers, all pseudo-registers and all of
3980	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
3981
3982	   Consider for instance a volatile asm that changes the fpu rounding
3983	   mode.  An insn should not be moved across this even if it only uses
3984	   pseudo-regs because it might give an incorrectly rounded result.
3985
3986	   ?!? Unfortunately, marking all hard registers as live causes massive
3987	   problems for the register allocator and marking all pseudos as live
3988	   creates mountains of uninitialized variable warnings.
3989
3990	   So for now, just clear the memory set list and mark any regs
3991	   we can find in ASM_OPERANDS as used.  */
3992	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3993	  mem_set_list = NULL_RTX;
3994
3995        /* For all ASM_OPERANDS, we must traverse the vector of input operands.
3996	   We cannot just fall through here since then we would be confused
3997	   by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
3998	   traditional asms the way a standalone ASM_INPUT does.  */
3999	if (code == ASM_OPERANDS)
4000	  {
4001	    int j;
4002
4003	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4004	      mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4005			      final, insn);
4006	  }
4007	break;
4008      }
4009
4010
4011    default:
4012      break;
4013    }
4014
4015  /* Recursively scan the operands of this expression.  */
4016
4017  {
4018    register char *fmt = GET_RTX_FORMAT (code);
4019    register int i;
4020
4021    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4022      {
4023	if (fmt[i] == 'e')
4024	  {
4025	    /* Tail recursive case: save a function call level.  */
4026	    if (i == 0)
4027	      {
4028		x = XEXP (x, 0);
4029		goto retry;
4030	      }
4031	    mark_used_regs (needed, live, XEXP (x, i), final, insn);
4032	  }
4033	else if (fmt[i] == 'E')
4034	  {
4035	    register int j;
4036	    for (j = 0; j < XVECLEN (x, i); j++)
4037	      mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
4038	  }
4039      }
4040  }
4041}
4042
4043#ifdef AUTO_INC_DEC
4044
4045static int
4046try_pre_increment_1 (insn)
4047     rtx insn;
4048{
4049  /* Find the next use of this reg.  If in same basic block,
4050     make it do pre-increment or pre-decrement if appropriate.  */
4051  rtx x = single_set (insn);
4052  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4053		* INTVAL (XEXP (SET_SRC (x), 1)));
4054  int regno = REGNO (SET_DEST (x));
4055  rtx y = reg_next_use[regno];
4056  if (y != 0
4057      && BLOCK_NUM (y) == BLOCK_NUM (insn)
4058      /* Don't do this if the reg dies, or gets set in y; a standard addressing
4059	 mode would be better.  */
4060      && ! dead_or_set_p (y, SET_DEST (x))
4061      && try_pre_increment (y, SET_DEST (x), amount))
4062    {
4063      /* We have found a suitable auto-increment
4064	 and already changed insn Y to do it.
4065	 So flush this increment-instruction.  */
4066      PUT_CODE (insn, NOTE);
4067      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4068      NOTE_SOURCE_FILE (insn) = 0;
4069      /* Count a reference to this reg for the increment
4070	 insn we are deleting.  When a reg is incremented,
4071	 spilling it is worse, so we want to make that
4072	 less likely.  */
4073      if (regno >= FIRST_PSEUDO_REGISTER)
4074	{
4075	  REG_N_REFS (regno) += loop_depth;
4076	  REG_N_SETS (regno)++;
4077	}
4078      return 1;
4079    }
4080  return 0;
4081}
4082
4083/* Try to change INSN so that it does pre-increment or pre-decrement
4084   addressing on register REG in order to add AMOUNT to REG.
4085   AMOUNT is negative for pre-decrement.
4086   Returns 1 if the change could be made.
4087   This checks all aspects of the validity of the result of modifying INSN.  */
4088
4089static int
4090try_pre_increment (insn, reg, amount)
4091     rtx insn, reg;
4092     HOST_WIDE_INT amount;
4093{
4094  register rtx use;
4095
4096  /* Nonzero if we can try to make a pre-increment or pre-decrement.
4097     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4098  int pre_ok = 0;
4099  /* Nonzero if we can try to make a post-increment or post-decrement.
4100     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4101     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4102     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4103  int post_ok = 0;
4104
4105  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4106  int do_post = 0;
4107
4108  /* From the sign of the increment, see which possibilities are conceivable
4109     on this target machine.  */
4110  if (HAVE_PRE_INCREMENT && amount > 0)
4111    pre_ok = 1;
4112  if (HAVE_POST_INCREMENT && amount > 0)
4113    post_ok = 1;
4114
4115  if (HAVE_PRE_DECREMENT && amount < 0)
4116    pre_ok = 1;
4117  if (HAVE_POST_DECREMENT && amount < 0)
4118    post_ok = 1;
4119
4120  if (! (pre_ok || post_ok))
4121    return 0;
4122
4123  /* It is not safe to add a side effect to a jump insn
4124     because if the incremented register is spilled and must be reloaded
4125     there would be no way to store the incremented value back in memory.  */
4126
4127  if (GET_CODE (insn) == JUMP_INSN)
4128    return 0;
4129
4130  use = 0;
4131  if (pre_ok)
4132    use = find_use_as_address (PATTERN (insn), reg, 0);
4133  if (post_ok && (use == 0 || use == (rtx) 1))
4134    {
4135      use = find_use_as_address (PATTERN (insn), reg, -amount);
4136      do_post = 1;
4137    }
4138
4139  if (use == 0 || use == (rtx) 1)
4140    return 0;
4141
4142  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4143    return 0;
4144
4145  /* See if this combination of instruction and addressing mode exists.  */
4146  if (! validate_change (insn, &XEXP (use, 0),
4147			 gen_rtx_fmt_e (amount > 0
4148					? (do_post ? POST_INC : PRE_INC)
4149					: (do_post ? POST_DEC : PRE_DEC),
4150					Pmode, reg), 0))
4151    return 0;
4152
4153  /* Record that this insn now has an implicit side effect on X.  */
4154  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4155  return 1;
4156}
4157
4158#endif /* AUTO_INC_DEC */
4159
4160/* Find the place in the rtx X where REG is used as a memory address.
4161   Return the MEM rtx that so uses it.
4162   If PLUSCONST is nonzero, search instead for a memory address equivalent to
4163   (plus REG (const_int PLUSCONST)).
4164
4165   If such an address does not appear, return 0.
4166   If REG appears more than once, or is used other than in such an address,
4167   return (rtx)1.  */
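
/* A small sketch of the intended behavior (hypothetical pseudo register
   numbers): for

	X = (set (reg:SI 101) (mem:SI (plus:SI (reg:SI 100) (const_int 4))))

   find_use_as_address (X, <reg 100>, 4) returns the MEM, while
   find_use_as_address (X, <reg 100>, 0) returns (rtx) 1, because the only
   occurrence of (reg 100) is not in an address of the requested form.  */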
4168
4169rtx
4170find_use_as_address (x, reg, plusconst)
4171     register rtx x;
4172     rtx reg;
4173     HOST_WIDE_INT plusconst;
4174{
4175  enum rtx_code code = GET_CODE (x);
4176  char *fmt = GET_RTX_FORMAT (code);
4177  register int i;
4178  register rtx value = 0;
4179  register rtx tem;
4180
4181  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4182    return x;
4183
4184  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4185      && XEXP (XEXP (x, 0), 0) == reg
4186      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4187      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4188    return x;
4189
4190  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4191    {
4192      /* If REG occurs inside a MEM used in a bit-field reference,
4193	 that is unacceptable.  */
4194      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4195	return (rtx) (HOST_WIDE_INT) 1;
4196    }
4197
4198  if (x == reg)
4199    return (rtx) (HOST_WIDE_INT) 1;
4200
4201  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4202    {
4203      if (fmt[i] == 'e')
4204	{
4205	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4206	  if (value == 0)
4207	    value = tem;
4208	  else if (tem != 0)
4209	    return (rtx) (HOST_WIDE_INT) 1;
4210	}
4211      if (fmt[i] == 'E')
4212	{
4213	  register int j;
4214	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4215	    {
4216	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4217	      if (value == 0)
4218		value = tem;
4219	      else if (tem != 0)
4220		return (rtx) (HOST_WIDE_INT) 1;
4221	    }
4222	}
4223    }
4224
4225  return value;
4226}
4227
4228/* Write information about registers and basic blocks into FILE.
4229   This is part of making a debugging dump.  */
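
/* The per-register lines written below have roughly this shape (all of the
   numbers here are invented for illustration):

	Register 107 used 4 times across 12 insns in block 3; set 2 times.

   They are followed by a per-block summary of predecessors, successors and
   the registers live at the start and end of each basic block.  */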
4230
4231void
4232dump_flow_info (file)
4233     FILE *file;
4234{
4235  register int i;
4236  static char *reg_class_names[] = REG_CLASS_NAMES;
4237
4238  fprintf (file, "%d registers.\n", max_regno);
4239  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4240    if (REG_N_REFS (i))
4241      {
4242	enum reg_class class, altclass;
4243	fprintf (file, "\nRegister %d used %d times across %d insns",
4244		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4245	if (REG_BASIC_BLOCK (i) >= 0)
4246	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4247	if (REG_N_SETS (i))
4248  	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
4249   		   (REG_N_SETS (i) == 1) ? "" : "s");
4250	if (REG_USERVAR_P (regno_reg_rtx[i]))
4251  	  fprintf (file, "; user var");
4252	if (REG_N_DEATHS (i) != 1)
4253	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4254	if (REG_N_CALLS_CROSSED (i) == 1)
4255	  fprintf (file, "; crosses 1 call");
4256	else if (REG_N_CALLS_CROSSED (i))
4257	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4258	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4259	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4260	class = reg_preferred_class (i);
4261	altclass = reg_alternate_class (i);
4262	if (class != GENERAL_REGS || altclass != ALL_REGS)
4263	  {
4264	    if (altclass == ALL_REGS || class == ALL_REGS)
4265	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
4266	    else if (altclass == NO_REGS)
4267	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
4268	    else
4269	      fprintf (file, "; pref %s, else %s",
4270		       reg_class_names[(int) class],
4271		       reg_class_names[(int) altclass]);
4272	  }
4273	if (REGNO_POINTER_FLAG (i))
4274	  fprintf (file, "; pointer");
4275	fprintf (file, ".\n");
4276      }
4277
4278  fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
4279  for (i = 0; i < n_basic_blocks; i++)
4280    {
4281      register basic_block bb = BASIC_BLOCK (i);
4282      register int regno;
4283      register edge e;
4284
4285      fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4286	       i, INSN_UID (bb->head), INSN_UID (bb->end));
4287
4288      fprintf (file, "Predecessors: ");
4289      for (e = bb->pred; e ; e = e->pred_next)
4290	dump_edge_info (file, e, 0);
4291
4292      fprintf (file, "\nSuccessors: ");
4293      for (e = bb->succ; e ; e = e->succ_next)
4294	dump_edge_info (file, e, 1);
4295
4296      fprintf (file, "\nRegisters live at start:");
4297      if (bb->global_live_at_start)
4298	{
4299          for (regno = 0; regno < max_regno; regno++)
4300	    if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4301	      fprintf (file, " %d", regno);
4302	}
4303      else
4304	fprintf (file, " n/a");
4305
4306      fprintf (file, "\nRegisters live at end:");
4307      if (bb->global_live_at_end)
4308	{
4309          for (regno = 0; regno < max_regno; regno++)
4310	    if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4311	      fprintf (file, " %d", regno);
4312	}
4313      else
4314	fprintf (file, " n/a");
4315
4316      putc('\n', file);
4317    }
4318
4319  putc('\n', file);
4320}
4321
4322static void
4323dump_edge_info (file, e, do_succ)
4324     FILE *file;
4325     edge e;
4326     int do_succ;
4327{
4328  basic_block side = (do_succ ? e->dest : e->src);
4329
4330  if (side == ENTRY_BLOCK_PTR)
4331    fputs (" ENTRY", file);
4332  else if (side == EXIT_BLOCK_PTR)
4333    fputs (" EXIT", file);
4334  else
4335    fprintf (file, " %d", side->index);
4336
4337  if (e->flags)
4338    {
4339      static char * bitnames[] = {
4340	"fallthru", "crit", "ab", "abcall", "eh", "fake"
4341      };
4342      int comma = 0;
4343      int i, flags = e->flags;
4344
4345      fputc (' ', file);
4346      fputc ('(', file);
4347      for (i = 0; flags; i++)
4348	if (flags & (1 << i))
4349	  {
4350	    flags &= ~(1 << i);
4351
4352	    if (comma)
4353	      fputc (',', file);
4354	    if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4355	      fputs (bitnames[i], file);
4356	    else
4357	      fprintf (file, "%d", i);
4358	    comma = 1;
4359	  }
4360      fputc (')', file);
4361    }
4362}
4363
4364
4365/* Like print_rtl, but also print out live information for the start of each
4366   basic block.  */
4367
4368void
4369print_rtl_with_bb (outf, rtx_first)
4370     FILE *outf;
4371     rtx rtx_first;
4372{
4373  register rtx tmp_rtx;
4374
4375  if (rtx_first == 0)
4376    fprintf (outf, "(nil)\n");
4377  else
4378    {
4379      int i;
4380      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
4381      int max_uid = get_max_uid ();
4382      basic_block *start = (basic_block *)
4383	alloca (max_uid * sizeof (basic_block));
4384      basic_block *end = (basic_block *)
4385	alloca (max_uid * sizeof (basic_block));
4386      enum bb_state *in_bb_p = (enum bb_state *)
4387	alloca (max_uid * sizeof (enum bb_state));
4388
4389      memset (start, 0, max_uid * sizeof (basic_block));
4390      memset (end, 0, max_uid * sizeof (basic_block));
4391      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
4392
4393      for (i = n_basic_blocks - 1; i >= 0; i--)
4394	{
4395	  basic_block bb = BASIC_BLOCK (i);
4396	  rtx x;
4397
4398	  start[INSN_UID (bb->head)] = bb;
4399	  end[INSN_UID (bb->end)] = bb;
4400	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
4401	    {
4402	      enum bb_state state = IN_MULTIPLE_BB;
4403	      if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
4404		state = IN_ONE_BB;
4405	      in_bb_p[INSN_UID(x)] = state;
4406
4407	      if (x == bb->end)
4408		break;
4409	    }
4410	}
4411
4412      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
4413	{
4414	  int did_output;
4415	  basic_block bb;
4416
4417	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
4418	    {
4419	      fprintf (outf, ";; Start of basic block %d, registers live:",
4420		       bb->index);
4421
4422	      EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
4423					 {
4424					   fprintf (outf, " %d", i);
4425					   if (i < FIRST_PSEUDO_REGISTER)
4426					     fprintf (outf, " [%s]",
4427						      reg_names[i]);
4428					 });
4429	      putc ('\n', outf);
4430	    }
4431
4432	  if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
4433	      && GET_CODE (tmp_rtx) != NOTE
4434	      && GET_CODE (tmp_rtx) != BARRIER
4435	      && ! obey_regdecls)
4436	    fprintf (outf, ";; Insn is not within a basic block\n");
4437	  else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
4438	    fprintf (outf, ";; Insn is in multiple basic blocks\n");
4439
4440	  did_output = print_rtl_single (outf, tmp_rtx);
4441
4442	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
4443	    fprintf (outf, ";; End of basic block %d\n", bb->index);
4444
4445	  if (did_output)
4446	    putc ('\n', outf);
4447	}
4448    }
4449}
4450
4451
4452/* Integer list support.  */
4453
4454/* Allocate a node from list *HEAD_PTR.  */
4455
4456static int_list_ptr
4457alloc_int_list_node (head_ptr)
4458     int_list_block **head_ptr;
4459{
4460  struct int_list_block *first_blk = *head_ptr;
4461
4462  if (first_blk == NULL || first_blk->nodes_left <= 0)
4463    {
4464      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
4465      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
4466      first_blk->next = *head_ptr;
4467      *head_ptr = first_blk;
4468    }
4469
4470  first_blk->nodes_left--;
4471  return &first_blk->nodes[first_blk->nodes_left];
4472}
4473
4474/* Pointer to head of predecessor/successor block list.  */
4475static int_list_block *pred_int_list_blocks;
4476
4477/* Add a new node to integer list LIST with value VAL.
4478   LIST is a pointer to a list object to allow for different implementations.
4479   If *LIST is initially NULL, the list is empty.
4480   The caller must not care whether the element is added to the front or
4481   to the end of the list (to allow for different implementations).  */
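
/* A minimal usage sketch (hypothetical variables, mirroring the callers
   below):

	int_list_block *blocks = NULL;
	int_list_ptr list = NULL, p;

	add_int_list_node (&blocks, &list, 3);
	add_int_list_node (&blocks, &list, 7);
	for (p = list; p != NULL; p = p->next)
	  ... use INT_LIST_VAL (p) ...
	free_int_list (&blocks);  */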
4482
4483static int_list_ptr
4484add_int_list_node (blk_list, list, val)
4485     int_list_block **blk_list;
4486     int_list **list;
4487     int val;
4488{
4489  int_list_ptr p = alloc_int_list_node (blk_list);
4490
4491  p->val = val;
4492  p->next = *list;
4493  *list = p;
4494  return p;
4495}
4496
4497/* Free the blocks of lists at BLK_LIST.  */
4498
4499void
4500free_int_list (blk_list)
4501     int_list_block **blk_list;
4502{
4503  int_list_block *p, *next;
4504
4505  for (p = *blk_list; p != NULL; p = next)
4506    {
4507      next = p->next;
4508      free (p);
4509    }
4510
4511  /* Mark list as empty for the next function we compile.  */
4512  *blk_list = NULL;
4513}
4514
4515/* Predecessor/successor computation.  */
4516
4517/* Mark PRED_BB a predecessor of SUCC_BB,
4518   and conversely SUCC_BB a successor of PRED_BB.  */
4519
4520static void
4521add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
4522     int pred_bb;
4523     int succ_bb;
4524     int_list_ptr *s_preds;
4525     int_list_ptr *s_succs;
4526     int *num_preds;
4527     int *num_succs;
4528{
4529  if (succ_bb != EXIT_BLOCK)
4530    {
4531      add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
4532      num_preds[succ_bb]++;
4533    }
4534  if (pred_bb != ENTRY_BLOCK)
4535    {
4536      add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
4537      num_succs[pred_bb]++;
4538    }
4539}
4540
4541/* Convert edge lists into pred/succ lists for backward compatibility.  */
4542
4543void
4544compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
4545     int_list_ptr *s_preds;
4546     int_list_ptr *s_succs;
4547     int *num_preds;
4548     int *num_succs;
4549{
4550  int i, n = n_basic_blocks;
4551  edge e;
4552
4553  memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
4554  memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
4555  memset (num_preds, 0, n_basic_blocks * sizeof (int));
4556  memset (num_succs, 0, n_basic_blocks * sizeof (int));
4557
4558  for (i = 0; i < n; ++i)
4559    {
4560      basic_block bb = BASIC_BLOCK (i);
4561
4562      for (e = bb->succ; e ; e = e->succ_next)
4563	add_pred_succ (i, e->dest->index, s_preds, s_succs,
4564		       num_preds, num_succs);
4565    }
4566
4567  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
4568    add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
4569		   num_preds, num_succs);
4570}
4571
4572void
4573dump_bb_data (file, preds, succs, live_info)
4574     FILE *file;
4575     int_list_ptr *preds;
4576     int_list_ptr *succs;
4577     int live_info;
4578{
4579  int bb;
4580  int_list_ptr p;
4581
4582  fprintf (file, "BB data\n\n");
4583  for (bb = 0; bb < n_basic_blocks; bb++)
4584    {
4585      fprintf (file, "BB %d, start %d, end %d\n", bb,
4586	       INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
4587      fprintf (file, "  preds:");
4588      for (p = preds[bb]; p != NULL; p = p->next)
4589	{
4590	  int pred_bb = INT_LIST_VAL (p);
4591	  if (pred_bb == ENTRY_BLOCK)
4592	    fprintf (file, " entry");
4593	  else
4594	    fprintf (file, " %d", pred_bb);
4595	}
4596      fprintf (file, "\n");
4597      fprintf (file, "  succs:");
4598      for (p = succs[bb]; p != NULL; p = p->next)
4599	{
4600	  int succ_bb = INT_LIST_VAL (p);
4601	  if (succ_bb == EXIT_BLOCK)
4602	    fprintf (file, " exit");
4603	  else
4604	    fprintf (file, " %d", succ_bb);
4605	}
4606      if (live_info)
4607	{
4608	  int regno;
4609	  fprintf (file, "\nRegisters live at start:");
4610	  for (regno = 0; regno < max_regno; regno++)
4611	    if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
4612	      fprintf (file, " %d", regno);
4613	  fprintf (file, "\n");
4614	}
4615      fprintf (file, "\n");
4616    }
4617  fprintf (file, "\n");
4618}
4619
4620/* Free basic block data storage.  */
4621
4622void
4623free_bb_mem ()
4624{
4625  free_int_list (&pred_int_list_blocks);
4626}
4627
4628/* Compute dominator relationships.  */
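/* This is the usual iterative bit-vector computation (a sketch of the loop
   below, not new functionality): every block except block 0 starts out
   dominated by all blocks, block 0 by just itself, and each pass narrows

	dom (b) = dom (b)  INTERSECT
		  ({ b } UNION (INTERSECT over all preds p of dom (p)))

   with the mirror-image step over successors for the post-dominators,
   until a full pass changes nothing.  */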
4629void
4630compute_dominators (dominators, post_dominators, s_preds, s_succs)
4631     sbitmap *dominators;
4632     sbitmap *post_dominators;
4633     int_list_ptr *s_preds;
4634     int_list_ptr *s_succs;
4635{
4636  int bb, changed, passes;
4637  sbitmap *temp_bitmap;
4638
4639  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4640  sbitmap_vector_ones (dominators, n_basic_blocks);
4641  sbitmap_vector_ones (post_dominators, n_basic_blocks);
4642  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4643
4644  sbitmap_zero (dominators[0]);
4645  SET_BIT (dominators[0], 0);
4646
4647  sbitmap_zero (post_dominators[n_basic_blocks - 1]);
4648  SET_BIT (post_dominators[n_basic_blocks - 1], 0);
4649
4650  passes = 0;
4651  changed = 1;
4652  while (changed)
4653    {
4654      changed = 0;
4655      for (bb = 1; bb < n_basic_blocks; bb++)
4656	{
4657	  sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4658					     bb, s_preds);
4659	  SET_BIT (temp_bitmap[bb], bb);
4660	  changed |= sbitmap_a_and_b (dominators[bb],
4661				      dominators[bb],
4662				      temp_bitmap[bb]);
4663	  sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4664					   bb, s_succs);
4665	  SET_BIT (temp_bitmap[bb], bb);
4666	  changed |= sbitmap_a_and_b (post_dominators[bb],
4667				      post_dominators[bb],
4668				      temp_bitmap[bb]);
4669	}
4670      passes++;
4671    }
4672
4673  free (temp_bitmap);
4674}
4675
4676/* Given DOMINATORS, compute the immediate dominators into IDOM.  */
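
/* A small worked example with invented block numbers: if dom (3) = { 0, 1, 3 }
   and dom (1) = { 0, 1 }, then tmp (3) starts as { 0, 1 }; subtracting
   tmp (1) = { 0 } leaves { 1 }, so idom[3] = 1, i.e. block 3's immediate
   dominator is its closest dominator.  */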
4677
4678void
4679compute_immediate_dominators (idom, dominators)
4680     int *idom;
4681     sbitmap *dominators;
4682{
4683  sbitmap *tmp;
4684  int b;
4685
4686  tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4687
4688  /* Begin with tmp(n) = dom(n) - { n }.  */
4689  for (b = n_basic_blocks; --b >= 0; )
4690    {
4691      sbitmap_copy (tmp[b], dominators[b]);
4692      RESET_BIT (tmp[b], b);
4693    }
4694
4695  /* Subtract out all of our dominator's dominators.  */
4696  for (b = n_basic_blocks; --b >= 0; )
4697    {
4698      sbitmap tmp_b = tmp[b];
4699      int s;
4700
4701      for (s = n_basic_blocks; --s >= 0; )
4702	if (TEST_BIT (tmp_b, s))
4703	  sbitmap_difference (tmp_b, tmp_b, tmp[s]);
4704    }
4705
4706  /* Find the one bit set in the bitmap and put it in the output array.  */
4707  for (b = n_basic_blocks; --b >= 0; )
4708    {
4709      int t;
4710      EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
4711    }
4712
4713  sbitmap_vector_free (tmp);
4714}
4715
4716/* Count for a single SET rtx, X.  */
4717
4718static void
4719count_reg_sets_1 (x)
4720     rtx x;
4721{
4722  register int regno;
4723  register rtx reg = SET_DEST (x);
4724
4725  /* Find the register that's set/clobbered.  */
4726  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4727	 || GET_CODE (reg) == SIGN_EXTRACT
4728	 || GET_CODE (reg) == STRICT_LOW_PART)
4729    reg = XEXP (reg, 0);
4730
4731  if (GET_CODE (reg) == PARALLEL
4732      && GET_MODE (reg) == BLKmode)
4733    {
4734      register int i;
4735      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4736	count_reg_sets_1 (XVECEXP (reg, 0, i));
4737      return;
4738    }
4739
4740  if (GET_CODE (reg) == REG)
4741    {
4742      regno = REGNO (reg);
4743      if (regno >= FIRST_PSEUDO_REGISTER)
4744	{
4745	  /* Count (weighted) references, stores, etc.  This counts a
4746	     register twice if it is modified, but that is correct.  */
4747	  REG_N_SETS (regno)++;
4748
4749	  REG_N_REFS (regno) += loop_depth;
4750	}
4751    }
4752}
4753
4754/* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4755   REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */
4756
4757static void
4758count_reg_sets  (x)
4759     rtx x;
4760{
4761  register RTX_CODE code = GET_CODE (x);
4762
4763  if (code == SET || code == CLOBBER)
4764    count_reg_sets_1 (x);
4765  else if (code == PARALLEL)
4766    {
4767      register int i;
4768      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4769	{
4770	  code = GET_CODE (XVECEXP (x, 0, i));
4771	  if (code == SET || code == CLOBBER)
4772	    count_reg_sets_1 (XVECEXP (x, 0, i));
4773	}
4774    }
4775}
4776
4777/* Increment REG_N_REFS by the current loop depth for each register reference
4778   found in X.  */
4779
4780static void
4781count_reg_references (x)
4782     rtx x;
4783{
4784  register RTX_CODE code;
4785
4786 retry:
4787  code = GET_CODE (x);
4788  switch (code)
4789    {
4790    case LABEL_REF:
4791    case SYMBOL_REF:
4792    case CONST_INT:
4793    case CONST:
4794    case CONST_DOUBLE:
4795    case PC:
4796    case ADDR_VEC:
4797    case ADDR_DIFF_VEC:
4798    case ASM_INPUT:
4799      return;
4800
4801#ifdef HAVE_cc0
4802    case CC0:
4803      return;
4804#endif
4805
4806    case CLOBBER:
4807      /* If we are clobbering a MEM, mark any registers inside the address
4808	 as being used.  */
4809      if (GET_CODE (XEXP (x, 0)) == MEM)
4810	count_reg_references (XEXP (XEXP (x, 0), 0));
4811      return;
4812
4813    case SUBREG:
4814      /* While we're here, optimize this case.  */
4815      x = SUBREG_REG (x);
4816
4817      /* In case the SUBREG is not of a register, don't optimize */
4818      if (GET_CODE (x) != REG)
4819	{
4820	  count_reg_references (x);
4821	  return;
4822	}
4823
4824      /* ... fall through ...  */
4825
4826    case REG:
4827      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4828	REG_N_REFS (REGNO (x)) += loop_depth;
4829      return;
4830
4831    case SET:
4832      {
4833	register rtx testreg = SET_DEST (x);
4834	int mark_dest = 0;
4835
4836	/* If storing into MEM, don't show it as being used.  But do
4837	   show the address as being used.  */
4838	if (GET_CODE (testreg) == MEM)
4839	  {
4840	    count_reg_references (XEXP (testreg, 0));
4841	    count_reg_references (SET_SRC (x));
4842	    return;
4843	  }
4844
4845	/* Storing in STRICT_LOW_PART is like storing in a reg
4846	   in that this SET might be dead, so ignore it in TESTREG,
4847	   but in some other ways it is like using the reg.
4848
4849	   Storing in a SUBREG or a bit field is like storing the entire
4850	   register in that if the register's value is not used
4851	   then this SET is not needed.  */
4852	while (GET_CODE (testreg) == STRICT_LOW_PART
4853	       || GET_CODE (testreg) == ZERO_EXTRACT
4854	       || GET_CODE (testreg) == SIGN_EXTRACT
4855	       || GET_CODE (testreg) == SUBREG)
4856	  {
4857	    /* Modifying a single register in an alternate mode
4858	       does not use any of the old value.  But these other
4859	       ways of storing in a register do use the old value.  */
4860	    if (GET_CODE (testreg) == SUBREG
4861		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4862	      ;
4863	    else
4864	      mark_dest = 1;
4865
4866	    testreg = XEXP (testreg, 0);
4867	  }
4868
4869	/* If this is a store into a register,
4870	   recursively scan the value being stored.  */
4871
4872	if ((GET_CODE (testreg) == PARALLEL
4873	     && GET_MODE (testreg) == BLKmode)
4874	    || GET_CODE (testreg) == REG)
4875	  {
4876	    count_reg_references (SET_SRC (x));
4877	    if (mark_dest)
4878	      count_reg_references (SET_DEST (x));
4879	    return;
4880	  }
4881      }
4882      break;
4883
4884    default:
4885      break;
4886    }
4887
4888  /* Recursively scan the operands of this expression.  */
4889
4890  {
4891    register char *fmt = GET_RTX_FORMAT (code);
4892    register int i;
4893
4894    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4895      {
4896	if (fmt[i] == 'e')
4897	  {
4898	    /* Tail recursive case: save a function call level.  */
4899	    if (i == 0)
4900	      {
4901		x = XEXP (x, 0);
4902		goto retry;
4903	      }
4904	    count_reg_references (XEXP (x, i));
4905	  }
4906	else if (fmt[i] == 'E')
4907	  {
4908	    register int j;
4909	    for (j = 0; j < XVECLEN (x, i); j++)
4910	      count_reg_references (XVECEXP (x, i, j));
4911	  }
4912      }
4913  }
4914}
4915
4916/* Recompute register set/reference counts immediately prior to register
4917   allocation.
4918
4919   This avoids problems with set/reference counts changing to/from values
4920   which have special meanings to the register allocators.
4921
4922   Additionally, the reference counts are the primary component used by the
4923   register allocators to prioritize pseudos for allocation to hard regs.
4924   More accurate reference counts generally lead to better register allocation.
4925
4926   F is the first insn to be scanned.
4927   LOOP_STEP denotes how much loop_depth should be incremented per
4928   loop nesting level in order to increase the ref count more for references
4929   in a loop.
4930
4931   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4932   possibly other information which is used by the register allocators.  */
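
/* For example (an invented call, not a policy required by any caller): with
   LOOP_STEP == 1, loop_depth starts at 1, so a reference outside any loop
   adds 1 to REG_N_REFS, a reference inside one loop adds 2, inside two
   nested loops 3, and so on.  */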
4933
4934void
4935recompute_reg_usage (f, loop_step)
4936     rtx f;
4937     int loop_step;
4938{
4939  rtx insn;
4940  int i, max_reg;
4941
4942  /* Clear out the old data.  */
4943  max_reg = max_reg_num ();
4944  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4945    {
4946      REG_N_SETS (i) = 0;
4947      REG_N_REFS (i) = 0;
4948    }
4949
4950  /* Scan each insn in the chain and count how many times each register is
4951     set/used.  */
4952  loop_depth = 1;
4953  for (insn = f; insn; insn = NEXT_INSN (insn))
4954    {
4955      /* Keep track of loop depth.  */
4956      if (GET_CODE (insn) == NOTE)
4957	{
4958	  /* Look for loop boundaries.  */
4959	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4960	    loop_depth -= loop_step;
4961	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4962	    loop_depth += loop_step;
4963
4964	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4965	     Abort now rather than setting register status incorrectly.  */
4966	  if (loop_depth == 0)
4967	    abort ();
4968	}
4969      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4970	{
4971	  rtx links;
4972
4973	  /* This call will increment REG_N_SETS for each SET or CLOBBER
4974	     of a register in INSN.  It will also increment REG_N_REFS
4975	     by the loop depth for each set of a register in INSN.  */
4976	  count_reg_sets (PATTERN (insn));
4977
4978	  /* count_reg_sets does not detect autoincrement address modes, so
4979	     detect them here by looking at the notes attached to INSN.  */
4980	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4981	    {
4982	      if (REG_NOTE_KIND (links) == REG_INC)
4983		/* Count (weighted) references, stores, etc.  This counts a
4984		   register twice if it is modified, but that is correct.  */
4985		REG_N_SETS (REGNO (XEXP (links, 0)))++;
4986	    }
4987
4988	  /* This call will increment REG_N_REFS by the current loop depth for
4989	     each reference to a register in INSN.  */
4990	  count_reg_references (PATTERN (insn));
4991
4992	  /* count_reg_references will not include counts for arguments to
4993	     function calls, so detect them here by examining the
4994	     CALL_INSN_FUNCTION_USAGE data.  */
4995	  if (GET_CODE (insn) == CALL_INSN)
4996	    {
4997	      rtx note;
4998
4999	      for (note = CALL_INSN_FUNCTION_USAGE (insn);
5000		   note;
5001		   note = XEXP (note, 1))
5002		if (GET_CODE (XEXP (note, 0)) == USE)
5003		  count_reg_references (SET_DEST (XEXP (note, 0)));
5004	    }
5005	}
5006    }
5007}
5008
5009/* Record INSN's block as BB.  */
5010
5011void
5012set_block_for_insn (insn, bb)
5013     rtx insn;
5014     basic_block bb;
5015{
5016  size_t uid = INSN_UID (insn);
5017  if (uid >= basic_block_for_insn->num_elements)
5018    {
5019      int new_size;
5020
5021      /* Add one-eighth the size so we don't keep calling xrealloc.  */
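      /* For instance (pure arithmetic, no particular insn stream assumed),
	 a uid of 800 grows the array to 800 + (800 + 7) / 8 = 900 entries.  */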
5022      new_size = uid + (uid + 7) / 8;
5023
5024      VARRAY_GROW (basic_block_for_insn, new_size);
5025    }
5026  VARRAY_BB (basic_block_for_insn, uid) = bb;
5027}
5028
5029/* Record INSN's block number as BB.  */
5030/* ??? This has got to go.  */
5031
5032void
5033set_block_num (insn, bb)
5034     rtx insn;
5035     int bb;
5036{
5037  set_block_for_insn (insn, BASIC_BLOCK (bb));
5038}
5039
5040/* Verify the CFG consistency.  This function checks some CFG invariants and
5041   aborts when something is wrong.  Hope that this function will help to
5042   convert many optimization passes to keep the CFG consistent.
5043
5044   Currently it does the following checks:
5045
5046   - test head/end pointers
5047   - overlapping of basic blocks
5048   - edge list correctness
5049   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5050   - tails of basic blocks (ensure that boundary is necessary)
5051   - scans body of the basic block for JUMP_INSN, CODE_LABEL
5052     and NOTE_INSN_BASIC_BLOCK
5053   - check that all insns are in the basic blocks
5054   (except the switch handling code, barriers and notes)
5055
5056   In the future it can be extended to check a lot of other stuff as well
5057   (reachability of basic blocks, life information, etc. etc.).  */
5058
5059void
5060verify_flow_info ()
5061{
5062  const int max_uid = get_max_uid ();
5063  const rtx rtx_first = get_insns ();
5064  basic_block *bb_info;
5065  rtx x;
5066  int i;
5067
5068  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5069  memset (bb_info, 0, max_uid * sizeof (basic_block));
5070
5071  /* First pass check head/end pointers and set bb_info array used by
5072     later passes.  */
5073  for (i = n_basic_blocks - 1; i >= 0; i--)
5074    {
5075      basic_block bb = BASIC_BLOCK (i);
5076
5077      /* Check the head pointer and make sure that it is pointing into
5078         insn list.  */
5079      for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5080	if (x == bb->head)
5081	  break;
5082      if (!x)
5083	{
5084	  fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
5085		 INSN_UID (bb->head), bb->index);
5086	}
5087
5088      /* Check the end pointer and make sure that it is pointing into
5089         insn list.  */
5090      for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5091	{
5092	  if (bb_info[INSN_UID (x)] != NULL)
5093	    {
5094	      fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
5095		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5096	    }
5097	  bb_info[INSN_UID (x)] = bb;
5098
5099	  if (x == bb->end)
5100	    break;
5101	}
5102      if (!x)
5103	{
5104	  fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
5105		 INSN_UID (bb->end), bb->index);
5106	}
5107    }
5108
5109  /* Now check the basic blocks (boundaries etc.) */
5110  for (i = n_basic_blocks - 1; i >= 0; i--)
5111    {
5112      basic_block bb = BASIC_BLOCK (i);
5113      /* Check correctness of edge lists.  */
5114      edge e;
5115
5116      e = bb->succ;
5117      while (e)
5118	{
5119	  if (e->src != bb)
5120	    {
5121	      fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5122		       bb->index);
5123	      fprintf (stderr, "Predecessor: ");
5124	      dump_edge_info (stderr, e, 0);
5125	      fprintf (stderr, "\nSuccessor: ");
5126	      dump_edge_info (stderr, e, 1);
5127	      fflush (stderr);
5128	      abort ();
5129	    }
5130	  if (e->dest != EXIT_BLOCK_PTR)
5131	    {
5132	      edge e2 = e->dest->pred;
5133	      while (e2 && e2 != e)
5134		e2 = e2->pred_next;
5135	      if (!e2)
5136		{
5137		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5138			 bb->index);
5139		}
5140	    }
5141	  e = e->succ_next;
5142	}
5143
5144      e = bb->pred;
5145      while (e)
5146	{
5147	  if (e->dest != bb)
5148	    {
5149	      fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
5150		       bb->index);
5151	      fprintf (stderr, "Predecessor: ");
5152	      dump_edge_info (stderr, e, 0);
5153	      fprintf (stderr, "\nSuccessor: ");
5154	      dump_edge_info (stderr, e, 1);
5155	      fflush (stderr);
5156	      abort ();
5157	    }
5158	  if (e->src != ENTRY_BLOCK_PTR)
5159	    {
5160	      edge e2 = e->src->succ;
5161	      while (e2 && e2 != e)
5162		e2 = e2->succ_next;
5163	      if (!e2)
5164		{
5165		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5166			 bb->index);
5167		}
5168	    }
5169	  e = e->pred_next;
5170	}
5171
5172      /* OK, the pointers are correct.  Now check the header of the basic
5173         block.  It ought to contain an optional CODE_LABEL followed
5174	 by the NOTE_INSN_BASIC_BLOCK note.  */
5175      x = bb->head;
5176      if (GET_CODE (x) == CODE_LABEL)
5177	{
5178	  if (bb->end == x)
5179	    {
5180	      fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
5181	    }
5182	  x = NEXT_INSN (x);
5183	}
5184      if (GET_CODE (x) != NOTE
5185	  || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5186	  || NOTE_BASIC_BLOCK (x) != bb)
5187	{
5188	  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5189		 bb->index);
5190	}
5191
5192      if (bb->end == x)
5193	{
5194	  /* Do checks for empty blocks here */
5195	}
5196      else
5197	{
5198	  x = NEXT_INSN (x);
5199	  while (x)
5200	    {
5201	      if (GET_CODE (x) == NOTE
5202		  && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5203		{
5204		  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
5205			 INSN_UID (x), bb->index);
5206		}
5207
5208	      if (x == bb->end)
5209		break;
5210
5211	      if (GET_CODE (x) == JUMP_INSN
5212		  || GET_CODE (x) == CODE_LABEL
5213		  || GET_CODE (x) == BARRIER)
5214		{
5215		  fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
5216			      x, bb->index);
5217		}
5218
5219	      x = NEXT_INSN (x);
5220	    }
5221	}
5222    }
5223
5224  x = rtx_first;
5225  while (x)
5226    {
5227      if (!bb_info[INSN_UID (x)])
5228	{
5229	  switch (GET_CODE (x))
5230	    {
5231	    case BARRIER:
5232	    case NOTE:
5233	      break;
5234
5235	    case CODE_LABEL:
5236	      /* An addr_vec is placed outside any basic block.  */
5237	      if (NEXT_INSN (x)
5238		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5239		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5240		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5241		{
5242		  x = NEXT_INSN (x);
5243		}
5244
5245	      /* But in any case, non-deletable labels can appear anywhere.  */
5246	      break;
5247
5248	    default:
5249	      fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
5250	    }
5251	}
5252
5253      x = NEXT_INSN (x);
5254    }
5255}
5256