/* flow.c revision 107590 */
1232935Stheraven/* Data flow analysis for GNU compiler.
2232935Stheraven   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
31573Srgrimes   1999, 2000, 2001 Free Software Foundation, Inc.
4232935Stheraven
5232935StheravenThis file is part of GCC.
6232935Stheraven
71573SrgrimesGCC is free software; you can redistribute it and/or modify it under
81573Srgrimesthe terms of the GNU General Public License as published by the Free
91573SrgrimesSoftware Foundation; either version 2, or (at your option) any later
101573Srgrimesversion.
111573Srgrimes
121573SrgrimesGCC is distributed in the hope that it will be useful, but WITHOUT ANY
131573SrgrimesWARRANTY; without even the implied warranty of MERCHANTABILITY or
141573SrgrimesFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
151573Srgrimesfor more details.
161573Srgrimes
171573SrgrimesYou should have received a copy of the GNU General Public License
181573Srgrimesalong with GCC; see the file COPYING.  If not, write to the Free
191573SrgrimesSoftware Foundation, 59 Temple Place - Suite 330, Boston, MA
201573Srgrimes02111-1307, USA.  */
211573Srgrimes
221573Srgrimes/* This file contains the data flow analysis pass of the compiler.  It
231573Srgrimes   computes data flow information which tells combine_instructions
241573Srgrimes   which insns to consider combining and controls register allocation.
251573Srgrimes
261573Srgrimes   Additional data flow information that is too bulky to record is
271573Srgrimes   generated during the analysis, and is used at that time to create
2850476Speter   autoincrement and autodecrement addressing.
291573Srgrimes
30232935Stheraven   The first step is dividing the function into basic blocks.
31232935Stheraven   find_basic_blocks does this.  Then life_analysis determines
321573Srgrimes   where each register is live and where it is dead.
331573Srgrimes
34232935Stheraven   ** find_basic_blocks **
35232935Stheraven
36232935Stheraven   find_basic_blocks divides the current function's rtl into basic
37232935Stheraven   blocks and constructs the CFG.  The blocks are recorded in the
38232935Stheraven   basic_block_info array; the CFG exists in the edge structures
39232935Stheraven   referenced by the blocks.
40232935Stheraven
41232935Stheraven   find_basic_blocks also finds any unreachable loops and deletes them.
42232935Stheraven
43232935Stheraven   ** life_analysis **
44232935Stheraven
45232935Stheraven   life_analysis is called immediately after find_basic_blocks.
46232935Stheraven   It uses the basic block information to determine where each
47232935Stheraven   hard or pseudo register is live.
48232935Stheraven
49232935Stheraven   ** live-register info **
50232935Stheraven
51232935Stheraven   The information about where each register is live is in two parts:
52232935Stheraven   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53232935Stheraven
54232935Stheraven   basic_block->global_live_at_start has an element for each basic
55232935Stheraven   block, and the element is a bit-vector with a bit for each hard or
56232935Stheraven   pseudo register.  The bit is 1 if the register is live at the
5759460Sphantom   beginning of the basic block.
5859460Sphantom
591573Srgrimes   Two types of elements can be added to an insn's REG_NOTES.
6084306Sru   A REG_DEAD note is added to an insn's REG_NOTES for any register
6122149Smpp   that meets both of two conditions:  The value in the register is not
62232935Stheraven   needed in subsequent insns and the insn does not replace the value in
6389257Sbde   the register (in the case of multi-word hard registers, the value in
64232935Stheraven   each register must be replaced by the insn to avoid a REG_DEAD note).
6522149Smpp
66232935Stheraven   In the vast majority of cases, an object in a REG_DEAD note will be
6722149Smpp   used somewhere in the insn.  The (rare) exception to this is if an
68232935Stheraven   insn uses a multi-word hard register and only some of the registers are
6922149Smpp   needed in subsequent insns.  In that case, REG_DEAD notes will be
70232935Stheraven   provided for those hard registers that are not subsequently needed.
7122149Smpp   Partial REG_DEAD notes of this type do not occur when an insn sets
72232935Stheraven   only some of the hard registers used in such a multi-word operand;
7322149Smpp   omitting REG_DEAD notes for objects stored in an insn is optional and
74232935Stheraven   the desire to do so does not justify the complexity of the partial
7522149Smpp   REG_DEAD notes.
76232935Stheraven
7746191Sghelmer   REG_UNUSED notes are added for each register that is set by the insn
78232935Stheraven   but is unused subsequently (if every register set by the insn is unused
7946191Sghelmer   and the insn does not reference memory or have some other side-effect,
80232935Stheraven   the insn is deleted instead).  If only part of a multi-word hard
8122149Smpp   register is used in a subsequent insn, REG_UNUSED notes are made for
82232935Stheraven   the parts that will not be used.
8346191Sghelmer
84232935Stheraven   To determine which registers are live after any insn, one can
8546191Sghelmer   start from the beginning of the basic block and scan insns, noting
86232935Stheraven   which registers are set by each insn and which die there.
8746191Sghelmer
88232935Stheraven   ** Other actions of life_analysis **
8922149Smpp
90232935Stheraven   life_analysis sets up the LOG_LINKS fields of insns because the
9122149Smpp   information needed to do so is readily available.
92232935Stheraven
9346191Sghelmer   life_analysis deletes insns whose only effect is to store a value
94232935Stheraven   that is never used.
9522149Smpp
96232935Stheraven   life_analysis notices cases where a reference to a register as
9722149Smpp   a memory address can be combined with a preceding or following
98232935Stheraven   incrementation or decrementation of the register.  The separate
9922149Smpp   instruction to increment or decrement is deleted and the address
100232935Stheraven   is changed to a POST_INC or similar rtx.
10122149Smpp
102232935Stheraven   Each time an incrementing or decrementing address is created,
1031573Srgrimes   a REG_INC element is added to the insn's REG_NOTES list.
1041573Srgrimes
105233648Seadler   life_analysis fills in certain vectors containing information about
106232935Stheraven   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107232935Stheraven   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108232935Stheraven
109233648Seadler   life_analysis sets current_function_sp_is_unchanging if the function
110119893Sru   doesn't modify the stack pointer.  */
1111573Srgrimes
1121573Srgrimes/* TODO:
1131573Srgrimes
11489257Sbde   Split out from life_analysis:
1151573Srgrimes	- local property discovery (bb->local_live, bb->local_set)
1161573Srgrimes	- global property computation
1171573Srgrimes	- log links creation
1181573Srgrimes	- pre/post modify transformation
1191573Srgrimes*/
1201573Srgrimes
1211573Srgrimes#include "config.h"
122127613Stjr#include "system.h"
1231573Srgrimes#include "tree.h"
124127613Stjr#include "rtl.h"
1251573Srgrimes#include "tm_p.h"
1261573Srgrimes#include "hard-reg-set.h"
127131365Sru#include "basic-block.h"
1281573Srgrimes#include "insn-config.h"
129127613Stjr#include "regs.h"
1301573Srgrimes#include "flags.h"
1311573Srgrimes#include "output.h"
1321573Srgrimes#include "function.h"
1331573Srgrimes#include "except.h"
134127613Stjr#include "toplev.h"
135232935Stheraven#include "recog.h"
1361573Srgrimes#include "expr.h"
137232935Stheraven#include "ssa.h"
138237939Sjilles#include "timevar.h"
139237939Sjilles
140237939Sjilles#include "obstack.h"
141237939Sjilles#include "splay-tree.h"
142237939Sjilles
143237939Sjilles#define obstack_chunk_alloc xmalloc
144237939Sjilles#define obstack_chunk_free free
145237939Sjilles
146237939Sjilles/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147237939Sjilles   the stack pointer does not matter.  The value is tested only in
148237939Sjilles   functions that have frame pointers.
149237939Sjilles   No definition is equivalent to always zero.  */
150237939Sjilles#ifndef EXIT_IGNORE_STACK
151237939Sjilles#define EXIT_IGNORE_STACK 0
152#endif
153
154#ifndef HAVE_epilogue
155#define HAVE_epilogue 0
156#endif
157#ifndef HAVE_prologue
158#define HAVE_prologue 0
159#endif
160#ifndef HAVE_sibcall_epilogue
161#define HAVE_sibcall_epilogue 0
162#endif
163
164#ifndef LOCAL_REGNO
165#define LOCAL_REGNO(REGNO)  0
166#endif
167#ifndef EPILOGUE_USES
168#define EPILOGUE_USES(REGNO)  0
169#endif
170#ifndef EH_USES
171#define EH_USES(REGNO)  0
172#endif
173
174#ifdef HAVE_conditional_execution
175#ifndef REVERSE_CONDEXEC_PREDICATES_P
176#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
177#endif
178#endif
179
180/* Nonzero if the second flow pass has completed.  */
181int flow2_completed;
182
183/* Maximum register number used in this function, plus one.  */
184
185int max_regno;
186
187/* Indexed by n, giving various register information */
188
189varray_type reg_n_info;
190
191/* Size of a regset for the current function,
192   in (1) bytes and (2) elements.  */
193
194int regset_bytes;
195int regset_size;
196
197/* Regset of regs live when calls to `setjmp'-like functions happen.  */
198/* ??? Does this exist only for the setjmp-clobbered warning message?  */
199
200regset regs_live_at_setjmp;
201
202/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
203   that have to go in the same hard reg.
204   The first two regs in the list are a pair, and the next two
205   are another pair, etc.  */
206rtx regs_may_share;
207
208/* Callback that determines if it's ok for a function to have no
209   noreturn attribute.  */
210int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
211
212/* Set of registers that may be eliminable.  These are handled specially
213   in updating regs_ever_live.  */
214
215static HARD_REG_SET elim_reg_set;
216
217/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
232
233/* For use in communicating between propagate_block and its subroutines.
234   Holds all information needed to compute life and def-use information.  */
235
struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  Bounded by MAX_MEM_SET_LIST_LEN; once
     the limit is reached, new MEMs are dropped rather than tracked.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
281
282/* Maximum length of pbi->mem_set_list before we start dropping
283   new elements on the floor.  */
284#define MAX_MEM_SET_LIST_LEN	100
285
286/* Forward declarations */
287static int verify_wide_reg_1		PARAMS ((rtx *, void *));
288static void verify_wide_reg		PARAMS ((int, basic_block));
289static void verify_local_live_at_start	PARAMS ((regset, basic_block));
290static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
291static void notice_stack_pointer_modification PARAMS ((rtx));
292static void mark_reg			PARAMS ((rtx, void *));
293static void mark_regs_live_at_end	PARAMS ((regset));
294static int set_phi_alternative_reg      PARAMS ((rtx, int, int, void *));
295static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
296static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
297static rtx propagate_block_delete_libcall PARAMS ((rtx, rtx));
298static int insn_dead_p			PARAMS ((struct propagate_block_info *,
299						 rtx, int, rtx));
300static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
301						 rtx, rtx));
302static void mark_set_regs		PARAMS ((struct propagate_block_info *,
303						 rtx, rtx));
304static void mark_set_1			PARAMS ((struct propagate_block_info *,
305						 enum rtx_code, rtx, rtx,
306						 rtx, int));
307static int find_regno_partial		PARAMS ((rtx *, void *));
308
309#ifdef HAVE_conditional_execution
310static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
311						 int, rtx));
312static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
313static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
314static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
315						 int));
316static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
317static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
318static rtx not_reg_cond			PARAMS ((rtx));
319static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
320#endif
321#ifdef AUTO_INC_DEC
322static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
323						 rtx, rtx, rtx, rtx, rtx));
324static void find_auto_inc		PARAMS ((struct propagate_block_info *,
325						 rtx, rtx));
326static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
327						 rtx));
328static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
329#endif
330static void mark_used_reg		PARAMS ((struct propagate_block_info *,
331						 rtx, rtx, rtx));
332static void mark_used_regs		PARAMS ((struct propagate_block_info *,
333						 rtx, rtx, rtx));
334void dump_flow_info			PARAMS ((FILE *));
335void debug_flow_info			PARAMS ((void));
336static void add_to_mem_set_list		PARAMS ((struct propagate_block_info *,
337						 rtx));
338static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
339						  rtx));
340static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
341						 rtx));
342static void delete_dead_jumptables	PARAMS ((void));
343static void clear_log_links		PARAMS ((sbitmap));
344
345
346void
347check_function_return_warnings ()
348{
349  if (warn_missing_noreturn
350      && !TREE_THIS_VOLATILE (cfun->decl)
351      && EXIT_BLOCK_PTR->pred == NULL
352      && (lang_missing_noreturn_ok_p
353	  && !lang_missing_noreturn_ok_p (cfun->decl)))
354    warning ("function might be possible candidate for attribute `noreturn'");
355
356  /* If we have a path to EXIT, then we do return.  */
357  if (TREE_THIS_VOLATILE (cfun->decl)
358      && EXIT_BLOCK_PTR->pred != NULL)
359    warning ("`noreturn' function does return");
360
361  /* If the clobber_return_insn appears in some basic block, then we
362     do reach the end without returning a value.  */
363  else if (warn_return_type
364	   && cfun->x_clobber_return_insn != NULL
365	   && EXIT_BLOCK_PTR->pred != NULL)
366    {
367      int max_uid = get_max_uid ();
368
369      /* If clobber_return_insn was excised by jump1, then renumber_insns
370	 can make max_uid smaller than the number still recorded in our rtx.
371	 That's fine, since this is a quick way of verifying that the insn
372	 is no longer in the chain.  */
373      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
374	{
375	  /* Recompute insn->block mapping, since the initial mapping is
376	     set before we delete unreachable blocks.  */
377	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
378	    warning ("control reaches end of non-void function");
379	}
380    }
381}
382
383/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
384   note associated with the BLOCK.  */
385
386rtx
387first_insn_after_basic_block_note (block)
388     basic_block block;
389{
390  rtx insn;
391
392  /* Get the first instruction in the block.  */
393  insn = block->head;
394
395  if (insn == NULL_RTX)
396    return NULL_RTX;
397  if (GET_CODE (insn) == CODE_LABEL)
398    insn = NEXT_INSN (insn);
399  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
400    abort ();
401
402  return NEXT_INSN (insn);
403}
404
405/* Perform data flow analysis.
406   F is the first insn of the function; FLAGS is a set of PROP_* flags
407   to be used in accumulating flow info.  */
408
void
life_analysis (f, file, flags)
     rtx f;
     FILE *file;
     int flags;
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  /* Without optimization, log links, auto-incs and CFG changes are
     neither wanted nor safe.  */
  if (! optimize)
    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);

  /* The post-reload life analysis must have (on a global basis) the same
     registers live as was computed by reload itself, including those
     implied by register elimination.  Otherwise offsets and such may be
     incorrect.

     Reload will make some registers as live even though they do not
     appear in the rtl.

     We don't want to create new auto-incs after reload, since they
     are unlikely to be useful and can cause problems with shared
     stack slots.  */
  if (reload_completed)
    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);

  /* We want alias analysis information for local dead store elimination.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    init_alias_analysis ();

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves (f);
  purge_all_dead_edges (false);

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification (f);

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */

  if (flags & PROP_REG_INFO)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));
  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);

  /* Clean up.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  free_basic_block_vars (1);

#ifdef ENABLE_CHECKING
  {
    rtx insn;

    /* Search for any REG_LABEL notes which reference deleted labels.  */
    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
      {
	rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);

	/* NOTE(review): GET_CODE yields an rtx code while
	   NOTE_INSN_DELETED_LABEL is a note line number; this comparison
	   looks suspect -- confirm whether the deleted-label check was
	   meant to inspect XEXP (inote, 0) instead.  */
	if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
	  abort ();
      }
  }
#endif

  rebuild_jump_labels (get_insns ());

  /* Removing dead insns should've made jumptables really dead.  */
  delete_dead_jumptables ();
}
508
509/* A subroutine of verify_wide_reg, called through for_each_rtx.
510   Search for REGNO.  If found, return 2 if it is not wider than
511   word_mode.  */
512
513static int
514verify_wide_reg_1 (px, pregno)
515     rtx *px;
516     void *pregno;
517{
518  rtx x = *px;
519  unsigned int regno = *(int *) pregno;
520
521  if (GET_CODE (x) == REG && REGNO (x) == regno)
522    {
523      if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
524	return 2;
525      return 1;
526    }
527  return 0;
528}
529
530/* A subroutine of verify_local_live_at_start.  Search through insns
531   of BB looking for register REGNO.  */
532
533static void
534verify_wide_reg (regno, bb)
535     int regno;
536     basic_block bb;
537{
538  rtx head = bb->head, end = bb->end;
539
540  while (1)
541    {
542      if (INSN_P (head))
543	{
544	  int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
545	  if (r == 1)
546	    return;
547	  if (r == 2)
548	    break;
549	}
550      if (head == end)
551	break;
552      head = NEXT_INSN (head);
553    }
554
555  if (rtl_dump_file)
556    {
557      fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
558      dump_bb (bb, rtl_dump_file);
559    }
560  abort ();
561}
562
563/* A subroutine of update_life_info.  Verify that there are no untoward
564   changes in live_at_start during a local update.  */
565
static void
verify_local_live_at_start (new_live_at_start, bb)
     regset new_live_at_start;
     basic_block bb;
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       "live_at_start mismatch in bb %d, aborting\nNew:\n",
		       bb->index);
	      debug_bitmap_file (rtl_dump_file, new_live_at_start);
	      fputs ("Old:\n", rtl_dump_file);
	      dump_bb (bb, rtl_dump_file);
	    }
	  abort ();
	}
    }
  else
    {
      int i;

      /* Find the set of changed registers: XOR leaves a bit set for
	 each register whose liveness differs between the two sets.
	 Note this destructively updates NEW_LIVE_AT_START.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
	{
          /* No registers should die.  A changed register that was
	     previously live must have been removed -- that is a bug.  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    {
	      if (rtl_dump_file)
		{
		  fprintf (rtl_dump_file,
			   "Register %d died unexpectedly.\n", i);
		  dump_bb (bb, rtl_dump_file);
		}
	      abort ();
	    }

          /* Verify that the now-live register is wider than word_mode.
	     Pre-reload, only the multi-word-subreg case may legitimately
	     add new live registers (see the ??? note above this
	     function's caller).  */
	  verify_wide_reg (i, bb);
	});
    }
}
615
616/* Updates life information starting with the basic blocks set in BLOCKS.
617   If BLOCKS is null, consider it to be the universal set.
618
619   If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
620   we are only expecting local modifications to basic blocks.  If we find
621   extra registers live at the beginning of a block, then we either killed
622   useful data, or we have a broken split that wants data not provided.
623   If we find registers removed from live_at_start, that means we have
624   a broken peephole that is killing a register it shouldn't.
625
626   ??? This is not true in one situation -- when a pre-reload splitter
627   generates subregs of a multi-word pseudo, current life analysis will
628   lose the kill.  So we _can_ have a pseudo go live.  How irritating.
629
630   Including PROP_REG_INFO does not properly refresh regs_ever_live
631   unless the caller resets it to zero.  */
632
void
update_life_info (blocks, extent, prop_flags)
     sbitmap blocks;
     enum update_life_extent extent;
     int prop_flags;
{
  regset tmp;
  regset_head tmp_head;
  int i;
  /* Copy of PROP_FLAGS from which dead-code flags are stripped once
     global liveness has been finalized (see below).  */
  int stabilized_prop_flags = prop_flags;

  tmp = INITIALIZE_REG_SET (tmp_head);

  timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
		? TV_LIFE_UPDATE : TV_LIFE);

  /* Changes to the CFG are only allowed when
     doing a global update for the entire CFG.  */
  if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
      && (extent == UPDATE_LIFE_LOCAL || blocks))
    abort ();

  /* Clear log links in case we are asked to (re)compute them.  */
  if (prop_flags & PROP_LOG_LINKS)
    clear_log_links (blocks);

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      for ( ; ; )
	{
	  int changed = 0;

	  calculate_global_regs_live (blocks, blocks,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_ALLOW_CFG_CHANGES));

	  /* Iterate only when both dead-code removal and CFG changes
	     are permitted; otherwise one relaxation pass suffices.  */
	  if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	      != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	    break;

	  /* Removing dead code may allow the CFG to be simplified which
	     in turn may allow for further dead code detection / removal.  */
	  for (i = n_basic_blocks - 1; i >= 0; --i)
	    {
	      basic_block bb = BASIC_BLOCK (i);

	      COPY_REG_SET (tmp, bb->global_live_at_end);
	      changed |= propagate_block (bb, tmp, NULL, NULL,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_KILL_DEAD_CODE));
	    }

	  /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
	     subsequent propagate_block calls, since removing or acting as
	     removing dead code can affect global register liveness, which
	     is supposed to be finalized for this call after this loop.  */
	  stabilized_prop_flags
	    &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);

	  if (! changed)
	    break;

	  /* We repeat regardless of what cleanup_cfg says.  If there were
	     instructions deleted above, that might have been only a
	     partial improvement (see MAX_MEM_SET_LIST_LEN usage).
	     Further improvement may be possible.  */
	  cleanup_cfg (CLEANUP_EXPENSIVE);
	}

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  /* Re-run local propagation on the requested blocks (or all blocks)
     using the stabilized flags, so that per-insn info is refreshed.  */
  if (blocks)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);

	  COPY_REG_SET (tmp, bb->global_live_at_end);
	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	});
    }
  else
    {
      for (i = n_basic_blocks - 1; i >= 0; --i)
	{
	  basic_block bb = BASIC_BLOCK (i);

	  COPY_REG_SET (tmp, bb->global_live_at_end);

	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	}
    }

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      /* The only pseudos that are live at the beginning of the function
	 are those that were not set anywhere in the function.  local-alloc
	 doesn't know how to handle these correctly, so mark them as not
	 local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
				 FIRST_PSEUDO_REGISTER, i,
				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

      /* We have a problem with any pseudoreg that lives across the setjmp.
	 ANSI says that if a user variable does not change in value between
	 the setjmp and the longjmp, then the longjmp preserves it.  This
	 includes longjmp from a place where the pseudo appears dead.
	 (In principle, the value still exists if it is in scope.)
	 If the pseudo goes in a hard reg, some other value may occupy
	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
	 Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
				 FIRST_PSEUDO_REGISTER, i,
				 {
				   if (regno_reg_rtx[i] != 0)
				     {
				       REG_LIVE_LENGTH (i) = -1;
				       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
				     }
				 });
    }
  timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
	       ? TV_LIFE_UPDATE : TV_LIFE);
}
769
770/* Free the variables allocated by find_basic_blocks.
771
772   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */
773
774void
775free_basic_block_vars (keep_head_end_p)
776     int keep_head_end_p;
777{
778  if (! keep_head_end_p)
779    {
780      if (basic_block_info)
781	{
782	  clear_edges ();
783	  VARRAY_FREE (basic_block_info);
784	}
785      n_basic_blocks = 0;
786
787      ENTRY_BLOCK_PTR->aux = NULL;
788      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
789      EXIT_BLOCK_PTR->aux = NULL;
790      EXIT_BLOCK_PTR->global_live_at_start = NULL;
791    }
792}
793
794/* Delete any insns that copy a register to itself.  */
795
void
delete_noop_moves (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int i;
  rtx insn, next;
  basic_block bb;

  /* Walk every insn of every basic block, turning no-op moves into
     deleted notes in place.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      bb = BASIC_BLOCK (i);
      for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (INSN_P (insn) && noop_move_p (insn))
	    {
	      rtx note;

	      /* If we're about to remove the first insn of a libcall
		 then move the libcall note to the next real insn and
		 update the retval note.  (XEXP (note, 0) == insn means
		 this is a single-insn libcall, which needs no fixup.)  */
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
		       && XEXP (note, 0) != insn)
		{
		  rtx new_libcall_insn = next_real_insn (insn);
		  rtx retval_note = find_reg_note (XEXP (note, 0),
						   REG_RETVAL, NULL_RTX);
		  REG_NOTES (new_libcall_insn)
		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
					 REG_NOTES (new_libcall_insn));
		  XEXP (retval_note, 0) = new_libcall_insn;
		}

	      /* Do not call delete_insn here since that may change
	         the basic block boundaries which upsets some callers.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }
	}
    }
}
838
839/* Delete any jump tables never referenced.  We can't delete them at the
840   time of removing tablejump insn as they are referenced by the preceding
841   insns computing the destination, so we delay deleting and garbagecollect
842   them once life information is computed.  */
static void
delete_dead_jumptables ()
{
  rtx insn, next;

  /* Look for a label whose only remaining uses are LABEL_PRESERVE_P
     references, immediately followed by its jump table, and delete
     both.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == CODE_LABEL
	  && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
	  && GET_CODE (next) == JUMP_INSN
	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
	  delete_insn (NEXT_INSN (insn));
	  delete_insn (insn);
	  /* NOTE(review): NEXT is advanced from the just-deleted table
	     insn -- this relies on delete_insn leaving the chain links
	     of deleted insns intact; confirm against delete_insn's
	     contract.  */
	  next = NEXT_INSN (next);
	}
    }
}
864
865/* Determine if the stack pointer is constant over the life of the function.
866   Only useful before prologues have been emitted.  */
867
868static void
869notice_stack_pointer_modification_1 (x, pat, data)
870     rtx x;
871     rtx pat ATTRIBUTE_UNUSED;
872     void *data ATTRIBUTE_UNUSED;
873{
874  if (x == stack_pointer_rtx
875      /* The stack pointer is only modified indirectly as the result
876	 of a push until later in flow.  See the comments in rtl.texi
877	 regarding Embedded Side-Effects on Addresses.  */
878      || (GET_CODE (x) == MEM
879	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
880	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
881    current_function_sp_is_unchanging = 0;
882}
883
884static void
885notice_stack_pointer_modification (f)
886     rtx f;
887{
888  rtx insn;
889
890  /* Assume that the stack pointer is unchanging if alloca hasn't
891     been used.  */
892  current_function_sp_is_unchanging = !current_function_calls_alloca;
893  if (! current_function_sp_is_unchanging)
894    return;
895
896  for (insn = f; insn; insn = NEXT_INSN (insn))
897    {
898      if (INSN_P (insn))
899	{
900	  /* Check if insn modifies the stack pointer.  */
901	  note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
902		       NULL);
903	  if (! current_function_sp_is_unchanging)
904	    return;
905	}
906    }
907}
908
909/* Mark a register in SET.  Hard registers in large modes get all
910   of their component registers set as well.  */
911
912static void
913mark_reg (reg, xset)
914     rtx reg;
915     void *xset;
916{
917  regset set = (regset) xset;
918  int regno = REGNO (reg);
919
920  if (GET_MODE (reg) == BLKmode)
921    abort ();
922
923  SET_REGNO_REG_SET (set, regno);
924  if (regno < FIRST_PSEUDO_REGISTER)
925    {
926      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
927      while (--n > 0)
928	SET_REGNO_REG_SET (set, regno + n);
929    }
930}
931
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  SET is the regset to fill in;
   it is not cleared first.  */

static void
mark_regs_live_at_end (set)
     regset set;
{
  unsigned int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && reload_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_REGNO_REG_SET (set, i);

  if (HAVE_epilogue && reload_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! LOCAL_REGNO (i)
	    && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  SET_REGNO_REG_SET (set, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO(i);
	/* The target signals the end of the data-register list with
	   INVALID_REGNUM.  */
	if (regno == INVALID_REGNUM)
	  break;
	SET_REGNO_REG_SET (set, regno);
      }
#endif
#ifdef EH_RETURN_STACKADJ_RTX
  /* Before the epilogue has been emitted, the eh-return stack
     adjustment must be kept live so the epilogue expander can see it.  */
  if ((! HAVE_epilogue || ! reload_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif
#ifdef EH_RETURN_HANDLER_RTX
  /* Likewise for the register holding the eh-return handler address.  */
  if ((! HAVE_epilogue || ! reload_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}
1025
1026/* Callback function for for_each_successor_phi.  DATA is a regset.
1027   Sets the SRC_REGNO, the regno of the phi alternative for phi node
1028   INSN, in the regset.  */
1029
1030static int
1031set_phi_alternative_reg (insn, dest_regno, src_regno, data)
1032     rtx insn ATTRIBUTE_UNUSED;
1033     int dest_regno ATTRIBUTE_UNUSED;
1034     int src_regno;
1035     void *data;
1036{
1037  regset live = (regset) data;
1038  SET_REGNO_REG_SET (live, src_regno);
1039  return 0;
1040}
1041
/* Propagate global life info around the graph of basic blocks.  Begin
   considering blocks with their corresponding bit set in BLOCKS_IN.
   If BLOCKS_IN is null, consider it the universal set.

   BLOCKS_OUT is set for every block that was changed.

   This is a classic backward data-flow fixed-point iteration over a
   worklist of basic blocks; FLAGS is a set of PROP_* bits passed on
   to propagate_block.  */

static void
calculate_global_regs_live (blocks_in, blocks_out, flags)
     sbitmap blocks_in, blocks_out;
     int flags;
{
  basic_block *queue, *qhead, *qtail, *qend;
  regset tmp, new_live_at_end, call_used;
  regset_head tmp_head, call_used_head;
  regset_head new_live_at_end_head;
  int i;

  tmp = INITIALIZE_REG_SET (tmp_head);
  new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
  call_used = INITIALIZE_REG_SET (call_used_head);

  /* Inconveniently, this is only readily available in hard reg set form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (call_used_regs[i])
      SET_REGNO_REG_SET (call_used, i);

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  if (blocks_in)
    {
      /* Clear out the garbage that might be hanging out in bb->aux.  */
      for (i = n_basic_blocks - 1; i >= 0; --i)
	BASIC_BLOCK (i)->aux = NULL;

      EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  *--qhead = bb;
	  bb->aux = bb;
	});
    }
  else
    {
      for (i = 0; i < n_basic_blocks; ++i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  *--qhead = bb;
	  bb->aux = bb;
	}
    }

  /* We clean aux when we remove the initially-enqueued bbs, but we
     don't enqueue ENTRY and EXIT initially, so clean them upfront and
     unconditionally.  */
  ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;

  if (blocks_out)
    sbitmap_zero (blocks_out);

  /* We work through the queue until there are no more blocks.  What
     is live at the end of this block is precisely the union of what
     is live at the beginning of all its successors.  So, we set its
     GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
     for its successors.  Then, we compute GLOBAL_LIVE_AT_START for
     this block by walking through the instructions in this block in
     reverse order and updating as we go.  If that changed
     GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
     queue; they will now need to recalculate GLOBAL_LIVE_AT_END.

     We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
     never shrinks.  If a register appears in GLOBAL_LIVE_AT_START, it
     must either be live at the end of the block, or used within the
     block.  In the latter case, it will certainly never disappear
     from GLOBAL_LIVE_AT_START.  In the former case, the register
     could go away only if it disappeared from GLOBAL_LIVE_AT_START
     for one of the successor blocks.  By induction, that cannot
     occur.  */
  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      /* Dequeue from the circular buffer; clearing AUX marks the
	 block as no longer queued.  */
      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);

      if (bb->succ)
	for (e = bb->succ; e; e = e->succ_next)
	  {
	    basic_block sb = e->dest;

	    /* Call-clobbered registers die across exception and
	       call edges.  */
	    /* ??? Abnormal call edges ignored for the moment, as this gets
	       confused by sibling call edges, which crashes reg-stack.  */
	    if (e->flags & EDGE_EH)
	      {
		bitmap_operation (tmp, sb->global_live_at_start,
				  call_used, BITMAP_AND_COMPL);
		IOR_REG_SET (new_live_at_end, tmp);
	      }
	    else
	      IOR_REG_SET (new_live_at_end, sb->global_live_at_start);

	    /* If a target saves one register in another (instead of on
	       the stack) the save register will need to be live for EH.  */
	    if (e->flags & EDGE_EH)
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (EH_USES (i))
		  SET_REGNO_REG_SET (new_live_at_end, i);
	  }
      else
	{
	  /* This might be a noreturn function that throws.  And
	     even if it isn't, getting the unwind info right helps
	     debugging.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (EH_USES (i))
	      SET_REGNO_REG_SET (new_live_at_end, i);
	}

      /* The all-important stack pointer must always be live.  */
      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);

      /* Before reload, there are a few registers that must be forced
	 live everywhere -- which might not already be the case for
	 blocks within infinite loops.  */
      if (! reload_completed)
	{
	  /* Any reference to any pseudo before reload is a potential
	     reference of the frame pointer.  */
	  SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  /* Pseudos with argument area equivalences may require
	     reloading via the argument pointer.  */
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif

	  /* Any constant, or pseudo with constant equivalences, may
	     require reloading from memory using the pic register.  */
	  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
	}

      /* Regs used in phi nodes are not included in
	 global_live_at_start, since they are live only along a
	 particular edge.  Set those regs that are live because of a
	 phi node alternative corresponding to this particular block.  */
      if (in_ssa_form)
	for_each_successor_phi (bb, &set_phi_alternative_reg,
				new_live_at_end);

      /* ENTRY has no insns to scan; just record its live_at_end.  */
      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by local_set NULL.  On subsequent passes, we
	 get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
	{
	  bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependent on live_at_end.  */
	  CLEAR_REG_SET (tmp);
	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
				     new_live_at_end, BITMAP_AND_COMPL);

	  if (! rescan)
	    {
	      /* If any of the registers in the new live_at_end set are
		 conditionally set in this basic block, we must rescan.
	         This is because conditional lifetimes at the end of the
		 block do not just take the live_at_end set into account,
		 but also the liveness at the start of each successor
		 block.  We can miss changes in those sets if we only
		 compare the new live_at_end against the previous one.  */
	      CLEAR_REG_SET (tmp);
	      rescan = bitmap_operation (tmp, new_live_at_end,
					 bb->cond_local_set, BITMAP_AND);
	    }

	  if (! rescan)
	    {
	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      CLEAR_REG_SET (tmp);
	      changed = bitmap_operation (tmp, bb->global_live_at_end,
					  new_live_at_end, BITMAP_XOR);
	      if (! changed)
		continue;

	      /* If any of the changed bits overlap with local_set,
		 we'll have to rescan the block.  Detect overlap by
		 the AND with ~local_set turning off bits.  */
	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
					 BITMAP_AND_COMPL);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      if (blocks_out)
	SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
			    BITMAP_AND_COMPL);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  changed = bitmap_operation (bb->global_live_at_start,
				      bb->global_live_at_start,
				      tmp, BITMAP_IOR);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end, bb->local_set,
			   bb->cond_local_set, flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      for (e = bb->pred; e; e = e->pred_next)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);
  FREE_REG_SET (call_used);

  /* Release the per-block scratch sets allocated above; with a
     BLOCKS_OUT mask only the touched blocks have them.  */
  if (blocks_out)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	});
    }
  else
    {
      for (i = n_basic_blocks - 1; i >= 0; --i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	}
    }

  free (queue);
}
1344

/* This structure is used to pass parameters to and from the
   function find_regno_partial ().  It is used to pass in the
   register number we are looking for, as well as to return any
   rtx that we find.  */

typedef struct {
  unsigned regno_to_find;	/* Input: register number to search for.  */
  rtx retval;			/* Output: partial-reg rtx found, or NULL_RTX.  */
} find_regno_partial_param;
1355
1356
1357/* Find the rtx for the reg numbers specified in 'data' if it is
1358   part of an expression which only uses part of the register.  Return
1359   it in the structure passed in.  */
1360static int
1361find_regno_partial (ptr, data)
1362     rtx *ptr;
1363     void *data;
1364{
1365  find_regno_partial_param *param = (find_regno_partial_param *)data;
1366  unsigned reg = param->regno_to_find;
1367  param->retval = NULL_RTX;
1368
1369  if (*ptr == NULL_RTX)
1370    return 0;
1371
1372  switch (GET_CODE (*ptr))
1373    {
1374    case ZERO_EXTRACT:
1375    case SIGN_EXTRACT:
1376    case STRICT_LOW_PART:
1377      if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
1378	{
1379	  param->retval = XEXP (*ptr, 0);
1380	  return 1;
1381	}
1382      break;
1383
1384    case SUBREG:
1385      if (GET_CODE (SUBREG_REG (*ptr)) == REG
1386	  && REGNO (SUBREG_REG (*ptr)) == reg)
1387	{
1388	  param->retval = SUBREG_REG (*ptr);
1389	  return 1;
1390	}
1391      break;
1392
1393    default:
1394      break;
1395    }
1396
1397  return 0;
1398}
1399
/* Process all immediate successors of the entry block looking for pseudo
   registers which are live on entry. Find all of those whose first
   instance is a partial register reference of some kind, and initialize
   them to 0 after the entry block.  This will prevent bit sets within
   registers whose value is unknown, and may contain some kind of sticky
   bits we don't want.  Returns nonzero if any initializing moves were
   inserted.  */

int
initialize_uninitialized_subregs ()
{
  rtx insn;
  edge e;
  int reg, did_something = 0;
  find_regno_partial_param param;

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      basic_block bb = e->dest;
      regset map = bb->global_live_at_start;
      /* Only pseudos matter; starting at FIRST_PSEUDO_REGISTER skips
	 all hard registers.  */
      EXECUTE_IF_SET_IN_REG_SET (map,
				 FIRST_PSEUDO_REGISTER, reg,
	{
	  int uid = REGNO_FIRST_UID (reg);
	  rtx i;

	  /* Find an insn which mentions the register we are looking for.
	     Its preferable to have an instance of the register's rtl since
	     there may be various flags set which we need to duplicate.
	     If we can't find it, its probably an automatic whose initial
	     value doesn't matter, or hopefully something we don't care about.  */
	  for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
	    ;
	  if (i != NULL_RTX)
	    {
	      /* Found the insn, now get the REG rtx, if we can.  */
	      param.regno_to_find = reg;
	      for_each_rtx (&i, find_regno_partial, &param);
	      if (param.retval != NULL_RTX)
		{
		  /* Queue a "reg = 0" move on the entry edge; it is
		     physically emitted by commit_edge_insertions below.  */
		  insn = gen_move_insn (param.retval,
				        CONST0_RTX (GET_MODE (param.retval)));
		  insert_insn_on_edge (insn, e);
		  did_something = 1;
		}
	    }
	});
    }

  if (did_something)
    commit_edge_insertions ();
  return did_something;
}
1452
1453
1454/* Subroutines of life analysis.  */
1455
1456/* Allocate the permanent data structures that represent the results
1457   of life analysis.  Not static since used also for stupid life analysis.  */
1458
1459void
1460allocate_bb_life_data ()
1461{
1462  int i;
1463
1464  for (i = 0; i < n_basic_blocks; i++)
1465    {
1466      basic_block bb = BASIC_BLOCK (i);
1467
1468      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1469      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1470    }
1471
1472  ENTRY_BLOCK_PTR->global_live_at_end
1473    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1474  EXIT_BLOCK_PTR->global_live_at_start
1475    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1476
1477  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1478}
1479
1480void
1481allocate_reg_life_data ()
1482{
1483  int i;
1484
1485  max_regno = max_reg_num ();
1486
1487  /* Recalculate the register space, in case it has grown.  Old style
1488     vector oriented regsets would set regset_{size,bytes} here also.  */
1489  allocate_reg_info (max_regno, FALSE, FALSE);
1490
1491  /* Reset all the data we'll collect in propagate_block and its
1492     subroutines.  */
1493  for (i = 0; i < max_regno; i++)
1494    {
1495      REG_N_SETS (i) = 0;
1496      REG_N_REFS (i) = 0;
1497      REG_N_DEATHS (i) = 0;
1498      REG_N_CALLS_CROSSED (i) = 0;
1499      REG_LIVE_LENGTH (i) = 0;
1500      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1501    }
1502}
1503
1504/* Delete dead instructions for propagate_block.  */
1505
1506static void
1507propagate_block_delete_insn (bb, insn)
1508     basic_block bb;
1509     rtx insn;
1510{
1511  rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1512  bool purge = false;
1513
1514  /* If the insn referred to a label, and that label was attached to
1515     an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
1516     pretty much mandatory to delete it, because the ADDR_VEC may be
1517     referencing labels that no longer exist.
1518
1519     INSN may reference a deleted label, particularly when a jump
1520     table has been optimized into a direct jump.  There's no
1521     real good way to fix up the reference to the deleted label
1522     when the label is deleted, so we just allow it here.
1523
1524     After dead code elimination is complete, we do search for
1525     any REG_LABEL notes which reference deleted labels as a
1526     sanity check.  */
1527
1528  if (inote && GET_CODE (inote) == CODE_LABEL)
1529    {
1530      rtx label = XEXP (inote, 0);
1531      rtx next;
1532
1533      /* The label may be forced if it has been put in the constant
1534	 pool.  If that is the only use we must discard the table
1535	 jump following it, but not the label itself.  */
1536      if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1537	  && (next = next_nonnote_insn (label)) != NULL
1538	  && GET_CODE (next) == JUMP_INSN
1539	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
1540	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1541	{
1542	  rtx pat = PATTERN (next);
1543	  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1544	  int len = XVECLEN (pat, diff_vec_p);
1545	  int i;
1546
1547	  for (i = 0; i < len; i++)
1548	    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1549
1550	  delete_insn (next);
1551	}
1552    }
1553
1554  if (bb->end == insn)
1555    purge = true;
1556  delete_insn (insn);
1557  if (purge)
1558    purge_dead_edges (bb);
1559}
1560
1561/* Delete dead libcalls for propagate_block.  Return the insn
1562   before the libcall.  */
1563
1564static rtx
1565propagate_block_delete_libcall ( insn, note)
1566     rtx insn, note;
1567{
1568  rtx first = XEXP (note, 0);
1569  rtx before = PREV_INSN (first);
1570
1571  delete_insn_chain (first, insn);
1572  return before;
1573}
1574
/* Update the life-status of regs for one insn.  Return the previous insn.
   PBI carries the current liveness state (walking the block backward);
   the returned insn is where the backward scan resumes, which lets a
   callee delete INSN or whole libcall sequences.  */

rtx
propagate_one_insn (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  int flags = pbi->flags;
  int insn_is_dead = 0;
  int libcall_is_dead = 0;
  rtx note;
  int i;

  if (! INSN_P (insn))
    return prev;

  /* NOTE, if set, marks INSN as the final insn of a libcall sequence.  */
  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
      libcall_is_dead = (insn_is_dead && note != 0
			 && libcall_dead_p (pbi, note, insn));
    }

  /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
    {
      /* If we're trying to delete a prologue or epilogue instruction
	 that isn't flagged as possibly being dead, something is wrong.
	 But if we are keeping the stack pointer depressed, we might well
	 be deleting insns that are used to compute the amount to update
	 it by, so they are fine.  */
      if (reload_completed
	  && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
		&& (TYPE_RETURNS_STACK_DEPRESSED
		    (TREE_TYPE (current_function_decl))))
	  && (((HAVE_epilogue || HAVE_prologue)
	       && prologue_epilogue_contains (insn))
	      || (HAVE_sibcall_epilogue
		  && sibcall_epilogue_contains (insn)))
	  && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
	fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* CC0 is now known to be dead.  Either this insn used it,
	 in which case it doesn't anymore, or clobbered it,
	 so the next insn can't use it.  */
      pbi->cc0_live = 0;

      if (libcall_is_dead)
	prev = propagate_block_delete_libcall ( insn, note);
      else
	{

	  /* If INSN contains a RETVAL note and is dead, but the libcall
	     as a whole is not dead, then we want to remove INSN, but
	     not the whole libcall sequence.

	     However, we need to also remove the dangling REG_LIBCALL
	     note so that we do not have mis-matched LIBCALL/RETVAL
	     notes.  In theory we could find a new location for the
	     REG_RETVAL note, but it hardly seems worth the effort.

	     NOTE at this point will be the RETVAL note if it exists.  */
	  if (note)
	    {
	      rtx libcall_note;

	      libcall_note
		= find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      remove_note (XEXP (note, 0), libcall_note);
	    }

	  /* Similarly if INSN contains a LIBCALL note, remove the
	     dangling REG_RETVAL note.  */
	  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
	  if (note)
	    {
	      rtx retval_note;

	      retval_note
		= find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      remove_note (XEXP (note, 0), retval_note);
	    }

	  /* Now delete INSN.  */
	  propagate_block_delete_insn (pbi->bb, insn);
	}

      return prev;
    }

  /* See if this is an increment or decrement that can be merged into
     a following memory address.  */
#ifdef AUTO_INC_DEC
  {
    rtx x = single_set (insn);

    /* Does this instruction increment or decrement a register?  */
    if ((flags & PROP_AUTOINC)
	&& x != 0
	&& GET_CODE (SET_DEST (x)) == REG
	&& (GET_CODE (SET_SRC (x)) == PLUS
	    || GET_CODE (SET_SRC (x)) == MINUS)
	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	/* Ok, look for a following memory ref we can combine with.
	   If one is found, change the memory ref to a PRE_INC
	   or PRE_DEC, cancel this insn, and return 1.
	   Return 0 if nothing has been done.  */
	&& try_pre_increment_1 (pbi, insn))
      return prev;
  }
#endif /* AUTO_INC_DEC */

  CLEAR_REG_SET (pbi->new_set);

  /* If this is not the final pass, and this insn is copying the value of
     a library call and it's dead, don't scan the insns that perform the
     library call, so that the call's arguments are not marked live.  */
  if (libcall_is_dead)
    {
      /* Record the death of the dest reg.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* Skip the whole libcall sequence: resume before its first insn.  */
      insn = XEXP (note, 0);
      return PREV_INSN (insn);
    }
  else if (GET_CODE (PATTERN (insn)) == SET
	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
    /* We have an insn to pop a constant amount off the stack.
       (Such insns use PLUS regardless of the direction of the stack,
       and any insn to adjust the stack by a constant is always a pop.)
       These insns, if not dead stores, have no effect on life.  */
    ;
  else
    {
      rtx note;
      /* Any regs live at the time of a call instruction must not go
	 in a register clobbered by calls.  Find all regs now live and
	 record this for them.  */

      if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
				   { REG_N_CALLS_CROSSED (i)++; });

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      if (GET_CODE (insn) == CALL_INSN)
	{
	  int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Non-constant calls clobber memory.  */
	  if (! CONST_OR_PURE_CALL_P (insn))
	    {
	      free_EXPR_LIST_list (&pbi->mem_set_list);
	      pbi->mem_set_list_len = 0;
	    }

	  /* There may be extra registers to be clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
			  cond, insn, pbi->flags);

	  /* Calls change all call-used and global registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		/* We do not want REG_UNUSED notes for these registers.  */
		mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
			    cond, insn,
			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
	      }
	}

      /* If an insn doesn't use CC0, it becomes dead since we assume
	 that every insn clobbers it.  So show it dead here;
	 mark_used_regs will set it live if it is referenced.  */
      pbi->cc0_live = 0;

      /* Record uses.  */
      if (! insn_is_dead)
	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
      if ((flags & PROP_EQUAL_NOTES)
	  && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
	      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
	mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);

      /* Sometimes we may have inserted something before INSN (such as a move)
	 when we make an auto-inc.  So ensure we will scan those insns.  */
#ifdef AUTO_INC_DEC
      prev = PREV_INSN (insn);
#endif

      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
	{
	  int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Calls use their arguments.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == USE)
	      mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
			      cond, insn);

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);

	  /* Calls may also reference any of the global registers,
	     so they are made live.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
			     cond, insn);
	}
    }

  /* On final pass, update counts of how many insns in which each reg
     is live.  */
  if (flags & PROP_REG_INFO)
    EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
			       { REG_LIVE_LENGTH (i)++; });

  return prev;
}
1824
1825/* Initialize a propagate_block_info struct for public consumption.
1826   Note that the structure itself is opaque to this file, but that
1827   the user can use the regsets provided here.  */
1828
struct propagate_block_info *
init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
     basic_block bb;
     regset live, local_set, cond_local_set;
     int flags;
{
  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));

  /* LIVE, LOCAL_SET and COND_LOCAL_SET are owned by the caller; the
     auxiliary structures created below are owned by PBI and released
     by free_propagate_block_info.  */
  pbi->bb = bb;
  pbi->reg_live = live;
  pbi->mem_set_list = NULL_RTX;
  pbi->mem_set_list_len = 0;
  pbi->local_set = local_set;
  pbi->cond_local_set = cond_local_set;
  pbi->cc0_live = 0;
  pbi->flags = flags;

  /* The register next-use map is only needed when building LOG_LINKS
     or searching for autoincrement opportunities.  */
  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
  else
    pbi->reg_next_use = NULL;

  pbi->new_set = BITMAP_XMALLOC ();

#ifdef HAVE_conditional_execution
  /* Map from register number to reg_cond_life_info; entries are freed
     via free_reg_cond_life_info when the tree is deleted.  */
  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
				       free_reg_cond_life_info);
  pbi->reg_cond_reg = BITMAP_XMALLOC ();

  /* If this block ends in a conditional branch, for each register live
     from one side of the branch and not the other, record the register
     as conditionally dead.  */
  if (GET_CODE (bb->end) == JUMP_INSN
      && any_condjump_p (bb->end))
    {
      regset_head diff_head;
      regset diff = INITIALIZE_REG_SET (diff_head);
      basic_block bb_true, bb_false;
      rtx cond_true, cond_false, set_src;
      int i;

      /* Identify the successor blocks.  */
      bb_true = bb->succ->dest;
      if (bb->succ->succ_next != NULL)
	{
	  bb_false = bb->succ->succ_next->dest;

	  /* The fallthru edge is the "false" arm; swap if the first
	     successor is the fallthru one.  */
	  if (bb->succ->flags & EDGE_FALLTHRU)
	    {
	      basic_block t = bb_false;
	      bb_false = bb_true;
	      bb_true = t;
	    }
	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
	    abort ();
	}
      else
	{
	  /* This can happen with a conditional jump to the next insn.  */
	  if (JUMP_LABEL (bb->end) != bb_true->head)
	    abort ();

	  /* Simplest way to do nothing.  */
	  bb_false = bb_true;
	}

      /* Extract the condition from the branch.  */
      set_src = SET_SRC (pc_set (bb->end));
      cond_true = XEXP (set_src, 0);
      cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
				   GET_MODE (cond_true), XEXP (cond_true, 0),
				   XEXP (cond_true, 1));
      /* If the taken arm of the IF_THEN_ELSE is PC, the branch falls
	 through on the condition, so the senses are reversed.  */
      if (GET_CODE (XEXP (set_src, 1)) == PC)
	{
	  rtx t = cond_false;
	  cond_false = cond_true;
	  cond_true = t;
	}

      /* Compute which register lead different lives in the successors.  */
      if (bitmap_operation (diff, bb_true->global_live_at_start,
			    bb_false->global_live_at_start, BITMAP_XOR))
	{
	  rtx reg = XEXP (cond_true, 0);

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (GET_CODE (reg) != REG)
	    abort ();

	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));

	  /* For each such register, mark it conditionally dead.  */
	  EXECUTE_IF_SET_IN_REG_SET
	    (diff, 0, i,
	     {
	       struct reg_cond_life_info *rcli;
	       rtx cond;

	       rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));

	       /* A register live only on the true arm is dead under the
		  false condition, and vice versa.  */
	       if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
		 cond = cond_false;
	       else
		 cond = cond_true;
	       rcli->condition = cond;
	       rcli->stores = const0_rtx;
	       rcli->orig_condition = cond;

	       splay_tree_insert (pbi->reg_cond_dead, i,
				  (splay_tree_value) rcli);
	     });
	}

      FREE_REG_SET (diff);
    }
#endif

  /* If this block has no successors, any stores to the frame that aren't
     used later in the block are dead.  So make a pass over the block
     recording any such that are made and show them dead at the end.  We do
     a very conservative and simple job here.  */
  if (optimize
      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	    && (TYPE_RETURNS_STACK_DEPRESSED
		(TREE_TYPE (current_function_decl))))
      && (flags & PROP_SCAN_DEAD_CODE)
      && (bb->succ == NULL
	  || (bb->succ->succ_next == NULL
	      && bb->succ->dest == EXIT_BLOCK_PTR
	      && ! current_function_calls_eh_return)))
    {
      rtx insn, set;
      for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == INSN
	    && (set = single_set (insn))
	    && GET_CODE (SET_DEST (set)) == MEM)
	  {
	    rtx mem = SET_DEST (set);
	    rtx canon_mem = canon_rtx (mem);

	    /* This optimization is performed by faking a store to the
	       memory at the end of the block.  This doesn't work for
	       unchanging memories because multiple stores to unchanging
	       memory is illegal and alias analysis doesn't consider it.  */
	    if (RTX_UNCHANGING_P (canon_mem))
	      continue;

	    /* Only frame-pointer-relative addresses (with a constant
	       offset) are simple enough to track conservatively.  */
	    if (XEXP (canon_mem, 0) == frame_pointer_rtx
		|| (GET_CODE (XEXP (canon_mem, 0)) == PLUS
		    && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
		    && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
	      add_to_mem_set_list (pbi, canon_mem);
	  }
    }

  return pbi;
}
1988
1989/* Release a propagate_block_info struct.  */
1990
1991void
1992free_propagate_block_info (pbi)
1993     struct propagate_block_info *pbi;
1994{
1995  free_EXPR_LIST_list (&pbi->mem_set_list);
1996
1997  BITMAP_XFREE (pbi->new_set);
1998
1999#ifdef HAVE_conditional_execution
2000  splay_tree_delete (pbi->reg_cond_dead);
2001  BITMAP_XFREE (pbi->reg_cond_reg);
2002#endif
2003
2004  if (pbi->reg_next_use)
2005    free (pbi->reg_next_use);
2006
2007  free (pbi);
2008}
2009
2010/* Compute the registers live at the beginning of a basic block BB from
2011   those live at the end.
2012
2013   When called, REG_LIVE contains those live at the end.  On return, it
2014   contains those live at the beginning.
2015
2016   LOCAL_SET, if non-null, will be set with all registers killed
2017   unconditionally by this basic block.
2018   Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
2019   killed conditionally by this basic block.  If there is any unconditional
2020   set of a register, then the corresponding bit will be set in LOCAL_SET
2021   and cleared in COND_LOCAL_SET.
2022   It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set.  In this
2023   case, the resulting set will be equal to the union of the two sets that
2024   would otherwise be computed.
2025
2026   Return non-zero if an INSN is deleted (i.e. by dead code removal).  */
2027
2028int
2029propagate_block (bb, live, local_set, cond_local_set, flags)
2030     basic_block bb;
2031     regset live;
2032     regset local_set;
2033     regset cond_local_set;
2034     int flags;
2035{
2036  struct propagate_block_info *pbi;
2037  rtx insn, prev;
2038  int changed;
2039
2040  pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2041
2042  if (flags & PROP_REG_INFO)
2043    {
2044      int i;
2045
2046      /* Process the regs live at the end of the block.
2047	 Mark them as not local to any one basic block.  */
2048      EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
2049				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2050    }
2051
2052  /* Scan the block an insn at a time from end to beginning.  */
2053
2054  changed = 0;
2055  for (insn = bb->end;; insn = prev)
2056    {
2057      /* If this is a call to `setjmp' et al, warn if any
2058	 non-volatile datum is live.  */
2059      if ((flags & PROP_REG_INFO)
2060	  && GET_CODE (insn) == CALL_INSN
2061	  && find_reg_note (insn, REG_SETJMP, NULL))
2062	IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2063
2064      prev = propagate_one_insn (pbi, insn);
2065      changed |= NEXT_INSN (prev) != insn;
2066
2067      if (insn == bb->head)
2068	break;
2069    }
2070
2071  free_propagate_block_info (pbi);
2072
2073  return changed;
2074}
2075
2076/* Return 1 if X (the body of an insn, or part of it) is just dead stores
2077   (SET expressions whose destinations are registers dead after the insn).
2078   NEEDED is the regset that says which regs are alive after the insn.
2079
2080   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2081
2082   If X is the entire body of an insn, NOTES contains the reg notes
2083   pertaining to the insn.  */
2084
static int
insn_dead_p (pbi, x, call_ok, notes)
     struct propagate_block_info *pbi;
     rtx x;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

#ifdef AUTO_INC_DEC
  /* As flow is invoked after combine, we must take existing AUTO_INC
     expressions into account.  */
  for (; notes; notes = XEXP (notes, 1))
    {
      if (REG_NOTE_KIND (notes) == REG_INC)
	{
	  int regno = REGNO (XEXP (notes, 0));

	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      || REGNO_REG_SET_P (pbi->reg_live, regno))
	    return 0;
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

#ifdef HAVE_cc0
      /* A set of the condition code is dead only if nothing later in
	 the block uses it (tracked in cc0_live by mark_used_regs).  */
      if (GET_CODE (r) == CC0)
	return ! pbi->cc0_live;
#endif

      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  if (! call_ok)
	    return 0;
	}

      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
	return 0;

      if (GET_CODE (r) == MEM)
	{
	  rtx temp, canon_r;

	  /* Volatile or BLKmode stores can never be proven dead.  */
	  if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
	    return 0;

	  canon_r = canon_rtx (r);

	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  Since
	     rtx_equal_p does not check the alias set or flags, we also
	     must have the potential for them to conflict (anti_dependence).  */
	  for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
	    if (anti_dependence (r, XEXP (temp, 0)))
	      {
		rtx mem = XEXP (temp, 0);

		/* The later store covers this one if it writes the same
		   address with at least as wide a mode.  */
		if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
		    && (GET_MODE_SIZE (GET_MODE (canon_r))
			<= GET_MODE_SIZE (GET_MODE (mem))))
		  return 1;

#ifdef AUTO_INC_DEC
		/* Check if memory reference matches an auto increment. Only
		   post increment/decrement or modify are valid.  */
		if (GET_MODE (mem) == GET_MODE (r)
		    && (GET_CODE (XEXP (mem, 0)) == POST_DEC
			|| GET_CODE (XEXP (mem, 0)) == POST_INC
			|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
		    && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
		    && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
		  return 1;
#endif
	      }
	}
      else
	{
	  /* Strip wrappers down to the underlying register, if any.  */
	  while (GET_CODE (r) == SUBREG
		 || GET_CODE (r) == STRICT_LOW_PART
		 || GET_CODE (r) == ZERO_EXTRACT)
	    r = XEXP (r, 0);

	  if (GET_CODE (r) == REG)
	    {
	      int regno = REGNO (r);

	      /* Obvious.  */
	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
		return 0;

	      /* If this is a hard register, verify that subsequent
		 words are not needed.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

		  while (--n > 0)
		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
		      return 0;
		}

	      /* Don't delete insns to set global regs.  */
	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		return 0;

	      /* Make sure insns to set the stack pointer aren't deleted.  */
	      if (regno == STACK_POINTER_REGNUM)
		return 0;

	      /* ??? These bits might be redundant with the force live bits
		 in calculate_global_regs_live.  We would delete from
		 sequential sets; whether this actually affects real code
		 for anything but the stack pointer I don't know.  */
	      /* Make sure insns to set the frame pointer aren't deleted.  */
	      if (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      if (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE
		 for it, so we can treat it normally).  */
	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
		return 0;
#endif

	      /* Otherwise, the set is dead.  */
	      return 1;
	    }
	}
    }

  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */
  return 0;
}
2262
2263/* If INSN is the last insn in a libcall, and assuming INSN is dead,
2264   return 1 if the entire library call is dead.
2265   This is true if INSN copies a register (hard or pseudo)
2266   and if the hard return reg of the call insn is dead.
2267   (The caller should have tested the destination of the SET inside
2268   INSN already for death.)
2269
2270   If this insn doesn't just copy a register, then we don't
2271   have an ordinary libcall.  In that case, cse could not have
2272   managed to substitute the source for the dest later on,
2273   so we can assume the libcall is dead.
2274
2275   PBI is the block info giving pseudoregs live before this insn.
2276   NOTE is the REG_RETVAL note of the insn.  */
2277
static int
libcall_dead_p (pbi, note, insn)
     struct propagate_block_info *pbi;
     rtx note;
     rtx insn;
{
  rtx x = single_set (insn);

  if (x)
    {
      rtx r = SET_SRC (x);

      /* An ordinary libcall ends with a plain register copy; anything
	 else means cse substituted something, so assume dead.  */
      if (GET_CODE (r) == REG)
	{
	  rtx call = XEXP (note, 0);
	  rtx call_pat;
	  int i;

	  /* Find the call insn.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call_pat = PATTERN (call);
	  if (GET_CODE (call_pat) == PARALLEL)
	    {
	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call_pat = XVECEXP (call_pat, 0, i);
	    }

	  /* The whole libcall is dead iff the call's own SET is dead;
	     CALL_OK is passed as 1 since a call here is expected.  */
	  return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
	}
    }
  return 1;
}
2329
2330/* Return 1 if register REGNO was used before it was set, i.e. if it is
2331   live at function entry.  Don't count global register variables, variables
2332   in registers that can be used for function arg passing, or variables in
2333   fixed hard registers.  */
2334
2335int
2336regno_uninitialized (regno)
2337     unsigned int regno;
2338{
2339  if (n_basic_blocks == 0
2340      || (regno < FIRST_PSEUDO_REGISTER
2341	  && (global_regs[regno]
2342	      || fixed_regs[regno]
2343	      || FUNCTION_ARG_REGNO_P (regno))))
2344    return 0;
2345
2346  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
2347}
2348
2349/* 1 if register REGNO was alive at a place where `setjmp' was called
2350   and was set more than once or is an argument.
2351   Such regs may be clobbered by `longjmp'.  */
2352
2353int
2354regno_clobbered_at_setjmp (regno)
2355     int regno;
2356{
2357  if (n_basic_blocks == 0)
2358    return 0;
2359
2360  return ((REG_N_SETS (regno) > 1
2361	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
2362	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2363}
2364
2365/* Add MEM to PBI->MEM_SET_LIST.  MEM should be canonical.  Respect the
2366   maximal list size; look for overlaps in mode and select the largest.  */
static void
add_to_mem_set_list (pbi, mem)
     struct propagate_block_info *pbi;
     rtx mem;
{
  rtx i;

  /* We don't know how large a BLKmode store is, so we must not
     take them into consideration.  */
  if (GET_MODE (mem) == BLKmode)
    return;

  /* Look for an existing entry with the same address; if found, keep
     whichever of the two has the wider mode and return.  */
  for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
    {
      rtx e = XEXP (i, 0);
      if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
	{
	  if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
	    {
#ifdef AUTO_INC_DEC
	      /* If we must store a copy of the mem, we can just modify
		 the mode of the stored copy.  */
	      if (pbi->flags & PROP_AUTOINC)
	        PUT_MODE (e, GET_MODE (mem));
	      else
#endif
	        XEXP (i, 0) = mem;
	    }
	  return;
	}
    }

  /* No match: append a new entry, but only while below the list-size
     cap (entries past the cap are simply not tracked).  */
  if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
    {
#ifdef AUTO_INC_DEC
      /* Store a copy of mem, otherwise the address may be
	 scrogged by find_auto_inc.  */
      if (pbi->flags & PROP_AUTOINC)
	mem = shallow_copy_rtx (mem);
#endif
      pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
      pbi->mem_set_list_len++;
    }
}
2411
2412/* INSN references memory, possibly using autoincrement addressing modes.
2413   Find any entries on the mem_set_list that need to be invalidated due
2414   to an address change.  */
2415
2416static void
2417invalidate_mems_from_autoinc (pbi, insn)
2418     struct propagate_block_info *pbi;
2419     rtx insn;
2420{
2421  rtx note = REG_NOTES (insn);
2422  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2423    if (REG_NOTE_KIND (note) == REG_INC)
2424      invalidate_mems_from_set (pbi, XEXP (note, 0));
2425}
2426
2427/* EXP is a REG.  Remove any dependent entries from pbi->mem_set_list.  */
2428
2429static void
2430invalidate_mems_from_set (pbi, exp)
2431     struct propagate_block_info *pbi;
2432     rtx exp;
2433{
2434  rtx temp = pbi->mem_set_list;
2435  rtx prev = NULL_RTX;
2436  rtx next;
2437
2438  while (temp)
2439    {
2440      next = XEXP (temp, 1);
2441      if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2442	{
2443	  /* Splice this entry out of the list.  */
2444	  if (prev)
2445	    XEXP (prev, 1) = next;
2446	  else
2447	    pbi->mem_set_list = next;
2448	  free_EXPR_LIST_node (temp);
2449	  pbi->mem_set_list_len--;
2450	}
2451      else
2452	prev = temp;
2453      temp = next;
2454    }
2455}
2456
2457/* Process the registers that are set within X.  Their bits are set to
2458   1 in the regset DEAD, because they are dead prior to this insn.
2459
2460   If INSN is nonzero, it is the insn being processed.
2461
2462   FLAGS is the set of operations to perform.  */
2463
static void
mark_set_regs (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x, insn;
{
  rtx cond = NULL_RTX;
  rtx link;
  enum rtx_code code;

  /* Registers autoincremented by INSN are sets too; each is reported
     by a REG_INC note.  */
  if (insn)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      {
	if (REG_NOTE_KIND (link) == REG_INC)
	  mark_set_1 (pbi, SET, XEXP (link, 0),
		      (GET_CODE (x) == COND_EXEC
		       ? COND_EXEC_TEST (x) : NULL_RTX),
		      insn, pbi->flags);
      }
 retry:
  switch (code = GET_CODE (x))
    {
    case SET:
    case CLOBBER:
      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
      return;

    case COND_EXEC:
      /* Remember the guard condition and re-dispatch on the guarded
	 body.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PARALLEL:
      {
	int i;

	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  {
	    rtx sub = XVECEXP (x, 0, i);
	    switch (code = GET_CODE (sub))
	      {
	      case COND_EXEC:
		/* Nested or repeated COND_EXECs inside one PARALLEL are
		   not expected.  */
		if (cond != NULL_RTX)
		  abort ();

		cond = COND_EXEC_TEST (sub);
		sub = COND_EXEC_CODE (sub);
		if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
		  break;
		/* Fall through.  */

	      case SET:
	      case CLOBBER:
		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
		break;

	      default:
		break;
	      }
	  }
	break;
      }

    default:
      break;
    }
}
2530
2531/* Process a single set, which appears in INSN.  REG (which may not
2532   actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2533   being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2534   If the set is conditional (because it appear in a COND_EXEC), COND
2535   will be the condition.  */
2536
2537static void
2538mark_set_1 (pbi, code, reg, cond, insn, flags)
2539     struct propagate_block_info *pbi;
2540     enum rtx_code code;
2541     rtx reg, cond, insn;
2542     int flags;
2543{
2544  int regno_first = -1, regno_last = -1;
2545  unsigned long not_dead = 0;
2546  int i;
2547
2548  /* Modifying just one hardware register of a multi-reg value or just a
2549     byte field of a register does not mean the value from before this insn
2550     is now dead.  Of course, if it was dead after it's unused now.  */
2551
2552  switch (GET_CODE (reg))
2553    {
2554    case PARALLEL:
2555      /* Some targets place small structures in registers for return values of
2556	 functions.  We have to detect this case specially here to get correct
2557	 flow information.  */
2558      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2559	if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2560	  mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2561		      flags);
2562      return;
2563
2564    case ZERO_EXTRACT:
2565    case SIGN_EXTRACT:
2566    case STRICT_LOW_PART:
2567      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
2568      do
2569	reg = XEXP (reg, 0);
2570      while (GET_CODE (reg) == SUBREG
2571	     || GET_CODE (reg) == ZERO_EXTRACT
2572	     || GET_CODE (reg) == SIGN_EXTRACT
2573	     || GET_CODE (reg) == STRICT_LOW_PART);
2574      if (GET_CODE (reg) == MEM)
2575	break;
2576      not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
2577      /* Fall through.  */
2578
2579    case REG:
2580      regno_last = regno_first = REGNO (reg);
2581      if (regno_first < FIRST_PSEUDO_REGISTER)
2582	regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
2583      break;
2584
2585    case SUBREG:
2586      if (GET_CODE (SUBREG_REG (reg)) == REG)
2587	{
2588	  enum machine_mode outer_mode = GET_MODE (reg);
2589	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2590
2591	  /* Identify the range of registers affected.  This is moderately
2592	     tricky for hard registers.  See alter_subreg.  */
2593
2594	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
2595	  if (regno_first < FIRST_PSEUDO_REGISTER)
2596	    {
2597	      regno_first += subreg_regno_offset (regno_first, inner_mode,
2598						  SUBREG_BYTE (reg),
2599						  outer_mode);
2600	      regno_last = (regno_first
2601			    + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
2602
2603	      /* Since we've just adjusted the register number ranges, make
2604		 sure REG matches.  Otherwise some_was_live will be clear
2605		 when it shouldn't have been, and we'll create incorrect
2606		 REG_UNUSED notes.  */
2607	      reg = gen_rtx_REG (outer_mode, regno_first);
2608	    }
2609	  else
2610	    {
2611	      /* If the number of words in the subreg is less than the number
2612		 of words in the full register, we have a well-defined partial
2613		 set.  Otherwise the high bits are undefined.
2614
2615		 This is only really applicable to pseudos, since we just took
2616		 care of multi-word hard registers.  */
2617	      if (((GET_MODE_SIZE (outer_mode)
2618		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2619		  < ((GET_MODE_SIZE (inner_mode)
2620		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2621		not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2622							    regno_first);
2623
2624	      reg = SUBREG_REG (reg);
2625	    }
2626	}
2627      else
2628	reg = SUBREG_REG (reg);
2629      break;
2630
2631    default:
2632      break;
2633    }
2634
2635  /* If this set is a MEM, then it kills any aliased writes.
2636     If this set is a REG, then it kills any MEMs which use the reg.  */
2637  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2638    {
2639      if (GET_CODE (reg) == REG)
2640	invalidate_mems_from_set (pbi, reg);
2641
2642      /* If the memory reference had embedded side effects (autoincrement
2643	 address modes.  Then we may need to kill some entries on the
2644	 memory set list.  */
2645      if (insn && GET_CODE (reg) == MEM)
2646	invalidate_mems_from_autoinc (pbi, insn);
2647
2648      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2649	  /* ??? With more effort we could track conditional memory life.  */
2650	  && ! cond
2651	  /* There are no REG_INC notes for SP, so we can't assume we'll see
2652	     everything that invalidates it.  To be safe, don't eliminate any
2653	     stores though SP; none of them should be redundant anyway.  */
2654	  && ! reg_mentioned_p (stack_pointer_rtx, reg))
2655        add_to_mem_set_list (pbi, canon_rtx (reg));
2656    }
2657
2658  if (GET_CODE (reg) == REG
2659      && ! (regno_first == FRAME_POINTER_REGNUM
2660	    && (! reload_completed || frame_pointer_needed))
2661#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2662      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2663	    && (! reload_completed || frame_pointer_needed))
2664#endif
2665#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2666      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2667#endif
2668      )
2669    {
2670      int some_was_live = 0, some_was_dead = 0;
2671
2672      for (i = regno_first; i <= regno_last; ++i)
2673	{
2674	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2675	  if (pbi->local_set)
2676	    {
2677	      /* Order of the set operation matters here since both
2678		 sets may be the same.  */
2679	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2680	      if (cond != NULL_RTX
2681		  && ! REGNO_REG_SET_P (pbi->local_set, i))
2682		SET_REGNO_REG_SET (pbi->cond_local_set, i);
2683	      else
2684		SET_REGNO_REG_SET (pbi->local_set, i);
2685	    }
2686	  if (code != CLOBBER)
2687	    SET_REGNO_REG_SET (pbi->new_set, i);
2688
2689	  some_was_live |= needed_regno;
2690	  some_was_dead |= ! needed_regno;
2691	}
2692
2693#ifdef HAVE_conditional_execution
2694      /* Consider conditional death in deciding that the register needs
2695	 a death note.  */
2696      if (some_was_live && ! not_dead
2697	  /* The stack pointer is never dead.  Well, not strictly true,
2698	     but it's very difficult to tell from here.  Hopefully
2699	     combine_stack_adjustments will fix up the most egregious
2700	     errors.  */
2701	  && regno_first != STACK_POINTER_REGNUM)
2702	{
2703	  for (i = regno_first; i <= regno_last; ++i)
2704	    if (! mark_regno_cond_dead (pbi, i, cond))
2705	      not_dead |= ((unsigned long) 1) << (i - regno_first);
2706	}
2707#endif
2708
2709      /* Additional data to record if this is the final pass.  */
2710      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2711		   | PROP_DEATH_NOTES | PROP_AUTOINC))
2712	{
2713	  rtx y;
2714	  int blocknum = pbi->bb->index;
2715
2716	  y = NULL_RTX;
2717	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2718	    {
2719	      y = pbi->reg_next_use[regno_first];
2720
2721	      /* The next use is no longer next, since a store intervenes.  */
2722	      for (i = regno_first; i <= regno_last; ++i)
2723		pbi->reg_next_use[i] = 0;
2724	    }
2725
2726	  if (flags & PROP_REG_INFO)
2727	    {
2728	      for (i = regno_first; i <= regno_last; ++i)
2729		{
2730		  /* Count (weighted) references, stores, etc.  This counts a
2731		     register twice if it is modified, but that is correct.  */
2732		  REG_N_SETS (i) += 1;
2733		  REG_N_REFS (i) += 1;
2734		  REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2735
2736	          /* The insns where a reg is live are normally counted
2737		     elsewhere, but we want the count to include the insn
2738		     where the reg is set, and the normal counting mechanism
2739		     would not count it.  */
2740		  REG_LIVE_LENGTH (i) += 1;
2741		}
2742
2743	      /* If this is a hard reg, record this function uses the reg.  */
2744	      if (regno_first < FIRST_PSEUDO_REGISTER)
2745		{
2746		  for (i = regno_first; i <= regno_last; i++)
2747		    regs_ever_live[i] = 1;
2748		}
2749	      else
2750		{
2751		  /* Keep track of which basic blocks each reg appears in.  */
2752		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2753		    REG_BASIC_BLOCK (regno_first) = blocknum;
2754		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2755		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
2756		}
2757	    }
2758
2759	  if (! some_was_dead)
2760	    {
2761	      if (flags & PROP_LOG_LINKS)
2762		{
2763		  /* Make a logical link from the next following insn
2764		     that uses this register, back to this insn.
2765		     The following insns have already been processed.
2766
2767		     We don't build a LOG_LINK for hard registers containing
2768		     in ASM_OPERANDs.  If these registers get replaced,
2769		     we might wind up changing the semantics of the insn,
2770		     even if reload can make what appear to be valid
2771		     assignments later.  */
2772		  if (y && (BLOCK_NUM (y) == blocknum)
2773		      && (regno_first >= FIRST_PSEUDO_REGISTER
2774			  || asm_noperands (PATTERN (y)) < 0))
2775		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
2776		}
2777	    }
2778	  else if (not_dead)
2779	    ;
2780	  else if (! some_was_live)
2781	    {
2782	      if (flags & PROP_REG_INFO)
2783		REG_N_DEATHS (regno_first) += 1;
2784
2785	      if (flags & PROP_DEATH_NOTES)
2786		{
2787		  /* Note that dead stores have already been deleted
2788		     when possible.  If we get here, we have found a
2789		     dead store that cannot be eliminated (because the
2790		     same insn does something useful).  Indicate this
2791		     by marking the reg being set as dying here.  */
2792		  REG_NOTES (insn)
2793		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2794		}
2795	    }
2796	  else
2797	    {
2798	      if (flags & PROP_DEATH_NOTES)
2799		{
2800		  /* This is a case where we have a multi-word hard register
2801		     and some, but not all, of the words of the register are
2802		     needed in subsequent insns.  Write REG_UNUSED notes
2803		     for those parts that were not needed.  This case should
2804		     be rare.  */
2805
2806		  for (i = regno_first; i <= regno_last; ++i)
2807		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
2808		      REG_NOTES (insn)
2809			= alloc_EXPR_LIST (REG_UNUSED,
2810					   gen_rtx_REG (reg_raw_mode[i], i),
2811					   REG_NOTES (insn));
2812		}
2813	    }
2814	}
2815
2816      /* Mark the register as being dead.  */
2817      if (some_was_live
2818	  /* The stack pointer is never dead.  Well, not strictly true,
2819	     but it's very difficult to tell from here.  Hopefully
2820	     combine_stack_adjustments will fix up the most egregious
2821	     errors.  */
2822	  && regno_first != STACK_POINTER_REGNUM)
2823	{
2824	  for (i = regno_first; i <= regno_last; ++i)
2825	    if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2826	      CLEAR_REGNO_REG_SET (pbi->reg_live, i);
2827	}
2828    }
2829  else if (GET_CODE (reg) == REG)
2830    {
2831      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2832	pbi->reg_next_use[regno_first] = 0;
2833    }
2834
2835  /* If this is the last pass and this is a SCRATCH, show it will be dying
2836     here and count it.  */
2837  else if (GET_CODE (reg) == SCRATCH)
2838    {
2839      if (flags & PROP_DEATH_NOTES)
2840	REG_NOTES (insn)
2841	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2842    }
2843}
2844
2845#ifdef HAVE_conditional_execution
/* Mark REGNO conditionally dead under the condition COND (a comparison
   of a predicate register against zero), or unconditionally dead when
   COND is NULL_RTX.
   Return true if the register is now unconditionally dead.  */

static int
mark_regno_cond_dead (pbi, regno, cond)
     struct propagate_block_info *pbi;
     int regno;
     rtx cond;
{
  /* If this is a store to a predicate register, the value of the
     predicate is changing, we don't know that the predicate as seen
     before is the same as that seen after.  Flush all dependent
     conditions from reg_cond_dead.  This will make all such
     conditionally live registers unconditionally live.  */
  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
    flush_reg_cond_reg (pbi, regno);

  /* If this is an unconditional store, remove any conditional
     life that may have existed.  */
  if (cond == NULL_RTX)
    splay_tree_remove (pbi->reg_cond_dead, regno);
  else
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      /* Otherwise this is a conditional set.  Record that fact.
	 It may have been conditionally used, or there may be a
	 subsequent set with a complementary condition.  */

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node == NULL)
	{
	  /* The register was unconditionally live previously.
	     Record the current condition as the condition under
	     which it is dead.  */
	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
	  rcli->condition = cond;
	  rcli->stores = cond;
	  rcli->orig_condition = const0_rtx;
	  splay_tree_insert (pbi->reg_cond_dead, regno,
			     (splay_tree_value) rcli);

	  /* Remember which predicate register COND tests so that a later
	     store to it will flush this entry (see the check above).  */
	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	  /* Not unconditionally dead.  */
	  return 0;
	}
      else
	{
	  /* The register was conditionally live previously.
	     Add the new condition to the old.  */
	  rcli = (struct reg_cond_life_info *) node->value;
	  ncond = rcli->condition;
	  ncond = ior_reg_cond (ncond, cond, 1);
	  if (rcli->stores == const0_rtx)
	    rcli->stores = cond;
	  else if (rcli->stores != const1_rtx)
	    rcli->stores = ior_reg_cond (rcli->stores, cond, 1);

	  /* If the register is now unconditionally dead, remove the entry
	     in the splay_tree.  A register is unconditionally dead if the
	     dead condition ncond is true.  A register is also unconditionally
	     dead if the sum of all conditional stores is an unconditional
	     store (stores is true), and the dead condition is identically the
	     same as the original dead condition initialized at the end of
	     the block.  This is a pointer compare, not an rtx_equal_p
	     compare.  */
	  if (ncond == const1_rtx
	      || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
	    splay_tree_remove (pbi->reg_cond_dead, regno);
	  else
	    {
	      rcli->condition = ncond;

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	      /* Not unconditionally dead.  */
	      return 0;
	    }
	}
    }

  return 1;
}
2932
2933/* Called from splay_tree_delete for pbi->reg_cond_life.  */
2934
2935static void
2936free_reg_cond_life_info (value)
2937     splay_tree_value value;
2938{
2939  struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
2940  free (rcli);
2941}
2942
/* Helper function for flush_reg_cond_reg, invoked for each node via
   splay_tree_foreach.  DATA points to a pair of ints: data[0] is the
   register number being flushed, data[1] is the key of the last node
   found to have a false condition (or -1 initially).  Returning -1
   stops the traversal so the caller can delete that node; returning 0
   continues.  */

static int
flush_reg_cond_reg_1 (node, data)
     splay_tree_node node;
     void *data;
{
  struct reg_cond_life_info *rcli;
  int *xdata = (int *) data;
  unsigned int regno = xdata[0];

  /* Don't need to search if last flushed value was farther on in
     the in-order traversal.  */
  if (xdata[1] >= (int) node->key)
    return 0;

  /* Splice out portions of the expression that refer to regno.  */
  rcli = (struct reg_cond_life_info *) node->value;
  rcli->condition = elim_reg_cond (rcli->condition, regno);
  if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
    rcli->stores = elim_reg_cond (rcli->stores, regno);

  /* If the entire condition is now false, signal the node to be removed.  */
  if (rcli->condition == const0_rtx)
    {
      xdata[1] = node->key;
      return -1;
    }
  else if (rcli->condition == const1_rtx)
    /* An always-true death condition here would mean the conditional
       life data is inconsistent.  */
    abort ();

  return 0;
}
2976
2977/* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.  */
2978
2979static void
2980flush_reg_cond_reg (pbi, regno)
2981     struct propagate_block_info *pbi;
2982     int regno;
2983{
2984  int pair[2];
2985
2986  pair[0] = regno;
2987  pair[1] = -1;
2988  while (splay_tree_foreach (pbi->reg_cond_dead,
2989			     flush_reg_cond_reg_1, pair) == -1)
2990    splay_tree_remove (pbi->reg_cond_dead, pair[1]);
2991
2992  CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
2993}
2994
2995/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
2996   For ior/and, the ADD flag determines whether we want to add the new
2997   condition X to the old one unconditionally.  If it is zero, we will
2998   only return a new expression if X allows us to simplify part of
2999   OLD, otherwise we return NULL to the caller.
3000   If ADD is nonzero, we will return a new condition in all cases.  The
3001   toplevel caller of one of these functions should always pass 1 for
3002   ADD.  */
3003
/* Compute the logical OR of conditions OLD and X, simplifying where
   possible.  See the block comment above for the meaning of ADD.
   Simplification results are compared by pointer identity against
   const0_rtx/const1_rtx, matching the conventions of the callers.  */

static rtx
ior_reg_cond (old, x, add)
     rtx old, x;
     int add;
{
  rtx op0, op1;

  /* OLD is a bare comparison of a predicate register against zero.  */
  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
    {
      /* A condition ORed with its reverse on the same register is
	 identically true.  */
      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
	  && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const1_rtx;
      /* A | A == A.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* (A | B) | X:  try to fold X into either operand.  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) | x ~ (x | A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) | x ~ (A | x).  */
	    return old;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case AND:
      /* (A & B) | X distributes to (A | X) & (B | X).  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) | x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) | x ~ x.  */
	    return op1;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case NOT:
      /* (~A) | X == ~(A & ~X) by De Morgan.  */
      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    default:
      abort ();
    }
}
3093
3094static rtx
3095not_reg_cond (x)
3096     rtx x;
3097{
3098  enum rtx_code x_code;
3099
3100  if (x == const0_rtx)
3101    return const1_rtx;
3102  else if (x == const1_rtx)
3103    return const0_rtx;
3104  x_code = GET_CODE (x);
3105  if (x_code == NOT)
3106    return XEXP (x, 0);
3107  if (GET_RTX_CLASS (x_code) == '<'
3108      && GET_CODE (XEXP (x, 0)) == REG)
3109    {
3110      if (XEXP (x, 1) != const0_rtx)
3111	abort ();
3112
3113      return gen_rtx_fmt_ee (reverse_condition (x_code),
3114			     VOIDmode, XEXP (x, 0), const0_rtx);
3115    }
3116  return gen_rtx_NOT (0, x);
3117}
3118
3119static rtx
3120and_reg_cond (old, x, add)
3121     rtx old, x;
3122     int add;
3123{
3124  rtx op0, op1;
3125
3126  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
3127    {
3128      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3129	  && GET_CODE (x) == reverse_condition (GET_CODE (old))
3130	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3131	return const0_rtx;
3132      if (GET_CODE (x) == GET_CODE (old)
3133	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3134	return old;
3135      if (! add)
3136	return NULL;
3137      return gen_rtx_AND (0, old, x);
3138    }
3139
3140  switch (GET_CODE (old))
3141    {
3142    case IOR:
3143      op0 = and_reg_cond (XEXP (old, 0), x, 0);
3144      op1 = and_reg_cond (XEXP (old, 1), x, 0);
3145      if (op0 != NULL || op1 != NULL)
3146	{
3147	  if (op0 == const0_rtx)
3148	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3149	  if (op1 == const0_rtx)
3150	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3151	  if (op0 == const1_rtx || op1 == const1_rtx)
3152	    return const1_rtx;
3153	  if (op0 == NULL)
3154	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3155	  else if (rtx_equal_p (x, op0))
3156	    /* (x | A) & x ~ x.  */
3157	    return op0;
3158	  if (op1 == NULL)
3159	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3160	  else if (rtx_equal_p (x, op1))
3161	    /* (A | x) & x ~ x.  */
3162	    return op1;
3163	  return gen_rtx_IOR (0, op0, op1);
3164	}
3165      if (! add)
3166	return NULL;
3167      return gen_rtx_AND (0, old, x);
3168
3169    case AND:
3170      op0 = and_reg_cond (XEXP (old, 0), x, 0);
3171      op1 = and_reg_cond (XEXP (old, 1), x, 0);
3172      if (op0 != NULL || op1 != NULL)
3173	{
3174	  if (op0 == const1_rtx)
3175	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3176	  if (op1 == const1_rtx)
3177	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3178	  if (op0 == const0_rtx || op1 == const0_rtx)
3179	    return const0_rtx;
3180	  if (op0 == NULL)
3181	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3182	  else if (rtx_equal_p (x, op0))
3183	    /* (x & A) & x ~ (x & A).  */
3184	    return old;
3185	  if (op1 == NULL)
3186	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3187	  else if (rtx_equal_p (x, op1))
3188	    /* (A & x) & x ~ (A & x).  */
3189	    return old;
3190	  return gen_rtx_AND (0, op0, op1);
3191	}
3192      if (! add)
3193	return NULL;
3194      return gen_rtx_AND (0, old, x);
3195
3196    case NOT:
3197      op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3198      if (op0 != NULL)
3199	return not_reg_cond (op0);
3200      if (! add)
3201	return NULL;
3202      return gen_rtx_AND (0, old, x);
3203
3204    default:
3205      abort ();
3206    }
3207}
3208
3209/* Given a condition X, remove references to reg REGNO and return the
3210   new condition.  The removal will be done so that all conditions
3211   involving REGNO are considered to evaluate to false.  This function
3212   is used when the value of REGNO changes.  */
3213
3214static rtx
3215elim_reg_cond (x, regno)
3216     rtx x;
3217     unsigned int regno;
3218{
3219  rtx op0, op1;
3220
3221  if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3222    {
3223      if (REGNO (XEXP (x, 0)) == regno)
3224	return const0_rtx;
3225      return x;
3226    }
3227
3228  switch (GET_CODE (x))
3229    {
3230    case AND:
3231      op0 = elim_reg_cond (XEXP (x, 0), regno);
3232      op1 = elim_reg_cond (XEXP (x, 1), regno);
3233      if (op0 == const0_rtx || op1 == const0_rtx)
3234	return const0_rtx;
3235      if (op0 == const1_rtx)
3236	return op1;
3237      if (op1 == const1_rtx)
3238	return op0;
3239      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3240	return x;
3241      return gen_rtx_AND (0, op0, op1);
3242
3243    case IOR:
3244      op0 = elim_reg_cond (XEXP (x, 0), regno);
3245      op1 = elim_reg_cond (XEXP (x, 1), regno);
3246      if (op0 == const1_rtx || op1 == const1_rtx)
3247	return const1_rtx;
3248      if (op0 == const0_rtx)
3249	return op1;
3250      if (op1 == const0_rtx)
3251	return op0;
3252      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3253	return x;
3254      return gen_rtx_IOR (0, op0, op1);
3255
3256    case NOT:
3257      op0 = elim_reg_cond (XEXP (x, 0), regno);
3258      if (op0 == const0_rtx)
3259	return const1_rtx;
3260      if (op0 == const1_rtx)
3261	return const0_rtx;
3262      if (op0 != XEXP (x, 0))
3263	return not_reg_cond (op0);
3264      return x;
3265
3266    default:
3267      abort ();
3268    }
3269}
3270#endif /* HAVE_conditional_execution */
3271
3272#ifdef AUTO_INC_DEC
3273
/* Try to substitute the auto-inc expression INC as the address inside
   MEM which occurs in INSN.  Currently, the address of MEM is an expression
   involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
   that has a single set whose source is a PLUS of INCR_REG and something
   else.  On success, the address of MEM is replaced by INC and INCR is
   rewritten into a plain copy (or deleted when it becomes a no-op).  */

static void
attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
     struct propagate_block_info *pbi;
     rtx inc, insn, mem, incr, incr_reg;
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  /* Which operand of the PLUS in Y is INCR_REG.  */
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
	 we can't, we are done.  Otherwise, we will do any
	 needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
	return;
    }
  else if (GET_CODE (q) == REG
	   /* PREV_INSN used here to check the semi-open interval
	      [insn,incr).  */
	   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
	   /* We must also check for sets of q as q may be
	      a call clobbered hard register and there may
	      be a call between PREV_INSN (insn) and incr.  */
	   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
	 Both p and q must be live afterward,
	 and q is not used between INSN and its assignment.
	 Change it to q = p, ...*q..., q = q+size.
	 Then fall into the usual case.  */
      rtx insns, temp;

      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      /* If we can't make the auto-inc, or can't make the
	 replacement into Y, exit.  There's no point in making
	 the change below if we can't do the auto-inc and doing
	 so is not correct in the pre-inc case.  */

      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
	return;

      /* We now know we'll be doing this change, so emit the
	 new insn(s) and do the updates.  */
      emit_insns_before (insns, insn);

      /* Keep the basic block's head pointer valid if we inserted
	 before the first insn of the block.  */
      if (pbi->bb->head == insn)
	pbi->bb->head = insns;

      /* INCR will become a NOTE and INSN won't contain a
	 use of INCR_REG.  If a use of INCR_REG was just placed in
	 the insn before INSN, make that the next use.
	 Otherwise, invalidate it.  */
      if (GET_CODE (PREV_INSN (insn)) == INSN
	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
	pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
	pbi->reg_next_use[regno] = 0;

      /* From here on, operate on Q as the register being auto-inc'd.  */
      incr_reg = q;
      regno = REGNO (q);

      /* REGNO is now used in INCR which is below INSN, but
	 it previously wasn't live here.  If we don't mark
	 it as live, we'll put a REG_DEAD note for it
	 on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
	 that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
	if (GET_CODE (temp) == CALL_INSN)
	  REG_N_CALLS_CROSSED (regno)++;

      /* Invalidate alias info for Q since we just changed its value.  */
      clear_reg_alias_info (q);
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
    abort ();

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
	{
	  remove_note (incr, note);
	  if (XEXP (note, 0) != incr_reg)
	    CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
	}

      /* Turn the increment insn into a deleted-insn note in place
	 rather than unlinking it from the chain.  */
      PUT_CODE (incr, NOTE);
      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (incr) = 0;
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
	 incremented, spilling it is worse, so we want to make
	 that less likely.  */
      REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);

      /* Count the increment as a setting of the register,
	 even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
3420
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference.  The candidate increment insn is found through
   pbi->reg_next_use; attempt_auto_inc performs the actual rewrite.  */

static void
find_auto_inc (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  int size = GET_MODE_SIZE (GET_MODE (x));

  if (GET_CODE (insn) == JUMP_INSN)
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) != REG)
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment? */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  /* One operand of the PLUS must be ADDR itself; the other is the
     increment amount.  */
  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      /* Match the constant increment and the MEM's displacement
	 against each auto-modification mode the target supports.  */
      if (HAVE_POST_INCREMENT
	  && (INTVAL (inc_val) == size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_INCREMENT
	       && (INTVAL (inc_val) == size && offset == size))
	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == -size))
	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
  else if (GET_CODE (inc_val) == REG
	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
				   NEXT_INSN (incr)))

    {
      /* A register increment amount can only be expressed by
	 POST_MODIFY with a register addend, and only when the addend
	 register is unchanged between INSN and INCR.  */
      if (HAVE_POST_MODIFY_REG && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
}
3507
3508#endif /* AUTO_INC_DEC */
3509
/* Record a use of hard or pseudo register REG in INSN, conditional on
   COND when this is a conditionally executed use.  Depending on
   pbi->flags this updates the next-use table, register statistics
   (regs_ever_live, REG_BASIC_BLOCK, REG_FREQ, REG_N_REFS), REG_DEAD
   notes, the live-register set, and the conditional-death splay tree.  */

static void
mark_used_reg (pbi, reg, cond, insn)
     struct propagate_block_info *pbi;
     rtx reg;
     rtx cond ATTRIBUTE_UNUSED;
     rtx insn;
{
  unsigned int regno_first, regno_last, i;
  int some_was_live, some_was_dead, some_not_set;

  /* A hard register in a multi-word mode occupies several consecutive
     register numbers; handle the whole range.  */
  regno_last = regno_first = REGNO (reg);
  if (regno_first < FIRST_PSEUDO_REGISTER)
    regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;

  /* Find out if any of this register is live after this instruction.  */
  some_was_live = some_was_dead = 0;
  for (i = regno_first; i <= regno_last; ++i)
    {
      int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
      some_was_live |= needed_regno;
      some_was_dead |= ! needed_regno;
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = 0;
  for (i = regno_first; i <= regno_last; ++i)
    some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
	 the next insn that uses it.  */
      pbi->reg_next_use[regno_first] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno_first < FIRST_PSEUDO_REGISTER)
	{
	  /* If this is a register we are going to try to eliminate,
	     don't mark it live here.  If we are successful in
	     eliminating it, it need not be live unless it is used for
	     pseudos, in which case it will have been set live when it
	     was allocated to the pseudos.  If the register will not
	     be eliminated, reload will set it live at that point.

	     Otherwise, record that this function uses this register.  */
	  /* ??? The PPC backend tries to "eliminate" on the pic
	     register to itself.  This should be fixed.  In the mean
	     time, hack around it.  */

	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
	         && (regno_first == FRAME_POINTER_REGNUM
		     || regno_first == ARG_POINTER_REGNUM)))
	    for (i = regno_first; i <= regno_last; ++i)
	      regs_ever_live[i] = 1;
	}
      else
	{
	  /* Keep track of which basic block each reg appears in.  */

	  int blocknum = pbi->bb->index;
	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
	    REG_BASIC_BLOCK (regno_first) = blocknum;
	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;

	  /* Count (weighted) number of uses of each reg.  */
	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_REFS (regno_first)++;
	}
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
	 overlaps the register set by this insn.  */
      if (regno_first != regno_last)
	for (i = regno_first; i <= regno_last; ++i)
	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);

      /* If none of the words in X is needed, make a REG_DEAD note.
	 Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
	{
	  if ((pbi->flags & PROP_DEATH_NOTES)
	      && ! find_regno_note (insn, REG_DEAD, regno_first))
	    REG_NOTES (insn)
	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

	  if (pbi->flags & PROP_REG_INFO)
	    REG_N_DEATHS (regno_first)++;
	}
      else
	{
	  /* Don't make a REG_DEAD note for a part of a register
	     that is set in the insn.  */
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
		&& ! dead_or_set_regno_p (insn, i))
	      REG_NOTES (insn)
		= alloc_EXPR_LIST (REG_DEAD,
				   gen_rtx_REG (reg_raw_mode[i], i),
				   REG_NOTES (insn));
	}
    }

  /* Mark the register as being live.  */
  for (i = regno_first; i <= regno_last; ++i)
    {
#ifdef HAVE_conditional_execution
      int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
#endif

      SET_REGNO_REG_SET (pbi->reg_live, i);

#ifdef HAVE_conditional_execution
      /* If this is a conditional use, record that fact.  If it is later
	 conditionally set, we'll know to kill the register.  */
      if (cond != NULL_RTX)
	{
	  splay_tree_node node;
	  struct reg_cond_life_info *rcli;
	  rtx ncond;

	  if (this_was_live)
	    {
	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
	      if (node == NULL)
		{
		  /* The register was unconditionally live previously.
		     No need to do anything.  */
		}
	      else
		{
		  /* The register was conditionally live previously.
		     Subtract the new life cond from the old death cond.  */
		  rcli = (struct reg_cond_life_info *) node->value;
		  ncond = rcli->condition;
		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);

		  /* If the register is now unconditionally live,
		     remove the entry in the splay_tree.  */
		  if (ncond == const0_rtx)
		    splay_tree_remove (pbi->reg_cond_dead, i);
		  else
		    {
		      rcli->condition = ncond;
		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
					 REGNO (XEXP (cond, 0)));
		    }
		}
	    }
	  else
	    {
	      /* The register was not previously live at all.  Record
		 the condition under which it is still dead.  */
	      rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
	      rcli->condition = not_reg_cond (cond);
	      rcli->stores = const0_rtx;
	      rcli->orig_condition = const0_rtx;
	      splay_tree_insert (pbi->reg_cond_dead, i,
				 (splay_tree_value) rcli);

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
	    }
	}
      else if (this_was_live)
	{
	  /* The register may have been conditionally live previously, but
	     is now unconditionally live.  Remove it from the conditionally
	     dead list, so that a conditional set won't cause us to think
	     it dead.  */
	  splay_tree_remove (pbi->reg_cond_dead, i);
	}
#endif
    }
}
3694
/* Scan expression X and mark each register it uses as live; each use is
   recorded via mark_used_reg, which sets the register's bit in
   PBI->REG_LIVE.  COND, if nonnull, is the COND_EXEC condition under
   which X is executed; conditional uses are tracked so that a later
   conditional set can kill the register.

   INSN is the containing instruction.  If INSN is dead, this function
   is not called.  */

static void
mark_used_regs (pbi, x, cond, insn)
     struct propagate_block_info *pbi;
     rtx x, cond, insn;
{
  RTX_CODE code;
  int regno;
  int flags = pbi->flags;

 retry:
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants and labels reference no registers.  */
      return;

#ifdef HAVE_cc0
    case CC0:
      pbi->cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
      return;

    case MEM:
      /* Don't bother watching stores to mems if this is not the
	 final pass.  We'll not be deleting dead stores this round.  */
      if (optimize && (flags & PROP_SCAN_DEAD_CODE))
	{
	  /* Invalidate the data for the last MEM stored, but only if MEM is
	     something that can be stored into.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	    /* Needn't clear the memory set list.  */
	    ;
	  else
	    {
	      rtx temp = pbi->mem_set_list;
	      rtx prev = NULL_RTX;
	      rtx next;

	      /* Drop from the remembered-stores list every entry this
		 read may depend on (anti_dependence); those stores are
		 not dead after all.  */
	      while (temp)
		{
		  next = XEXP (temp, 1);
		  if (anti_dependence (XEXP (temp, 0), x))
		    {
		      /* Splice temp out of the list.  */
		      if (prev)
			XEXP (prev, 1) = next;
		      else
			pbi->mem_set_list = next;
		      free_EXPR_LIST_node (temp);
		      pbi->mem_set_list_len--;
		    }
		  else
		    prev = temp;
		  temp = next;
		}
	    }

	  /* If the memory reference has embedded side effects (autoincrement
	     address modes), we may need to kill some entries on the
	     memory set list.  */
	  if (insn)
	    invalidate_mems_from_autoinc (pbi, insn);
	}

#ifdef AUTO_INC_DEC
      if (flags & PROP_AUTOINC)
        find_auto_inc (pbi, x, insn);
#endif
      /* Fall out of the switch so the address expression is scanned by
	 the generic operand walk below.  */
      break;

    case SUBREG:
#ifdef CLASS_CANNOT_CHANGE_MODE
      /* Record that this pseudo appears in a SUBREG whose mode change is
	 problematic for its class; see CLASS_CANNOT_CHANGE_MODE_P.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
	  && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
					 GET_MODE (SUBREG_REG (x))))
	REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
#endif

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);
      if (GET_CODE (x) != REG)
	goto retry;
      /* Fall through.  */

    case REG:
      /* See a register other than being set => mark it as needed.  */
      mark_used_reg (pbi, x, cond, insn);
      return;

    case SET:
      {
	rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
#ifdef AUTO_INC_DEC
	    if (flags & PROP_AUTOINC)
	      find_auto_inc (pbi, testreg, insn);
#endif
	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
#ifdef CLASS_CANNOT_CHANGE_MODE
	    if (GET_CODE (testreg) == SUBREG
		&& GET_CODE (SUBREG_REG (testreg)) == REG
		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
		&& CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
					       GET_MODE (testreg)))
	      REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
#endif

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !((REG_BYTES (SUBREG_REG (testreg))
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD
		     > (REG_BYTES (testreg)
			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register or group of registers,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || (GET_CODE (testreg) == REG
		&& (regno = REGNO (testreg),
		    ! (regno == FRAME_POINTER_REGNUM
		       && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		&& ! (regno == HARD_FRAME_POINTER_REGNUM
		      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		))
	  {
	    if (mark_dest)
	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }
	/* NOTE(review): a store into a still-needed frame/arg pointer
	   falls through to the generic scan below, which marks the
	   destination register itself as used — apparently to keep such
	   pointers live; confirm against mark_used_reg's callers.  */
      }
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be considered to use
	   and clobber all hard registers, all pseudo-registers and all of
	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu rounding
	   mode.  An insn should not be moved across this even if it only uses
	   pseudo-regs because it might give an incorrectly rounded result.

	   ?!? Unfortunately, marking all hard registers as live causes massive
	   problems for the register allocator and marking all pseudos as live
	   creates mountains of uninitialized variable warnings.

	   So for now, just clear the memory set list and mark any regs
	   we can find in ASM_OPERANDS as used.  */
	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
	  {
	    free_EXPR_LIST_list (&pbi->mem_set_list);
	    pbi->mem_set_list_len = 0;
	  }

	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
	   We can not just fall through here since then we would be confused
	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	   traditional asms unlike their normal usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
	  }
	break;
      }

    case COND_EXEC:
      /* Nested COND_EXECs are not expected.  */
      if (cond != NULL_RTX)
	abort ();

      /* The test itself is evaluated unconditionally ...  */
      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);

      /* ... while the guarded body is used only under the test.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PHI:
      /* We _do_not_ want to scan operands of phi nodes.  Operands of
	 a phi function are evaluated only when control reaches this
	 block along a particular edge.  Therefore, regs that appear
	 as arguments to phi should not be added to the global live at
	 start.  */
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char * const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
	  }
      }
  }
}
3978
3979#ifdef AUTO_INC_DEC
3980
3981static int
3982try_pre_increment_1 (pbi, insn)
3983     struct propagate_block_info *pbi;
3984     rtx insn;
3985{
3986  /* Find the next use of this reg.  If in same basic block,
3987     make it do pre-increment or pre-decrement if appropriate.  */
3988  rtx x = single_set (insn);
3989  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
3990			  * INTVAL (XEXP (SET_SRC (x), 1)));
3991  int regno = REGNO (SET_DEST (x));
3992  rtx y = pbi->reg_next_use[regno];
3993  if (y != 0
3994      && SET_DEST (x) != stack_pointer_rtx
3995      && BLOCK_NUM (y) == BLOCK_NUM (insn)
3996      /* Don't do this if the reg dies, or gets set in y; a standard addressing
3997	 mode would be better.  */
3998      && ! dead_or_set_p (y, SET_DEST (x))
3999      && try_pre_increment (y, SET_DEST (x), amount))
4000    {
4001      /* We have found a suitable auto-increment and already changed
4002	 insn Y to do it.  So flush this increment instruction.  */
4003      propagate_block_delete_insn (pbi->bb, insn);
4004
4005      /* Count a reference to this reg for the increment insn we are
4006	 deleting.  When a reg is incremented, spilling it is worse,
4007	 so we want to make that less likely.  */
4008      if (regno >= FIRST_PSEUDO_REGISTER)
4009	{
4010	  REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
4011	  REG_N_SETS (regno)++;
4012	}
4013
4014      /* Flush any remembered memories depending on the value of
4015	 the incremented register.  */
4016      invalidate_mems_from_set (pbi, SET_DEST (x));
4017
4018      return 1;
4019    }
4020  return 0;
4021}
4022
4023/* Try to change INSN so that it does pre-increment or pre-decrement
4024   addressing on register REG in order to add AMOUNT to REG.
4025   AMOUNT is negative for pre-decrement.
4026   Returns 1 if the change could be made.
4027   This checks all about the validity of the result of modifying INSN.  */
4028
4029static int
4030try_pre_increment (insn, reg, amount)
4031     rtx insn, reg;
4032     HOST_WIDE_INT amount;
4033{
4034  rtx use;
4035
4036  /* Nonzero if we can try to make a pre-increment or pre-decrement.
4037     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4038  int pre_ok = 0;
4039  /* Nonzero if we can try to make a post-increment or post-decrement.
4040     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4041     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4042     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4043  int post_ok = 0;
4044
4045  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4046  int do_post = 0;
4047
4048  /* From the sign of increment, see which possibilities are conceivable
4049     on this target machine.  */
4050  if (HAVE_PRE_INCREMENT && amount > 0)
4051    pre_ok = 1;
4052  if (HAVE_POST_INCREMENT && amount > 0)
4053    post_ok = 1;
4054
4055  if (HAVE_PRE_DECREMENT && amount < 0)
4056    pre_ok = 1;
4057  if (HAVE_POST_DECREMENT && amount < 0)
4058    post_ok = 1;
4059
4060  if (! (pre_ok || post_ok))
4061    return 0;
4062
4063  /* It is not safe to add a side effect to a jump insn
4064     because if the incremented register is spilled and must be reloaded
4065     there would be no way to store the incremented value back in memory.  */
4066
4067  if (GET_CODE (insn) == JUMP_INSN)
4068    return 0;
4069
4070  use = 0;
4071  if (pre_ok)
4072    use = find_use_as_address (PATTERN (insn), reg, 0);
4073  if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
4074    {
4075      use = find_use_as_address (PATTERN (insn), reg, -amount);
4076      do_post = 1;
4077    }
4078
4079  if (use == 0 || use == (rtx) (size_t) 1)
4080    return 0;
4081
4082  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4083    return 0;
4084
4085  /* See if this combination of instruction and addressing mode exists.  */
4086  if (! validate_change (insn, &XEXP (use, 0),
4087			 gen_rtx_fmt_e (amount > 0
4088					? (do_post ? POST_INC : PRE_INC)
4089					: (do_post ? POST_DEC : PRE_DEC),
4090					Pmode, reg), 0))
4091    return 0;
4092
4093  /* Record that this insn now has an implicit side effect on X.  */
4094  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4095  return 1;
4096}
4097
4098#endif /* AUTO_INC_DEC */
4099
4100/* Find the place in the rtx X where REG is used as a memory address.
4101   Return the MEM rtx that so uses it.
4102   If PLUSCONST is nonzero, search instead for a memory address equivalent to
4103   (plus REG (const_int PLUSCONST)).
4104
4105   If such an address does not appear, return 0.
4106   If REG appears more than once, or is used other than in such an address,
4107   return (rtx) 1.  */
4108
4109rtx
4110find_use_as_address (x, reg, plusconst)
4111     rtx x;
4112     rtx reg;
4113     HOST_WIDE_INT plusconst;
4114{
4115  enum rtx_code code = GET_CODE (x);
4116  const char * const fmt = GET_RTX_FORMAT (code);
4117  int i;
4118  rtx value = 0;
4119  rtx tem;
4120
4121  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4122    return x;
4123
4124  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4125      && XEXP (XEXP (x, 0), 0) == reg
4126      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4127      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4128    return x;
4129
4130  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4131    {
4132      /* If REG occurs inside a MEM used in a bit-field reference,
4133	 that is unacceptable.  */
4134      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4135	return (rtx) (size_t) 1;
4136    }
4137
4138  if (x == reg)
4139    return (rtx) (size_t) 1;
4140
4141  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4142    {
4143      if (fmt[i] == 'e')
4144	{
4145	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4146	  if (value == 0)
4147	    value = tem;
4148	  else if (tem != 0)
4149	    return (rtx) (size_t) 1;
4150	}
4151      else if (fmt[i] == 'E')
4152	{
4153	  int j;
4154	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4155	    {
4156	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4157	      if (value == 0)
4158		value = tem;
4159	      else if (tem != 0)
4160		return (rtx) (size_t) 1;
4161	    }
4162	}
4163    }
4164
4165  return value;
4166}
4167
4168/* Write information about registers and basic blocks into FILE.
4169   This is part of making a debugging dump.  */
4170
4171void
4172dump_regset (r, outf)
4173     regset r;
4174     FILE *outf;
4175{
4176  int i;
4177  if (r == NULL)
4178    {
4179      fputs (" (nil)", outf);
4180      return;
4181    }
4182
4183  EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4184    {
4185      fprintf (outf, " %d", i);
4186      if (i < FIRST_PSEUDO_REGISTER)
4187	fprintf (outf, " [%s]",
4188		 reg_names[i]);
4189    });
4190}
4191
/* Print a human-readable representation of R on the standard error
   stream.  This function is designed to be used from within the
   debugger.  */
4195
4196void
4197debug_regset (r)
4198     regset r;
4199{
4200  dump_regset (r, stderr);
4201  putc ('\n', stderr);
4202}
4203
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F and LOOP_STEP are unused (both are marked ATTRIBUTE_UNUSED) and are
   retained only for interface compatibility; the body simply reallocates
   the life data and reruns local life analysis with PROP_REG_INFO.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */
4222
void
recompute_reg_usage (f, loop_step)
     rtx f ATTRIBUTE_UNUSED;
     int loop_step ATTRIBUTE_UNUSED;
{
  /* Both parameters are ignored; update_life_info rescans the existing
     CFG itself.  They are kept only for interface compatibility.  */
  allocate_reg_life_data ();
  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
}
4231
4232/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4233   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
4234   of the number of registers that died.  */
4235
int
count_or_remove_death_notes (blocks, kill)
     sbitmap blocks;
     int kill;
{
  int i, count = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb;
      rtx insn;

      /* Skip blocks not selected by BLOCKS (NULL selects every block).  */
      if (blocks && ! TEST_BIT (blocks, i))
	continue;

      bb = BASIC_BLOCK (i);

      for (insn = bb->head;; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      /* Walk the REG_NOTES list keeping a pointer to the
		 previous link so notes can be spliced out in place.  */
	      rtx *pprev = &REG_NOTES (insn);
	      rtx link = *pprev;

	      while (link)
		{
		  switch (REG_NOTE_KIND (link))
		    {
		    case REG_DEAD:
		      if (GET_CODE (XEXP (link, 0)) == REG)
			{
			  rtx reg = XEXP (link, 0);
			  int n;

			  /* A hard register may occupy several register
			     words; count each as a separate death.  */
			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
			    n = 1;
			  else
			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
			  count += n;
			}
		      /* Fall through.  */

		    case REG_UNUSED:
		      if (kill)
			{
			  /* Splice this note out of the list; PPREV stays
			     put and LINK advances to the next note.  */
			  rtx next = XEXP (link, 1);
			  free_EXPR_LIST_node (link);
			  *pprev = link = next;
			  break;
			}
		      /* Fall through.  */

		    default:
		      /* Keep this note; step both cursors forward.  */
		      pprev = &XEXP (link, 1);
		      link = *pprev;
		      break;
		    }
		}
	    }

	  if (insn == bb->end)
	    break;
	}
    }

  return count;
}
4303/* Clear LOG_LINKS fields of insns in a selected blocks or whole chain
4304   if blocks is NULL.  */
4305
4306static void
4307clear_log_links (blocks)
4308     sbitmap blocks;
4309{
4310  rtx insn;
4311  int i;
4312
4313  if (!blocks)
4314    {
4315      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4316	if (INSN_P (insn))
4317	  free_INSN_LIST_list (&LOG_LINKS (insn));
4318    }
4319  else
4320    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4321      {
4322	basic_block bb = BASIC_BLOCK (i);
4323
4324	for (insn = bb->head; insn != NEXT_INSN (bb->end);
4325	     insn = NEXT_INSN (insn))
4326	  if (INSN_P (insn))
4327	    free_INSN_LIST_list (&LOG_LINKS (insn));
4328      });
4329}
4330
4331/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4332   correspond to the hard registers, if any, set in that map.  This
4333   could be done far more efficiently by having all sorts of special-cases
4334   with moving single words, but probably isn't worth the trouble.  */
4335
void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       /* Bits are visited in ascending order, so the first pseudo
	  register number we see means no further hard registers can
	  follow.  Note this `return' exits the whole function (the
	  iteration body is inline code), not just the loop body.  */
       if (i >= FIRST_PSEUDO_REGISTER)
	 return;
       SET_HARD_REG_BIT (*to, i);
     });
}
4351