flow.c revision 96489
1/* Data flow analysis for GNU compiler.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22/* This file contains the data flow analysis pass of the compiler.  It
23   computes data flow information which tells combine_instructions
24   which insns to consider combining and controls register allocation.
25
26   Additional data flow information that is too bulky to record is
27   generated during the analysis, and is used at that time to create
28   autoincrement and autodecrement addressing.
29
30   The first step is dividing the function into basic blocks.
31   find_basic_blocks does this.  Then life_analysis determines
32   where each register is live and where it is dead.
33
34   ** find_basic_blocks **
35
36   find_basic_blocks divides the current function's rtl into basic
37   blocks and constructs the CFG.  The blocks are recorded in the
38   basic_block_info array; the CFG exists in the edge structures
39   referenced by the blocks.
40
41   find_basic_blocks also finds any unreachable loops and deletes them.
42
43   ** life_analysis **
44
45   life_analysis is called immediately after find_basic_blocks.
46   It uses the basic block information to determine where each
47   hard or pseudo register is live.
48
49   ** live-register info **
50
51   The information about where each register is live is in two parts:
52   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54   basic_block->global_live_at_start has an element for each basic
55   block, and the element is a bit-vector with a bit for each hard or
56   pseudo register.  The bit is 1 if the register is live at the
57   beginning of the basic block.
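   (For example, REGNO_REG_SET_P (bb->global_live_at_start, R) is
   nonzero exactly when register number R is live on entry to block BB.)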
58
59   Two types of elements can be added to an insn's REG_NOTES.
60   A REG_DEAD note is added to an insn's REG_NOTES for any register
61   that meets both of two conditions:  The value in the register is not
62   needed in subsequent insns and the insn does not replace the value in
63   the register (in the case of multi-word hard registers, the value in
64   each register must be replaced by the insn to avoid a REG_DEAD note).
65
66   In the vast majority of cases, an object in a REG_DEAD note will be
67   used somewhere in the insn.  The (rare) exception to this is if an
68   insn uses a multi-word hard register and only some of the registers are
69   needed in subsequent insns.  In that case, REG_DEAD notes will be
70   provided for those hard registers that are not subsequently needed.
71   Partial REG_DEAD notes of this type do not occur when an insn sets
72   only some of the hard registers used in such a multi-word operand;
73   omitting REG_DEAD notes for objects stored in an insn is optional and
74   the desire to do so does not justify the complexity of the partial
75   REG_DEAD notes.
76
77   REG_UNUSED notes are added for each register that is set by the insn
78   but is unused subsequently (if every register set by the insn is unused
79   and the insn does not reference memory or have some other side-effect,
80   the insn is deleted instead).  If only part of a multi-word hard
81   register is used in a subsequent insn, REG_UNUSED notes are made for
82   the parts that will not be used.
83
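   As a purely illustrative sketch (hypothetical pseudo register
   numbers, not taken from any real dump), an insn whose pattern is

	(set (reg:SI 60) (plus:SI (reg:SI 58) (reg:SI 59)))

   and whose REG_NOTES contain (expr_list:REG_DEAD (reg:SI 58) (nil))
   is recording that the value in register 58 is not needed by any
   later insn.  A REG_UNUSED note has the same printed shape and marks
   a register that the insn sets but whose value is never used.
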
84   To determine which registers are live after any insn, one can
85   start from the beginning of the basic block and scan insns, noting
86   which registers are set by each insn and which die there.
87
88   ** Other actions of life_analysis **
89
90   life_analysis sets up the LOG_LINKS fields of insns because the
91   information needed to do so is readily available.
92
93   life_analysis deletes insns whose only effect is to store a value
94   that is never used.
95
96   life_analysis notices cases where a reference to a register as
97   a memory address can be combined with a preceding or following
98   incrementation or decrementation of the register.  The separate
99   instruction to increment or decrement is deleted and the address
100   is changed to a POST_INC or similar rtx.
101
102   Each time an incrementing or decrementing address is created,
103   a REG_INC element is added to the insn's REG_NOTES list.
104
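   As a hedged illustration (hypothetical pseudo register numbers,
   assuming a 4-byte SImode), the pair of insns

	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (reg:SI 101))

   can be transformed by deleting the separate addition and rewriting
   the memory reference as

	(set (mem:SI (pre_inc:SI (reg:SI 100))) (reg:SI 101))

   with a REG_INC note for register 100 on the remaining insn.
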
105   life_analysis fills in certain vectors containing information about
106   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109   life_analysis sets current_function_sp_is_unchanging if the function
110   doesn't modify the stack pointer.  */
111
112/* TODO:
113
114   Split out from life_analysis:
115	- local property discovery (bb->local_live, bb->local_set)
116	- global property computation
117	- log links creation
118	- pre/post modify transformation
119*/
120
121#include "config.h"
122#include "system.h"
123#include "tree.h"
124#include "rtl.h"
125#include "tm_p.h"
126#include "hard-reg-set.h"
127#include "basic-block.h"
128#include "insn-config.h"
129#include "regs.h"
130#include "flags.h"
131#include "output.h"
132#include "function.h"
133#include "except.h"
134#include "toplev.h"
135#include "recog.h"
136#include "expr.h"
137#include "ssa.h"
138#include "timevar.h"
139
140#include "obstack.h"
141#include "splay-tree.h"
142
143#define obstack_chunk_alloc xmalloc
144#define obstack_chunk_free free
145
146/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147   the stack pointer does not matter.  The value is tested only in
148   functions that have frame pointers.
149   No definition is equivalent to always zero.  */
150#ifndef EXIT_IGNORE_STACK
151#define EXIT_IGNORE_STACK 0
152#endif
153
154#ifndef HAVE_epilogue
155#define HAVE_epilogue 0
156#endif
157#ifndef HAVE_prologue
158#define HAVE_prologue 0
159#endif
160#ifndef HAVE_sibcall_epilogue
161#define HAVE_sibcall_epilogue 0
162#endif
163
164#ifndef LOCAL_REGNO
165#define LOCAL_REGNO(REGNO)  0
166#endif
167#ifndef EPILOGUE_USES
168#define EPILOGUE_USES(REGNO)  0
169#endif
170#ifndef EH_USES
171#define EH_USES(REGNO)  0
172#endif
173
174#ifdef HAVE_conditional_execution
175#ifndef REVERSE_CONDEXEC_PREDICATES_P
176#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
177#endif
178#endif
179
180/* Nonzero if the second flow pass has completed.  */
181int flow2_completed;
182
183/* Maximum register number used in this function, plus one.  */
184
185int max_regno;
186
187/* Indexed by n, giving various register information */
188
189varray_type reg_n_info;
190
191/* Size of a regset for the current function,
192   in (1) bytes and (2) elements.  */
193
194int regset_bytes;
195int regset_size;
196
197/* Regset of regs live when calls to `setjmp'-like functions happen.  */
198/* ??? Does this exist only for the setjmp-clobbered warning message?  */
199
200regset regs_live_at_setjmp;
201
202/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
203   that have to go in the same hard reg.
204   The first two regs in the list are a pair, and the next two
205   are another pair, etc.  */
206rtx regs_may_share;
207
208/* Callback that determines if it's ok for a function to have no
209   noreturn attribute.  */
210int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
211
212/* Set of registers that may be eliminable.  These are handled specially
213   in updating regs_ever_live.  */
214
215static HARD_REG_SET elim_reg_set;
216
217/* Holds information for tracking conditional register life information.  */
218struct reg_cond_life_info
219{
220  /* A boolean expression of conditions under which a register is dead.  */
221  rtx condition;
222  /* Conditions under which a register is dead at the basic block end.  */
223  rtx orig_condition;
224
225  /* A boolean expression of conditions under which a register has been
226     stored into.  */
227  rtx stores;
228
229  /* ??? Could store mask of bytes that are dead, so that we could finally
230     track lifetimes of multi-word registers accessed via subregs.  */
231};
232
233/* For use in communicating between propagate_block and its subroutines.
234   Holds all information needed to compute life and def-use information.  */
235
236struct propagate_block_info
237{
238  /* The basic block we're considering.  */
239  basic_block bb;
240
241  /* Bit N is set if register N is conditionally or unconditionally live.  */
242  regset reg_live;
243
244  /* Bit N is set if register N is set this insn.  */
245  regset new_set;
246
247  /* Element N is the next insn that uses (hard or pseudo) register N
248     within the current basic block; or zero, if there is no such insn.  */
249  rtx *reg_next_use;
250
251  /* Contains a list of all the MEMs we are tracking for dead store
252     elimination.  */
253  rtx mem_set_list;
254
255  /* If non-null, record the set of registers set unconditionally in the
256     basic block.  */
257  regset local_set;
258
259  /* If non-null, record the set of registers set conditionally in the
260     basic block.  */
261  regset cond_local_set;
262
263#ifdef HAVE_conditional_execution
264  /* Indexed by register number, holds a reg_cond_life_info for each
265     register that is not unconditionally live or dead.  */
266  splay_tree reg_cond_dead;
267
268  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
269  regset reg_cond_reg;
270#endif
271
272  /* The length of mem_set_list.  */
273  int mem_set_list_len;
274
275  /* Non-zero if the value of CC0 is live.  */
276  int cc0_live;
277
278  /* Flags controlling the set of information propagate_block collects.  */
279  int flags;
280};
281
282/* Maximum length of pbi->mem_set_list before we start dropping
283   new elements on the floor.  */
284#define MAX_MEM_SET_LIST_LEN	100
285
286/* Forward declarations */
287static int verify_wide_reg_1		PARAMS ((rtx *, void *));
288static void verify_wide_reg		PARAMS ((int, basic_block));
289static void verify_local_live_at_start	PARAMS ((regset, basic_block));
290static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
291static void notice_stack_pointer_modification PARAMS ((rtx));
292static void mark_reg			PARAMS ((rtx, void *));
293static void mark_regs_live_at_end	PARAMS ((regset));
294static int set_phi_alternative_reg      PARAMS ((rtx, int, int, void *));
295static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
296static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
297static rtx propagate_block_delete_libcall PARAMS ((rtx, rtx));
298static int insn_dead_p			PARAMS ((struct propagate_block_info *,
299						 rtx, int, rtx));
300static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
301						 rtx, rtx));
302static void mark_set_regs		PARAMS ((struct propagate_block_info *,
303						 rtx, rtx));
304static void mark_set_1			PARAMS ((struct propagate_block_info *,
305						 enum rtx_code, rtx, rtx,
306						 rtx, int));
307static int find_regno_partial		PARAMS ((rtx *, void *));
308
309#ifdef HAVE_conditional_execution
310static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
311						 int, rtx));
312static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
313static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
314static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
315						 int));
316static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
317static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
318static rtx not_reg_cond			PARAMS ((rtx));
319static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
320#endif
321#ifdef AUTO_INC_DEC
322static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
323						 rtx, rtx, rtx, rtx, rtx));
324static void find_auto_inc		PARAMS ((struct propagate_block_info *,
325						 rtx, rtx));
326static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
327						 rtx));
328static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
329#endif
330static void mark_used_reg		PARAMS ((struct propagate_block_info *,
331						 rtx, rtx, rtx));
332static void mark_used_regs		PARAMS ((struct propagate_block_info *,
333						 rtx, rtx, rtx));
334void dump_flow_info			PARAMS ((FILE *));
335void debug_flow_info			PARAMS ((void));
336static void add_to_mem_set_list		PARAMS ((struct propagate_block_info *,
337						 rtx));
338static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
339						  rtx));
340static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
341						 rtx));
342static void delete_dead_jumptables	PARAMS ((void));
343static void clear_log_links		PARAMS ((sbitmap));
344
345
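/* Emit warnings related to function return behavior: functions that
   look like candidates for the `noreturn' attribute, `noreturn'
   functions that can return, and control reaching the end of a
   non-void function.  */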
346void
347check_function_return_warnings ()
348{
349  if (warn_missing_noreturn
350      && !TREE_THIS_VOLATILE (cfun->decl)
351      && EXIT_BLOCK_PTR->pred == NULL
352      && (lang_missing_noreturn_ok_p
353	  && !lang_missing_noreturn_ok_p (cfun->decl)))
354    warning ("function might be possible candidate for attribute `noreturn'");
355
356  /* If we have a path to EXIT, then we do return.  */
357  if (TREE_THIS_VOLATILE (cfun->decl)
358      && EXIT_BLOCK_PTR->pred != NULL)
359    warning ("`noreturn' function does return");
360
361  /* If the clobber_return_insn appears in some basic block, then we
362     do reach the end without returning a value.  */
363  else if (warn_return_type
364	   && cfun->x_clobber_return_insn != NULL
365	   && EXIT_BLOCK_PTR->pred != NULL)
366    {
367      int max_uid = get_max_uid ();
368
369      /* If clobber_return_insn was excised by jump1, then renumber_insns
370	 can make max_uid smaller than the number still recorded in our rtx.
371	 That's fine, since this is a quick way of verifying that the insn
372	 is no longer in the chain.  */
373      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
374	{
375	  /* Recompute insn->block mapping, since the initial mapping is
376	     set before we delete unreachable blocks.  */
377	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
378	    warning ("control reaches end of non-void function");
379	}
380    }
381}
382
383/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
384   note associated with the BLOCK.  */
385
386rtx
387first_insn_after_basic_block_note (block)
388     basic_block block;
389{
390  rtx insn;
391
392  /* Get the first instruction in the block.  */
393  insn = block->head;
394
395  if (insn == NULL_RTX)
396    return NULL_RTX;
397  if (GET_CODE (insn) == CODE_LABEL)
398    insn = NEXT_INSN (insn);
399  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
400    abort ();
401
402  return NEXT_INSN (insn);
403}
404
405/* Perform data flow analysis.
406   F is the first insn of the function; FLAGS is a set of PROP_* flags
407   to be used in accumulating flow info.  */
408
409void
410life_analysis (f, file, flags)
411     rtx f;
412     FILE *file;
413     int flags;
414{
415#ifdef ELIMINABLE_REGS
416  int i;
417  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
418#endif
419
420  /* Record which registers will be eliminated.  We use this in
421     mark_used_regs.  */
422
423  CLEAR_HARD_REG_SET (elim_reg_set);
424
425#ifdef ELIMINABLE_REGS
426  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
427    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
428#else
429  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
430#endif
431
432  if (! optimize)
433    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
434
435  /* The post-reload life analysis must have (on a global basis) the same
436     registers live as reload itself computed; otherwise the elimination
437     offsets and such may be incorrect.
438
439     Reload will mark some registers as live even though they do not
440     appear in the rtl.
441
442     We don't want to create new auto-incs after reload, since they
443     are unlikely to be useful and can cause problems with shared
444     stack slots.  */
445  if (reload_completed)
446    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
447
448  /* We want alias analysis information for local dead store elimination.  */
449  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
450    init_alias_analysis ();
451
452  /* Always remove no-op moves.  Do this before other processing so
453     that we don't have to keep re-scanning them.  */
454  delete_noop_moves (f);
455  purge_all_dead_edges (false);
456
457  /* Some targets can emit simpler epilogues if they know that sp was
458     not ever modified during the function.  After reload, of course,
459     we've already emitted the epilogue so there's no sense searching.  */
460  if (! reload_completed)
461    notice_stack_pointer_modification (f);
462
463  /* Allocate and zero out data structures that will record the
464     data from lifetime analysis.  */
465  allocate_reg_life_data ();
466  allocate_bb_life_data ();
467
468  /* Find the set of registers live on function exit.  */
469  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
470
471  /* "Update" life info from zero.  It'd be nice to begin the
472     relaxation with just the exit and noreturn blocks, but that set
473     is not immediately handy.  */
474
475  if (flags & PROP_REG_INFO)
476    memset (regs_ever_live, 0, sizeof (regs_ever_live));
477  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
478
479  /* Clean up.  */
480  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
481    end_alias_analysis ();
482
483  if (file)
484    dump_flow_info (file);
485
486  free_basic_block_vars (1);
487
488#ifdef ENABLE_CHECKING
489  {
490    rtx insn;
491
492    /* Search for any REG_LABEL notes which reference deleted labels.  */
493    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
494      {
495	rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
496
497	if (inote && GET_CODE (XEXP (inote, 0)) == NOTE)
498	  abort ();
499      }
500  }
501#endif
502  /* Removing dead insns should've made jumptables really dead.  */
503  delete_dead_jumptables ();
504}
505
506/* A subroutine of verify_wide_reg, called through for_each_rtx.
507   Search for REGNO.  If found, return 2 if it is not wider than
508   word_mode, or 1 if it is.  Return zero if REGNO is not found.  */
509
510static int
511verify_wide_reg_1 (px, pregno)
512     rtx *px;
513     void *pregno;
514{
515  rtx x = *px;
516  unsigned int regno = *(int *) pregno;
517
518  if (GET_CODE (x) == REG && REGNO (x) == regno)
519    {
520      if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
521	return 2;
522      return 1;
523    }
524  return 0;
525}
526
527/* A subroutine of verify_local_live_at_start.  Search through insns
528   of BB looking for register REGNO.  */
529
530static void
531verify_wide_reg (regno, bb)
532     int regno;
533     basic_block bb;
534{
535  rtx head = bb->head, end = bb->end;
536
537  while (1)
538    {
539      if (INSN_P (head))
540	{
541	  int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
542	  if (r == 1)
543	    return;
544	  if (r == 2)
545	    break;
546	}
547      if (head == end)
548	break;
549      head = NEXT_INSN (head);
550    }
551
552  if (rtl_dump_file)
553    {
554      fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
555      dump_bb (bb, rtl_dump_file);
556    }
557  abort ();
558}
559
560/* A subroutine of update_life_info.  Verify that there are no untoward
561   changes in live_at_start during a local update.  */
562
563static void
564verify_local_live_at_start (new_live_at_start, bb)
565     regset new_live_at_start;
566     basic_block bb;
567{
568  if (reload_completed)
569    {
570      /* After reload, there are no pseudos, nor subregs of multi-word
571	 registers.  The regsets should exactly match.  */
572      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
573	{
574	  if (rtl_dump_file)
575	    {
576	      fprintf (rtl_dump_file,
577		       "live_at_start mismatch in bb %d, aborting\nNew:\n",
578		       bb->index);
579	      debug_bitmap_file (rtl_dump_file, new_live_at_start);
580	      fputs ("Old:\n", rtl_dump_file);
581	      dump_bb (bb, rtl_dump_file);
582	    }
583	  abort ();
584	}
585    }
586  else
587    {
588      int i;
589
590      /* Find the set of changed registers.  */
591      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
592
593      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
594	{
595          /* No registers should die.  */
596	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
597	    {
598	      if (rtl_dump_file)
599		{
600		  fprintf (rtl_dump_file,
601			   "Register %d died unexpectedly.\n", i);
602		  dump_bb (bb, rtl_dump_file);
603		}
604	      abort ();
605	    }
606
607          /* Verify that the now-live register is wider than word_mode.  */
608	  verify_wide_reg (i, bb);
609	});
610    }
611}
612
613/* Updates life information starting with the basic blocks set in BLOCKS.
614   If BLOCKS is null, consider it to be the universal set.
615
616   If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
617   we are only expecting local modifications to basic blocks.  If we find
618   extra registers live at the beginning of a block, then we either killed
619   useful data, or we have a broken split that wants data not provided.
620   If we find registers removed from live_at_start, that means we have
621   a broken peephole that is killing a register it shouldn't.
622
623   ??? This is not true in one situation -- when a pre-reload splitter
624   generates subregs of a multi-word pseudo, current life analysis will
625   lose the kill.  So we _can_ have a pseudo go live.  How irritating.
626
627   Including PROP_REG_INFO does not properly refresh regs_ever_live
628   unless the caller resets it to zero.  */
629
630void
631update_life_info (blocks, extent, prop_flags)
632     sbitmap blocks;
633     enum update_life_extent extent;
634     int prop_flags;
635{
636  regset tmp;
637  regset_head tmp_head;
638  int i;
639  int stabilized_prop_flags = prop_flags;
640
641  tmp = INITIALIZE_REG_SET (tmp_head);
642
643  timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
644		? TV_LIFE_UPDATE : TV_LIFE);
645
646  /* Changes to the CFG are only allowed when
647     doing a global update for the entire CFG.  */
648  if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
649      && (extent == UPDATE_LIFE_LOCAL || blocks))
650    abort ();
651
652  /* Clear log links in case we are asked to (re)compute them.  */
653  if (prop_flags & PROP_LOG_LINKS)
654    clear_log_links (blocks);
655
656  /* For a global update, we go through the relaxation process again.  */
657  if (extent != UPDATE_LIFE_LOCAL)
658    {
659      for ( ; ; )
660	{
661	  int changed = 0;
662
663	  calculate_global_regs_live (blocks, blocks,
664				prop_flags & (PROP_SCAN_DEAD_CODE
665					      | PROP_ALLOW_CFG_CHANGES));
666
667	  if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
668	      != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
669	    break;
670
671	  /* Removing dead code may allow the CFG to be simplified which
672	     in turn may allow for further dead code detection / removal.  */
673	  for (i = n_basic_blocks - 1; i >= 0; --i)
674	    {
675	      basic_block bb = BASIC_BLOCK (i);
676
677	      COPY_REG_SET (tmp, bb->global_live_at_end);
678	      changed |= propagate_block (bb, tmp, NULL, NULL,
679				prop_flags & (PROP_SCAN_DEAD_CODE
680					      | PROP_KILL_DEAD_CODE));
681	    }
682
683	  /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
684	     subsequent propagate_block calls, since removing or acting as
685	     removing dead code can affect global register liveness, which
686	     is supposed to be finalized for this call after this loop.  */
687	  stabilized_prop_flags
688	    &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
689
690	  if (! changed)
691	    break;
692
693	  /* We repeat regardless of what cleanup_cfg says.  If there were
694	     instructions deleted above, that might have been only a
695	     partial improvement (see MAX_MEM_SET_LIST_LEN usage).
696	     Further improvement may be possible.  */
697	  cleanup_cfg (CLEANUP_EXPENSIVE);
698	}
699
700      /* If asked, remove notes from the blocks we'll update.  */
701      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
702	count_or_remove_death_notes (blocks, 1);
703    }
704
705  if (blocks)
706    {
707      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
708	{
709	  basic_block bb = BASIC_BLOCK (i);
710
711	  COPY_REG_SET (tmp, bb->global_live_at_end);
712	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
713
714	  if (extent == UPDATE_LIFE_LOCAL)
715	    verify_local_live_at_start (tmp, bb);
716	});
717    }
718  else
719    {
720      for (i = n_basic_blocks - 1; i >= 0; --i)
721	{
722	  basic_block bb = BASIC_BLOCK (i);
723
724	  COPY_REG_SET (tmp, bb->global_live_at_end);
725
726	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
727
728	  if (extent == UPDATE_LIFE_LOCAL)
729	    verify_local_live_at_start (tmp, bb);
730	}
731    }
732
733  FREE_REG_SET (tmp);
734
735  if (prop_flags & PROP_REG_INFO)
736    {
737      /* The only pseudos that are live at the beginning of the function
738	 are those that were not set anywhere in the function.  local-alloc
739	 doesn't know how to handle these correctly, so mark them as not
740	 local to any one basic block.  */
741      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
742				 FIRST_PSEUDO_REGISTER, i,
743				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
744
745      /* We have a problem with any pseudoreg that lives across the setjmp.
746	 ANSI says that if a user variable does not change in value between
747	 the setjmp and the longjmp, then the longjmp preserves it.  This
748	 includes longjmp from a place where the pseudo appears dead.
749	 (In principle, the value still exists if it is in scope.)
750	 If the pseudo goes in a hard reg, some other value may occupy
751	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
752	 Conclusion: such a pseudo must not go in a hard reg.  */
753      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
754				 FIRST_PSEUDO_REGISTER, i,
755				 {
756				   if (regno_reg_rtx[i] != 0)
757				     {
758				       REG_LIVE_LENGTH (i) = -1;
759				       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
760				     }
761				 });
762    }
763  timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
764	       ? TV_LIFE_UPDATE : TV_LIFE);
765}
766
767/* Free the variables allocated by find_basic_blocks.
768
769   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */
770
771void
772free_basic_block_vars (keep_head_end_p)
773     int keep_head_end_p;
774{
775  if (! keep_head_end_p)
776    {
777      if (basic_block_info)
778	{
779	  clear_edges ();
780	  VARRAY_FREE (basic_block_info);
781	}
782      n_basic_blocks = 0;
783
784      ENTRY_BLOCK_PTR->aux = NULL;
785      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
786      EXIT_BLOCK_PTR->aux = NULL;
787      EXIT_BLOCK_PTR->global_live_at_start = NULL;
788    }
789}
790
791/* Delete any insns that copy a register to itself.  */
792
793void
794delete_noop_moves (f)
795     rtx f ATTRIBUTE_UNUSED;
796{
797  int i;
798  rtx insn, next;
799  basic_block bb;
800
801  for (i = 0; i < n_basic_blocks; i++)
802    {
803      bb = BASIC_BLOCK (i);
804      for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
805	{
806	  next = NEXT_INSN (insn);
807	  if (INSN_P (insn) && noop_move_p (insn))
808	    {
809	      rtx note;
810
811	      /* If we're about to remove the first insn of a libcall
812		 then move the libcall note to the next real insn and
813		 update the retval note.  */
814	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
815		       && XEXP (note, 0) != insn)
816		{
817		  rtx new_libcall_insn = next_real_insn (insn);
818		  rtx retval_note = find_reg_note (XEXP (note, 0),
819						   REG_RETVAL, NULL_RTX);
820		  REG_NOTES (new_libcall_insn)
821		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
822					 REG_NOTES (new_libcall_insn));
823		  XEXP (retval_note, 0) = new_libcall_insn;
824		}
825
826	      /* Do not call delete_insn here since that may change
827	         the basic block boundaries which upsets some callers.  */
828	      PUT_CODE (insn, NOTE);
829	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
830	      NOTE_SOURCE_FILE (insn) = 0;
831	    }
832	}
833    }
834}
835
836/* Delete any jump tables never referenced.  We can't delete them at the
837   time of removing the tablejump insn, as they are referenced by the preceding
838   insns computing the destination, so we delay deleting them and garbage
839   collect them once life information is computed.
840static void
841delete_dead_jumptables ()
842{
843  rtx insn, next;
844  for (insn = get_insns (); insn; insn = next)
845    {
846      next = NEXT_INSN (insn);
847      if (GET_CODE (insn) == CODE_LABEL
848	  && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
849	  && GET_CODE (next) == JUMP_INSN
850	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
851	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
852	{
853	  if (rtl_dump_file)
854	    fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
855	  delete_insn (NEXT_INSN (insn));
856	  delete_insn (insn);
857	  next = NEXT_INSN (next);
858	}
859    }
860}
861
862/* Determine if the stack pointer is constant over the life of the function.
863   Only useful before prologues have been emitted.  */
864
865static void
866notice_stack_pointer_modification_1 (x, pat, data)
867     rtx x;
868     rtx pat ATTRIBUTE_UNUSED;
869     void *data ATTRIBUTE_UNUSED;
870{
871  if (x == stack_pointer_rtx
872      /* The stack pointer is only modified indirectly as the result
873	 of a push until later in flow.  See the comments in rtl.texi
874	 regarding Embedded Side-Effects on Addresses.  */
875      || (GET_CODE (x) == MEM
876	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
877	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
878    current_function_sp_is_unchanging = 0;
879}
880
881static void
882notice_stack_pointer_modification (f)
883     rtx f;
884{
885  rtx insn;
886
887  /* Assume that the stack pointer is unchanging if alloca hasn't
888     been used.  */
889  current_function_sp_is_unchanging = !current_function_calls_alloca;
890  if (! current_function_sp_is_unchanging)
891    return;
892
893  for (insn = f; insn; insn = NEXT_INSN (insn))
894    {
895      if (INSN_P (insn))
896	{
897	  /* Check if insn modifies the stack pointer.  */
898	  note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
899		       NULL);
900	  if (! current_function_sp_is_unchanging)
901	    return;
902	}
903    }
904}
905
906/* Mark a register in SET.  Hard registers in large modes get all
907   of their component registers set as well.  */
908
909static void
910mark_reg (reg, xset)
911     rtx reg;
912     void *xset;
913{
914  regset set = (regset) xset;
915  int regno = REGNO (reg);
916
917  if (GET_MODE (reg) == BLKmode)
918    abort ();
919
920  SET_REGNO_REG_SET (set, regno);
921  if (regno < FIRST_PSEUDO_REGISTER)
922    {
923      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
924      while (--n > 0)
925	SET_REGNO_REG_SET (set, regno + n);
926    }
927}
928
929/* Mark those regs which are needed at the end of the function as live
930   at the end of the last basic block.  */
931
932static void
933mark_regs_live_at_end (set)
934     regset set;
935{
936  unsigned int i;
937
938  /* If exiting needs the right stack value, consider the stack pointer
939     live at the end of the function.  */
940  if ((HAVE_epilogue && reload_completed)
941      || ! EXIT_IGNORE_STACK
942      || (! FRAME_POINTER_REQUIRED
943	  && ! current_function_calls_alloca
944	  && flag_omit_frame_pointer)
945      || current_function_sp_is_unchanging)
946    {
947      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
948    }
949
950  /* Mark the frame pointer if needed at the end of the function.  If
951     we end up eliminating it, it will be removed from the live list
952     of each basic block by reload.  */
953
954  if (! reload_completed || frame_pointer_needed)
955    {
956      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
957#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
958      /* If they are different, also mark the hard frame pointer as live.  */
959      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
960        SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
961#endif
962    }
963
964#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
965  /* Many architectures have a GP register even without flag_pic.
966     Assume the pic register is not in use, or will be handled by
967     other means, if it is not fixed.  */
968  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
969      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
970    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
971#endif
972
973  /* Mark all global registers, and all registers used by the epilogue
974     as being live at the end of the function since they may be
975     referenced by our caller.  */
976  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
977    if (global_regs[i] || EPILOGUE_USES (i))
978      SET_REGNO_REG_SET (set, i);
979
980  if (HAVE_epilogue && reload_completed)
981    {
982      /* Mark all call-saved registers that we actually used.  */
983      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
984	if (regs_ever_live[i] && ! LOCAL_REGNO (i)
985	    && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
986	  SET_REGNO_REG_SET (set, i);
987    }
988
989#ifdef EH_RETURN_DATA_REGNO
990  /* Mark the registers that will contain data for the handler.  */
991  if (reload_completed && current_function_calls_eh_return)
992    for (i = 0; ; ++i)
993      {
994	unsigned regno = EH_RETURN_DATA_REGNO(i);
995	if (regno == INVALID_REGNUM)
996	  break;
997	SET_REGNO_REG_SET (set, regno);
998      }
999#endif
1000#ifdef EH_RETURN_STACKADJ_RTX
1001  if ((! HAVE_epilogue || ! reload_completed)
1002      && current_function_calls_eh_return)
1003    {
1004      rtx tmp = EH_RETURN_STACKADJ_RTX;
1005      if (tmp && REG_P (tmp))
1006	mark_reg (tmp, set);
1007    }
1008#endif
1009#ifdef EH_RETURN_HANDLER_RTX
1010  if ((! HAVE_epilogue || ! reload_completed)
1011      && current_function_calls_eh_return)
1012    {
1013      rtx tmp = EH_RETURN_HANDLER_RTX;
1014      if (tmp && REG_P (tmp))
1015	mark_reg (tmp, set);
1016    }
1017#endif
1018
1019  /* Mark function return value.  */
1020  diddle_return_value (mark_reg, set);
1021}
1022
1023/* Callback function for for_each_successor_phi.  DATA is a regset.
1024   Sets the SRC_REGNO, the regno of the phi alternative for phi node
1025   INSN, in the regset.  */
1026
1027static int
1028set_phi_alternative_reg (insn, dest_regno, src_regno, data)
1029     rtx insn ATTRIBUTE_UNUSED;
1030     int dest_regno ATTRIBUTE_UNUSED;
1031     int src_regno;
1032     void *data;
1033{
1034  regset live = (regset) data;
1035  SET_REGNO_REG_SET (live, src_regno);
1036  return 0;
1037}
1038
1039/* Propagate global life info around the graph of basic blocks.  Begin
1040   considering blocks with their corresponding bit set in BLOCKS_IN.
1041   If BLOCKS_IN is null, consider it the universal set.
1042
1043   BLOCKS_OUT is set for every block that was changed.  */
1044
1045static void
1046calculate_global_regs_live (blocks_in, blocks_out, flags)
1047     sbitmap blocks_in, blocks_out;
1048     int flags;
1049{
1050  basic_block *queue, *qhead, *qtail, *qend;
1051  regset tmp, new_live_at_end, call_used;
1052  regset_head tmp_head, call_used_head;
1053  regset_head new_live_at_end_head;
1054  int i;
1055
1056  tmp = INITIALIZE_REG_SET (tmp_head);
1057  new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
1058  call_used = INITIALIZE_REG_SET (call_used_head);
1059
1060  /* Inconveniently, this is only readily available in hard reg set form.  */
1061  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1062    if (call_used_regs[i])
1063      SET_REGNO_REG_SET (call_used, i);
1064
1065  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
1066     more because the `head == tail' style test for an empty queue doesn't
1067     work with a full queue.  */
1068  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
1069  qtail = queue;
1070  qhead = qend = queue + n_basic_blocks + 2;
1071
1072  /* Queue the blocks set in the initial mask.  Do this in reverse block
1073     number order so that the first round is more likely to do
1074     useful work.  We use AUX non-null to flag that the block is queued.  */
1075  if (blocks_in)
1076    {
1077      /* Clear out the garbage that might be hanging out in bb->aux.  */
1078      for (i = n_basic_blocks - 1; i >= 0; --i)
1079	BASIC_BLOCK (i)->aux = NULL;
1080
1081      EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
1082	{
1083	  basic_block bb = BASIC_BLOCK (i);
1084	  *--qhead = bb;
1085	  bb->aux = bb;
1086	});
1087    }
1088  else
1089    {
1090      for (i = 0; i < n_basic_blocks; ++i)
1091	{
1092	  basic_block bb = BASIC_BLOCK (i);
1093	  *--qhead = bb;
1094	  bb->aux = bb;
1095	}
1096    }
1097
1098  /* We clean aux when we remove the initially-enqueued bbs, but we
1099     don't enqueue ENTRY and EXIT initially, so clean them upfront and
1100     unconditionally.  */
1101  ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;
1102
1103  if (blocks_out)
1104    sbitmap_zero (blocks_out);
1105
1106  /* We work through the queue until there are no more blocks.  What
1107     is live at the end of this block is precisely the union of what
1108     is live at the beginning of all its successors.  So, we set its
1109     GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
1110     for its successors.  Then, we compute GLOBAL_LIVE_AT_START for
1111     this block by walking through the instructions in this block in
1112     reverse order and updating as we go.  If that changed
1113     GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
1114     queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
1115
1116     We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
1117     never shrinks.  If a register appears in GLOBAL_LIVE_AT_START, it
1118     must either be live at the end of the block, or used within the
1119     block.  In the latter case, it will certainly never disappear
1120     from GLOBAL_LIVE_AT_START.  In the former case, the register
1121     could go away only if it disappeared from GLOBAL_LIVE_AT_START
1122     for one of the successor blocks.  By induction, that cannot
1123     occur.  */
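  /* Stated as conventional backward dataflow equations (a reminder
     only; the code below manipulates the regsets directly):

	live_at_end (B) = union of live_at_start (S) for each successor S
	live_at_start (B) = use (B) | (live_at_end (B) & ~def (B))

     where use (B) and def (B) are the local properties computed by
     propagate_block while scanning B in reverse.  */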
1124  while (qhead != qtail)
1125    {
1126      int rescan, changed;
1127      basic_block bb;
1128      edge e;
1129
1130      bb = *qhead++;
1131      if (qhead == qend)
1132	qhead = queue;
1133      bb->aux = NULL;
1134
1135      /* Begin by propagating live_at_start from the successor blocks.  */
1136      CLEAR_REG_SET (new_live_at_end);
1137
1138      if (bb->succ)
1139	for (e = bb->succ; e; e = e->succ_next)
1140	  {
1141	    basic_block sb = e->dest;
1142
1143	    /* Call-clobbered registers die across exception and
1144	       call edges.  */
1145	    /* ??? Abnormal call edges ignored for the moment, as this gets
1146	       confused by sibling call edges, which crashes reg-stack.  */
1147	    if (e->flags & EDGE_EH)
1148	      {
1149		bitmap_operation (tmp, sb->global_live_at_start,
1150				  call_used, BITMAP_AND_COMPL);
1151		IOR_REG_SET (new_live_at_end, tmp);
1152	      }
1153	    else
1154	      IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
1155
1156	    /* If a target saves one register in another (instead of on
1157	       the stack) the save register will need to be live for EH.  */
1158	    if (e->flags & EDGE_EH)
1159	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1160		if (EH_USES (i))
1161		  SET_REGNO_REG_SET (new_live_at_end, i);
1162	  }
1163      else
1164	{
1165	  /* This might be a noreturn function that throws.  And
1166	     even if it isn't, getting the unwind info right helps
1167	     debugging.  */
1168	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1169	    if (EH_USES (i))
1170	      SET_REGNO_REG_SET (new_live_at_end, i);
1171	}
1172
1173      /* The all-important stack pointer must always be live.  */
1174      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
1175
1176      /* Before reload, there are a few registers that must be forced
1177	 live everywhere -- which might not already be the case for
1178	 blocks within infinite loops.  */
1179      if (! reload_completed)
1180	{
1181	  /* Any reference to any pseudo before reload is a potential
1182	     reference of the frame pointer.  */
1183	  SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
1184
1185#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1186	  /* Pseudos with argument area equivalences may require
1187	     reloading via the argument pointer.  */
1188	  if (fixed_regs[ARG_POINTER_REGNUM])
1189	    SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
1190#endif
1191
1192	  /* Any constant, or pseudo with constant equivalences, may
1193	     require reloading from memory using the pic register.  */
1194	  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1195	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1196	    SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
1197	}
1198
1199      /* Regs used in phi nodes are not included in
1200	 global_live_at_start, since they are live only along a
1201	 particular edge.  Set those regs that are live because of a
1202	 phi node alternative corresponding to this particular block.  */
1203      if (in_ssa_form)
1204	for_each_successor_phi (bb, &set_phi_alternative_reg,
1205				new_live_at_end);
1206
1207      if (bb == ENTRY_BLOCK_PTR)
1208	{
1209	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1210	  continue;
1211	}
1212
1213      /* On our first pass through this block, we'll go ahead and continue.
1214	 Recognize the first pass by local_set being NULL.  On subsequent passes, we
1215	 get to skip out early if live_at_end wouldn't have changed.  */
1216
1217      if (bb->local_set == NULL)
1218	{
1219	  bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1220	  bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1221	  rescan = 1;
1222	}
1223      else
1224	{
1225	  /* If any bits were removed from live_at_end, we'll have to
1226	     rescan the block.  This wouldn't be necessary if we had
1227	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
1228	     local_live is really dependent on live_at_end.  */
1229	  CLEAR_REG_SET (tmp);
1230	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
1231				     new_live_at_end, BITMAP_AND_COMPL);
1232
1233	  if (! rescan)
1234	    {
1235	      /* If any of the registers in the new live_at_end set are
1236		 conditionally set in this basic block, we must rescan.
1237	         This is because conditional lifetimes at the end of the
1238		 block do not just take the live_at_end set into account,
1239		 but also the liveness at the start of each successor
1240		 block.  We can miss changes in those sets if we only
1241		 compare the new live_at_end against the previous one.  */
1242	      CLEAR_REG_SET (tmp);
1243	      rescan = bitmap_operation (tmp, new_live_at_end,
1244					 bb->cond_local_set, BITMAP_AND);
1245	    }
1246
1247	  if (! rescan)
1248	    {
1249	      /* Find the set of changed bits.  Take this opportunity
1250		 to notice that this set is empty and early out.  */
1251	      CLEAR_REG_SET (tmp);
1252	      changed = bitmap_operation (tmp, bb->global_live_at_end,
1253					  new_live_at_end, BITMAP_XOR);
1254	      if (! changed)
1255		continue;
1256
1257	      /* If any of the changed bits overlap with local_set,
1258		 we'll have to rescan the block.  Detect overlap by
1259		 the AND with ~local_set turning off bits.  */
1260	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
1261					 BITMAP_AND_COMPL);
1262	    }
1263	}
1264
1265      /* Let our caller know that BB changed enough to require its
1266	 death notes updated.  */
1267      if (blocks_out)
1268	SET_BIT (blocks_out, bb->index);
1269
1270      if (! rescan)
1271	{
1272	  /* Add to live_at_start the set of all registers in
1273	     new_live_at_end that aren't in the old live_at_end.  */
1274
1275	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
1276			    BITMAP_AND_COMPL);
1277	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1278
1279	  changed = bitmap_operation (bb->global_live_at_start,
1280				      bb->global_live_at_start,
1281				      tmp, BITMAP_IOR);
1282	  if (! changed)
1283	    continue;
1284	}
1285      else
1286	{
1287	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1288
1289	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
1290	     into live_at_start.  */
1291	  propagate_block (bb, new_live_at_end, bb->local_set,
1292			   bb->cond_local_set, flags);
1293
1294	  /* If live_at_start didn't change, no need to go farther.  */
1295	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
1296	    continue;
1297
1298	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
1299	}
1300
1301      /* Queue all predecessors of BB so that we may re-examine
1302	 their live_at_end.  */
1303      for (e = bb->pred; e; e = e->pred_next)
1304	{
1305	  basic_block pb = e->src;
1306	  if (pb->aux == NULL)
1307	    {
1308	      *qtail++ = pb;
1309	      if (qtail == qend)
1310		qtail = queue;
1311	      pb->aux = pb;
1312	    }
1313	}
1314    }
1315
1316  FREE_REG_SET (tmp);
1317  FREE_REG_SET (new_live_at_end);
1318  FREE_REG_SET (call_used);
1319
1320  if (blocks_out)
1321    {
1322      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
1323	{
1324	  basic_block bb = BASIC_BLOCK (i);
1325	  FREE_REG_SET (bb->local_set);
1326	  FREE_REG_SET (bb->cond_local_set);
1327	});
1328    }
1329  else
1330    {
1331      for (i = n_basic_blocks - 1; i >= 0; --i)
1332	{
1333	  basic_block bb = BASIC_BLOCK (i);
1334	  FREE_REG_SET (bb->local_set);
1335	  FREE_REG_SET (bb->cond_local_set);
1336	}
1337    }
1338
1339  free (queue);
1340}
1341
1342
1343/* This structure is used to pass parameters to and from the
1344   function find_regno_partial ().  It is used to pass in the
1345   register number we are looking for, as well as to return any rtx
1346   we find.  */
1347
1348typedef struct {
1349  unsigned regno_to_find;
1350  rtx retval;
1351} find_regno_partial_param;
1352
1353
1354/* Find the rtx for the reg number specified in 'data' if it is
1355   part of an expression which only uses part of the register.  Return
1356   it in the structure passed in.  */
1357static int
1358find_regno_partial (ptr, data)
1359     rtx *ptr;
1360     void *data;
1361{
1362  find_regno_partial_param *param = (find_regno_partial_param *)data;
1363  unsigned reg = param->regno_to_find;
1364  param->retval = NULL_RTX;
1365
1366  if (*ptr == NULL_RTX)
1367    return 0;
1368
1369  switch (GET_CODE (*ptr))
1370    {
1371    case ZERO_EXTRACT:
1372    case SIGN_EXTRACT:
1373    case STRICT_LOW_PART:
1374      if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
1375	{
1376	  param->retval = XEXP (*ptr, 0);
1377	  return 1;
1378	}
1379      break;
1380
1381    case SUBREG:
1382      if (GET_CODE (SUBREG_REG (*ptr)) == REG
1383	  && REGNO (SUBREG_REG (*ptr)) == reg)
1384	{
1385	  param->retval = SUBREG_REG (*ptr);
1386	  return 1;
1387	}
1388      break;
1389
1390    default:
1391      break;
1392    }
1393
1394  return 0;
1395}
1396
1397/* Process all immediate successors of the entry block looking for pseudo
1398   registers which are live on entry. Find all of those whose first
1399   instance is a partial register reference of some kind, and initialize
1400   them to 0 after the entry block.  This prevents partial bit sets into
1401   registers whose value is otherwise unknown and may contain some kind of
1402   sticky bits we don't want.  */
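/* As a hypothetical example (register numbers invented), if the first
   reference to pseudo 100 is

	(set (subreg:HI (reg:SI 100) 0) ...)

   then the rest of the SImode value would be undefined; emitting
   (set (reg:SI 100) (const_int 0)) on the edge from the entry block
   gives the whole register a known initial value.  */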
1403
1404int
1405initialize_uninitialized_subregs ()
1406{
1407  rtx insn;
1408  edge e;
1409  int reg, did_something = 0;
1410  find_regno_partial_param param;
1411
1412  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1413    {
1414      basic_block bb = e->dest;
1415      regset map = bb->global_live_at_start;
1416      EXECUTE_IF_SET_IN_REG_SET (map,
1417				 FIRST_PSEUDO_REGISTER, reg,
1418	{
1419	  int uid = REGNO_FIRST_UID (reg);
1420	  rtx i;
1421
1422	  /* Find an insn which mentions the register we are looking for.
1423	     It's preferable to have an instance of the register's rtl since
1424	     there may be various flags set which we need to duplicate.
1425	     If we can't find it, it's probably an automatic whose initial
1426	     value doesn't matter, or hopefully something we don't care about.  */
1427	  for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
1428	    ;
1429	  if (i != NULL_RTX)
1430	    {
1431	      /* Found the insn, now get the REG rtx, if we can.  */
1432	      param.regno_to_find = reg;
1433	      for_each_rtx (&i, find_regno_partial, &param);
1434	      if (param.retval != NULL_RTX)
1435		{
1436		  insn = gen_move_insn (param.retval,
1437				        CONST0_RTX (GET_MODE (param.retval)));
1438		  insert_insn_on_edge (insn, e);
1439		  did_something = 1;
1440		}
1441	    }
1442	});
1443    }
1444
1445  if (did_something)
1446    commit_edge_insertions ();
1447  return did_something;
1448}
1449
1450
1451/* Subroutines of life analysis.  */
1452
1453/* Allocate the permanent data structures that represent the results
1454   of life analysis.  Not static since used also for stupid life analysis.  */
1455
1456void
1457allocate_bb_life_data ()
1458{
1459  int i;
1460
1461  for (i = 0; i < n_basic_blocks; i++)
1462    {
1463      basic_block bb = BASIC_BLOCK (i);
1464
1465      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1466      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1467    }
1468
1469  ENTRY_BLOCK_PTR->global_live_at_end
1470    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1471  EXIT_BLOCK_PTR->global_live_at_start
1472    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1473
1474  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1475}
1476
1477void
1478allocate_reg_life_data ()
1479{
1480  int i;
1481
1482  max_regno = max_reg_num ();
1483
1484  /* Recalculate the register space, in case it has grown.  Old style
1485     vector oriented regsets would set regset_{size,bytes} here also.  */
1486  allocate_reg_info (max_regno, FALSE, FALSE);
1487
1488  /* Reset all the data we'll collect in propagate_block and its
1489     subroutines.  */
1490  for (i = 0; i < max_regno; i++)
1491    {
1492      REG_N_SETS (i) = 0;
1493      REG_N_REFS (i) = 0;
1494      REG_N_DEATHS (i) = 0;
1495      REG_N_CALLS_CROSSED (i) = 0;
1496      REG_LIVE_LENGTH (i) = 0;
1497      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1498    }
1499}
1500
1501/* Delete dead instructions for propagate_block.  */
1502
1503static void
1504propagate_block_delete_insn (bb, insn)
1505     basic_block bb;
1506     rtx insn;
1507{
1508  rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1509  bool purge = false;
1510
1511  /* If the insn referred to a label, and that label was attached to
1512     an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
1513     pretty much mandatory to delete it, because the ADDR_VEC may be
1514     referencing labels that no longer exist.
1515
1516     INSN may reference a deleted label, particularly when a jump
1517     table has been optimized into a direct jump.  There's no
1518     real good way to fix up the reference to the deleted label
1519     when the label is deleted, so we just allow it here.
1520
1521     After dead code elimination is complete, we do search for
1522     any REG_LABEL notes which reference deleted labels as a
1523     sanity check.  */
1524
1525  if (inote && GET_CODE (XEXP (inote, 0)) == CODE_LABEL)
1526    {
1527      rtx label = XEXP (inote, 0);
1528      rtx next;
1529
1530      /* The label may be forced if it has been put in the constant
1531	 pool.  If that is the only use we must discard the table
1532	 jump following it, but not the label itself.  */
1533      if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1534	  && (next = next_nonnote_insn (label)) != NULL
1535	  && GET_CODE (next) == JUMP_INSN
1536	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
1537	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1538	{
1539	  rtx pat = PATTERN (next);
1540	  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1541	  int len = XVECLEN (pat, diff_vec_p);
1542	  int i;
1543
1544	  for (i = 0; i < len; i++)
1545	    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1546
1547	  delete_insn (next);
1548	}
1549    }
1550
1551  if (bb->end == insn)
1552    purge = true;
1553  delete_insn (insn);
1554  if (purge)
1555    purge_dead_edges (bb);
1556}
1557
1558/* Delete dead libcalls for propagate_block.  Return the insn
1559   before the libcall.  */
1560
1561static rtx
1562propagate_block_delete_libcall (insn, note)
1563     rtx insn, note;
1564{
1565  rtx first = XEXP (note, 0);
1566  rtx before = PREV_INSN (first);
1567
1568  delete_insn_chain (first, insn);
1569  return before;
1570}
1571
1572/* Update the life-status of regs for one insn.  Return the previous insn.  */
1573
1574rtx
1575propagate_one_insn (pbi, insn)
1576     struct propagate_block_info *pbi;
1577     rtx insn;
1578{
1579  rtx prev = PREV_INSN (insn);
1580  int flags = pbi->flags;
1581  int insn_is_dead = 0;
1582  int libcall_is_dead = 0;
1583  rtx note;
1584  int i;
1585
1586  if (! INSN_P (insn))
1587    return prev;
1588
1589  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1590  if (flags & PROP_SCAN_DEAD_CODE)
1591    {
1592      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
1593      libcall_is_dead = (insn_is_dead && note != 0
1594			 && libcall_dead_p (pbi, note, insn));
1595    }
1596
1597  /* If an instruction consists of just dead store(s) on the final pass,
1598     delete it.  */
1599  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1600    {
1601      /* If we're trying to delete a prologue or epilogue instruction
1602	 that isn't flagged as possibly being dead, something is wrong.
1603	 But if we are keeping the stack pointer depressed, we might well
1604	 be deleting insns that are used to compute the amount to update
1605	 it by, so they are fine.  */
1606      if (reload_completed
1607	  && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1608		&& (TYPE_RETURNS_STACK_DEPRESSED
1609		    (TREE_TYPE (current_function_decl))))
1610	  && (((HAVE_epilogue || HAVE_prologue)
1611	       && prologue_epilogue_contains (insn))
1612	      || (HAVE_sibcall_epilogue
1613		  && sibcall_epilogue_contains (insn)))
1614	  && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1615	fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1616
1617      /* Record sets.  Do this even for dead instructions, since they
1618	 would have killed the values if they hadn't been deleted.  */
1619      mark_set_regs (pbi, PATTERN (insn), insn);
1620
1621      /* CC0 is now known to be dead.  Either this insn used it,
1622	 in which case it doesn't anymore, or clobbered it,
1623	 so the next insn can't use it.  */
1624      pbi->cc0_live = 0;
1625
1626      if (libcall_is_dead)
1627	prev = propagate_block_delete_libcall (insn, note);
1628      else
1629	propagate_block_delete_insn (pbi->bb, insn);
1630
1631      return prev;
1632    }
1633
1634  /* See if this is an increment or decrement that can be merged into
1635     a following memory address.  */
1636#ifdef AUTO_INC_DEC
1637  {
1638    rtx x = single_set (insn);
1639
1640    /* Does this instruction increment or decrement a register?  */
1641    if ((flags & PROP_AUTOINC)
1642	&& x != 0
1643	&& GET_CODE (SET_DEST (x)) == REG
1644	&& (GET_CODE (SET_SRC (x)) == PLUS
1645	    || GET_CODE (SET_SRC (x)) == MINUS)
1646	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
1647	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1648	/* Ok, look for a following memory ref we can combine with.
1649	   If one is found, change the memory ref to a PRE_INC
1650	   or PRE_DEC, cancel this insn, and return 1.
1651	   Return 0 if nothing has been done.  */
1652	&& try_pre_increment_1 (pbi, insn))
1653      return prev;
1654  }
1655#endif /* AUTO_INC_DEC */
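  /* A sketch of the transformation (the pseudo-register number is
     invented): an increment such as
	(set (reg:SI 117) (plus:SI (reg:SI 117) (const_int 4)))
     followed later in the block by a use of
	(mem:SI (reg:SI 117))
     may have that memory reference rewritten as
	(mem:SI (pre_inc:SI (reg:SI 117)))
     and the increment insn deleted, when the target supports PRE_INC
     addressing.  */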
1656
1657  CLEAR_REG_SET (pbi->new_set);
1658
1659  /* If this is not the final pass, and this insn is copying the value of
1660     a library call and it's dead, don't scan the insns that perform the
1661     library call, so that the call's arguments are not marked live.  */
1662  if (libcall_is_dead)
1663    {
1664      /* Record the death of the dest reg.  */
1665      mark_set_regs (pbi, PATTERN (insn), insn);
1666
1667      insn = XEXP (note, 0);
1668      return PREV_INSN (insn);
1669    }
1670  else if (GET_CODE (PATTERN (insn)) == SET
1671	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1672	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1673	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1674	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1675    /* We have an insn to pop a constant amount off the stack.
1676       (Such insns use PLUS regardless of the direction of the stack,
1677       and any insn to adjust the stack by a constant is always a pop.)
1678       These insns, if not dead stores, have no effect on life.  */
1679    ;
1680  else
1681    {
1682      rtx note;
1683      /* Any regs live at the time of a call instruction must not go
1684	 in a register clobbered by calls.  Find all regs now live and
1685	 record this for them.  */
1686
1687      if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
1688	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1689				   { REG_N_CALLS_CROSSED (i)++; });
1690
1691      /* Record sets.  Do this even for dead instructions, since they
1692	 would have killed the values if they hadn't been deleted.  */
1693      mark_set_regs (pbi, PATTERN (insn), insn);
1694
1695      if (GET_CODE (insn) == CALL_INSN)
1696	{
1697	  int i;
1698	  rtx note, cond;
1699
1700	  cond = NULL_RTX;
1701	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1702	    cond = COND_EXEC_TEST (PATTERN (insn));
1703
1704	  /* Non-constant calls clobber memory.  */
1705	  if (! CONST_OR_PURE_CALL_P (insn))
1706	    {
1707	      free_EXPR_LIST_list (&pbi->mem_set_list);
1708	      pbi->mem_set_list_len = 0;
1709	    }
1710
1711	  /* There may be extra registers to be clobbered.  */
1712	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
1713	       note;
1714	       note = XEXP (note, 1))
1715	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1716	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
1717			  cond, insn, pbi->flags);
1718
1719	  /* Calls change all call-used and global registers.  */
1720	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1721	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1722	      {
1723		/* We do not want REG_UNUSED notes for these registers.  */
1724		mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
1725			    cond, insn,
1726			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
1727	      }
1728	}
1729
1730      /* If an insn doesn't use CC0, it becomes dead since we assume
1731	 that every insn clobbers it.  So show it dead here;
1732	 mark_used_regs will set it live if it is referenced.  */
1733      pbi->cc0_live = 0;
1734
1735      /* Record uses.  */
1736      if (! insn_is_dead)
1737	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1738      if ((flags & PROP_EQUAL_NOTES)
1739	  && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
1740	      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
1741	mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);
1742
1743      /* Sometimes we may have inserted something before INSN (such as a move)
1744	 when we make an auto-inc.  So ensure we will scan those insns.  */
1745#ifdef AUTO_INC_DEC
1746      prev = PREV_INSN (insn);
1747#endif
1748
1749      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1750	{
1751	  int i;
1752	  rtx note, cond;
1753
1754	  cond = NULL_RTX;
1755	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1756	    cond = COND_EXEC_TEST (PATTERN (insn));
1757
1758	  /* Calls use their arguments.  */
1759	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
1760	       note;
1761	       note = XEXP (note, 1))
1762	    if (GET_CODE (XEXP (note, 0)) == USE)
1763	      mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
1764			      cond, insn);
1765
1766	  /* The stack ptr is used (honorarily) by a CALL insn.  */
1767	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1768
1769	  /* Calls may also reference any of the global registers,
1770	     so they are made live.  */
1771	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1772	    if (global_regs[i])
1773	      mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
1774			     cond, insn);
1775	}
1776    }
1777
1778  /* On the final pass, update the counts of the number of insns in
1779     which each reg is live.  */
1780  if (flags & PROP_REG_INFO)
1781    EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1782			       { REG_LIVE_LENGTH (i)++; });
1783
1784  return prev;
1785}
1786
1787/* Initialize a propagate_block_info struct for public consumption.
1788   Note that the structure itself is opaque outside this file, but that
1789   the user can use the regsets provided here.  */
1790
1791struct propagate_block_info *
1792init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
1793     basic_block bb;
1794     regset live, local_set, cond_local_set;
1795     int flags;
1796{
1797  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1798
1799  pbi->bb = bb;
1800  pbi->reg_live = live;
1801  pbi->mem_set_list = NULL_RTX;
1802  pbi->mem_set_list_len = 0;
1803  pbi->local_set = local_set;
1804  pbi->cond_local_set = cond_local_set;
1805  pbi->cc0_live = 0;
1806  pbi->flags = flags;
1807
1808  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1809    pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
1810  else
1811    pbi->reg_next_use = NULL;
1812
1813  pbi->new_set = BITMAP_XMALLOC ();
1814
1815#ifdef HAVE_conditional_execution
1816  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
1817				       free_reg_cond_life_info);
1818  pbi->reg_cond_reg = BITMAP_XMALLOC ();
1819
1820  /* If this block ends in a conditional branch, for each register live
1821     from one side of the branch and not the other, record the register
1822     as conditionally dead.  */
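  /* For instance (pseudo-register numbers invented), if the block ends in
	(set (pc) (if_then_else (ne (reg 100) (const_int 0))
				(label_ref L) (pc)))
     and (reg 200) is live at the start of the taken successor only, then
     (reg 200) is recorded below as dead under the reversed condition
     (eq (reg 100) (const_int 0)).  */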
1823  if (GET_CODE (bb->end) == JUMP_INSN
1824      && any_condjump_p (bb->end))
1825    {
1826      regset_head diff_head;
1827      regset diff = INITIALIZE_REG_SET (diff_head);
1828      basic_block bb_true, bb_false;
1829      rtx cond_true, cond_false, set_src;
1830      int i;
1831
1832      /* Identify the successor blocks.  */
1833      bb_true = bb->succ->dest;
1834      if (bb->succ->succ_next != NULL)
1835	{
1836	  bb_false = bb->succ->succ_next->dest;
1837
1838	  if (bb->succ->flags & EDGE_FALLTHRU)
1839	    {
1840	      basic_block t = bb_false;
1841	      bb_false = bb_true;
1842	      bb_true = t;
1843	    }
1844	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
1845	    abort ();
1846	}
1847      else
1848	{
1849	  /* This can happen with a conditional jump to the next insn.  */
1850	  if (JUMP_LABEL (bb->end) != bb_true->head)
1851	    abort ();
1852
1853	  /* Simplest way to do nothing.  */
1854	  bb_false = bb_true;
1855	}
1856
1857      /* Extract the condition from the branch.  */
1858      set_src = SET_SRC (pc_set (bb->end));
1859      cond_true = XEXP (set_src, 0);
1860      cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
1861				   GET_MODE (cond_true), XEXP (cond_true, 0),
1862				   XEXP (cond_true, 1));
1863      if (GET_CODE (XEXP (set_src, 1)) == PC)
1864	{
1865	  rtx t = cond_false;
1866	  cond_false = cond_true;
1867	  cond_true = t;
1868	}
1869
1870      /* Compute which registers lead different lives in the successors.  */
1871      if (bitmap_operation (diff, bb_true->global_live_at_start,
1872			    bb_false->global_live_at_start, BITMAP_XOR))
1873	{
1874	  rtx reg = XEXP (cond_true, 0);
1875
1876	  if (GET_CODE (reg) == SUBREG)
1877	    reg = SUBREG_REG (reg);
1878
1879	  if (GET_CODE (reg) != REG)
1880	    abort ();
1881
1882	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
1883
1884	  /* For each such register, mark it conditionally dead.  */
1885	  EXECUTE_IF_SET_IN_REG_SET
1886	    (diff, 0, i,
1887	     {
1888	       struct reg_cond_life_info *rcli;
1889	       rtx cond;
1890
1891	       rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
1892
1893	       if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
1894		 cond = cond_false;
1895	       else
1896		 cond = cond_true;
1897	       rcli->condition = cond;
1898	       rcli->stores = const0_rtx;
1899	       rcli->orig_condition = cond;
1900
1901	       splay_tree_insert (pbi->reg_cond_dead, i,
1902				  (splay_tree_value) rcli);
1903	     });
1904	}
1905
1906      FREE_REG_SET (diff);
1907    }
1908#endif
1909
1910  /* If this block has no successors, any stores to the frame that aren't
1911     used later in the block are dead.  So make a pass over the block
1912     recording any such that are made and show them dead at the end.  We do
1913     a very conservative and simple job here.  */
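  /* For example (frame offset and register number invented), in a block
     that simply falls into the exit block, a store such as
	(set (mem:SI (plus:SI (reg fp) (const_int -8))) (reg:SI 123))
     is recorded here so that the backward scan over the block can find it
     dead, provided nothing later in the block reads that slot.  */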
1914  if (optimize
1915      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1916	    && (TYPE_RETURNS_STACK_DEPRESSED
1917		(TREE_TYPE (current_function_decl))))
1918      && (flags & PROP_SCAN_DEAD_CODE)
1919      && (bb->succ == NULL
1920	  || (bb->succ->succ_next == NULL
1921	      && bb->succ->dest == EXIT_BLOCK_PTR
1922	      && ! current_function_calls_eh_return)))
1923    {
1924      rtx insn, set;
1925      for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
1926	if (GET_CODE (insn) == INSN
1927	    && (set = single_set (insn))
1928	    && GET_CODE (SET_DEST (set)) == MEM)
1929	  {
1930	    rtx mem = SET_DEST (set);
1931	    rtx canon_mem = canon_rtx (mem);
1932
1933	    /* This optimization is performed by faking a store to the
1934	       memory at the end of the block.  This doesn't work for
1935	       unchanging memories because multiple stores to unchanging
1936	       memory are illegal and alias analysis doesn't consider it.  */
1937	    if (RTX_UNCHANGING_P (canon_mem))
1938	      continue;
1939
1940	    if (XEXP (canon_mem, 0) == frame_pointer_rtx
1941		|| (GET_CODE (XEXP (canon_mem, 0)) == PLUS
1942		    && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
1943		    && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
1944	      add_to_mem_set_list (pbi, canon_mem);
1945	  }
1946    }
1947
1948  return pbi;
1949}
1950
1951/* Release a propagate_block_info struct.  */
1952
1953void
1954free_propagate_block_info (pbi)
1955     struct propagate_block_info *pbi;
1956{
1957  free_EXPR_LIST_list (&pbi->mem_set_list);
1958
1959  BITMAP_XFREE (pbi->new_set);
1960
1961#ifdef HAVE_conditional_execution
1962  splay_tree_delete (pbi->reg_cond_dead);
1963  BITMAP_XFREE (pbi->reg_cond_reg);
1964#endif
1965
1966  if (pbi->reg_next_use)
1967    free (pbi->reg_next_use);
1968
1969  free (pbi);
1970}
1971
1972/* Compute the registers live at the beginning of a basic block BB from
1973   those live at the end.
1974
1975   When called, REG_LIVE contains those live at the end.  On return, it
1976   contains those live at the beginning.
1977
1978   LOCAL_SET, if non-null, will be set with all registers killed
1979   unconditionally by this basic block.
1980   Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
1981   killed conditionally by this basic block.  If there is any unconditional
1982   set of a register, then the corresponding bit will be set in LOCAL_SET
1983   and cleared in COND_LOCAL_SET.
1984   It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set.  In this
1985   case, the resulting set will be equal to the union of the two sets that
1986   would otherwise be computed.
1987
1988   Return non-zero if an INSN is deleted (i.e. by dead code removal).  */
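/* As a small illustration (pseudo-register numbers invented), in a block
   containing
	(set (reg 100) ...)
   and
	(cond_exec (ne (reg 98) (const_int 0)) (set (reg 101) ...))
   the bit for (reg 100) ends up in LOCAL_SET, while the bit for (reg 101)
   ends up in COND_LOCAL_SET, assuming (reg 101) is not also set
   unconditionally elsewhere in the block.  */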
1989
1990int
1991propagate_block (bb, live, local_set, cond_local_set, flags)
1992     basic_block bb;
1993     regset live;
1994     regset local_set;
1995     regset cond_local_set;
1996     int flags;
1997{
1998  struct propagate_block_info *pbi;
1999  rtx insn, prev;
2000  int changed;
2001
2002  pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2003
2004  if (flags & PROP_REG_INFO)
2005    {
2006      int i;
2007
2008      /* Process the regs live at the end of the block.
2009	 Mark them as not local to any one basic block.  */
2010      EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
2011				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2012    }
2013
2014  /* Scan the block an insn at a time from end to beginning.  */
2015
2016  changed = 0;
2017  for (insn = bb->end;; insn = prev)
2018    {
2019      /* If this is a call to `setjmp' et al, warn if any
2020	 non-volatile datum is live.  */
2021      if ((flags & PROP_REG_INFO)
2022	  && GET_CODE (insn) == CALL_INSN
2023	  && find_reg_note (insn, REG_SETJMP, NULL))
2024	IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2025
2026      prev = propagate_one_insn (pbi, insn);
2027      changed |= NEXT_INSN (prev) != insn;
2028
2029      if (insn == bb->head)
2030	break;
2031    }
2032
2033  free_propagate_block_info (pbi);
2034
2035  return changed;
2036}
2037
2038/* Return 1 if X (the body of an insn, or part of it) is just dead stores
2039   (SET expressions whose destinations are registers dead after the insn).
2040   NEEDED is the regset that says which regs are alive after the insn.
2041
2042   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2043
2044   If X is the entire body of an insn, NOTES contains the reg notes
2045   pertaining to the insn.  */
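/* For instance (pseudo-register numbers invented), a body
	(set (reg:SI 150) (plus:SI (reg:SI 151) (const_int 1)))
   is reported dead when (reg 150) is not live after the insn, provided
   none of the exceptions below (stack pointer, frame pointer, global
   registers, ...) apply.  */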
2046
2047static int
2048insn_dead_p (pbi, x, call_ok, notes)
2049     struct propagate_block_info *pbi;
2050     rtx x;
2051     int call_ok;
2052     rtx notes ATTRIBUTE_UNUSED;
2053{
2054  enum rtx_code code = GET_CODE (x);
2055
2056#ifdef AUTO_INC_DEC
2057  /* As flow is invoked after combine, we must take existing AUTO_INC
2058     expressions into account.  */
2059  for (; notes; notes = XEXP (notes, 1))
2060    {
2061      if (REG_NOTE_KIND (notes) == REG_INC)
2062	{
2063	  int regno = REGNO (XEXP (notes, 0));
2064
2065	  /* Don't delete insns to set global regs.  */
2066	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2067	      || REGNO_REG_SET_P (pbi->reg_live, regno))
2068	    return 0;
2069	}
2070    }
2071#endif
2072
2073  /* If setting something that's a reg or part of one,
2074     see if that register's altered value will be live.  */
2075
2076  if (code == SET)
2077    {
2078      rtx r = SET_DEST (x);
2079
2080#ifdef HAVE_cc0
2081      if (GET_CODE (r) == CC0)
2082	return ! pbi->cc0_live;
2083#endif
2084
2085      /* A SET that is a subroutine call cannot be dead.  */
2086      if (GET_CODE (SET_SRC (x)) == CALL)
2087	{
2088	  if (! call_ok)
2089	    return 0;
2090	}
2091
2092      /* Don't eliminate loads from volatile memory or volatile asms.  */
2093      else if (volatile_refs_p (SET_SRC (x)))
2094	return 0;
2095
2096      if (GET_CODE (r) == MEM)
2097	{
2098	  rtx temp, canon_r;
2099
2100	  if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
2101	    return 0;
2102
2103	  canon_r = canon_rtx (r);
2104
2105	  /* Walk the set of memory locations we are currently tracking
2106	     and see if one is an identical match to this memory location.
2107	     If so, this memory write is dead (remember, we're walking
2108	     backwards from the end of the block to the start).  Since
2109	     rtx_equal_p does not check the alias set or flags, we also
2110	     must have the potential for them to conflict (anti_dependence).  */
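	  /* E.g. (address schematic) a store to
		(mem:SI (plus (reg fp) (const_int -4)))
	     is dead when the list already holds a tracked store to the
	     same address that is at least as wide.  */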
2111	  for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2112	    if (anti_dependence (r, XEXP (temp, 0)))
2113	      {
2114		rtx mem = XEXP (temp, 0);
2115
2116		if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
2117		    && (GET_MODE_SIZE (GET_MODE (canon_r))
2118			<= GET_MODE_SIZE (GET_MODE (mem))))
2119		  return 1;
2120
2121#ifdef AUTO_INC_DEC
2122		/* Check if memory reference matches an auto increment. Only
2123		   post increment/decrement or modify are valid.  */
2124		if (GET_MODE (mem) == GET_MODE (r)
2125		    && (GET_CODE (XEXP (mem, 0)) == POST_DEC
2126			|| GET_CODE (XEXP (mem, 0)) == POST_INC
2127			|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
2128		    && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
2129		    && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
2130		  return 1;
2131#endif
2132	      }
2133	}
2134      else
2135	{
2136	  while (GET_CODE (r) == SUBREG
2137		 || GET_CODE (r) == STRICT_LOW_PART
2138		 || GET_CODE (r) == ZERO_EXTRACT)
2139	    r = XEXP (r, 0);
2140
2141	  if (GET_CODE (r) == REG)
2142	    {
2143	      int regno = REGNO (r);
2144
2145	      /* Obvious.  */
2146	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
2147		return 0;
2148
2149	      /* If this is a hard register, verify that subsequent
2150		 words are not needed.  */
2151	      if (regno < FIRST_PSEUDO_REGISTER)
2152		{
2153		  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2154
2155		  while (--n > 0)
2156		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
2157		      return 0;
2158		}
2159
2160	      /* Don't delete insns to set global regs.  */
2161	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2162		return 0;
2163
2164	      /* Make sure insns to set the stack pointer aren't deleted.  */
2165	      if (regno == STACK_POINTER_REGNUM)
2166		return 0;
2167
2168	      /* ??? These bits might be redundant with the force live bits
2169		 in calculate_global_regs_live.  We would delete from
2170		 sequential sets; whether this actually affects real code
2171		 for anything but the stack pointer I don't know.  */
2172	      /* Make sure insns to set the frame pointer aren't deleted.  */
2173	      if (regno == FRAME_POINTER_REGNUM
2174		  && (! reload_completed || frame_pointer_needed))
2175		return 0;
2176#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2177	      if (regno == HARD_FRAME_POINTER_REGNUM
2178		  && (! reload_completed || frame_pointer_needed))
2179		return 0;
2180#endif
2181
2182#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2183	      /* Make sure insns to set arg pointer are never deleted
2184		 (if the arg pointer isn't fixed, there will be a USE
2185		 for it, so we can treat it normally).  */
2186	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2187		return 0;
2188#endif
2189
2190	      /* Otherwise, the set is dead.  */
2191	      return 1;
2192	    }
2193	}
2194    }
2195
2196  /* If performing several activities, insn is dead if each activity
2197     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
2198     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
2199     worth keeping.  */
2200  else if (code == PARALLEL)
2201    {
2202      int i = XVECLEN (x, 0);
2203
2204      for (i--; i >= 0; i--)
2205	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2206	    && GET_CODE (XVECEXP (x, 0, i)) != USE
2207	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
2208	  return 0;
2209
2210      return 1;
2211    }
2212
2213  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
2214     is not necessarily true for hard registers.  */
2215  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2216	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2217	   && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
2218    return 1;
2219
2220  /* We do not check other CLOBBER or USE here.  An insn consisting of just
2221     a CLOBBER or just a USE should not be deleted.  */
2222  return 0;
2223}
2224
2225/* If INSN is the last insn in a libcall, and assuming INSN is dead,
2226   return 1 if the entire library call is dead.
2227   This is true if INSN copies a register (hard or pseudo)
2228   and if the hard return reg of the call insn is dead.
2229   (The caller should have tested the destination of the SET inside
2230   INSN already for death.)
2231
2232   If this insn doesn't just copy a register, then we don't
2233   have an ordinary libcall.  In that case, cse could not have
2234   managed to substitute the source for the dest later on,
2235   so we can assume the libcall is dead.
2236
2237   PBI is the block info giving pseudoregs live before this insn.
2238   NOTE is the REG_RETVAL note of the insn.  */
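/* Shape of the sequence in question (register numbers illustrative):
	(set (reg 170) ...)		first insn, carries REG_LIBCALL
	...
	(set (reg 0) (call ...))	the library call itself
	(set (reg 171) (reg 0))		INSN, carries REG_RETVAL
   If the destination of INSN is dead and the hard return register set by
   the call is dead as well, the whole sequence can be removed.  */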
2239
2240static int
2241libcall_dead_p (pbi, note, insn)
2242     struct propagate_block_info *pbi;
2243     rtx note;
2244     rtx insn;
2245{
2246  rtx x = single_set (insn);
2247
2248  if (x)
2249    {
2250      rtx r = SET_SRC (x);
2251
2252      if (GET_CODE (r) == REG)
2253	{
2254	  rtx call = XEXP (note, 0);
2255	  rtx call_pat;
2256	  int i;
2257
2258	  /* Find the call insn.  */
2259	  while (call != insn && GET_CODE (call) != CALL_INSN)
2260	    call = NEXT_INSN (call);
2261
2262	  /* If there is none, do nothing special,
2263	     since ordinary death handling can understand these insns.  */
2264	  if (call == insn)
2265	    return 0;
2266
2267	  /* See if the hard reg holding the value is dead.
2268	     If this is a PARALLEL, find the call within it.  */
2269	  call_pat = PATTERN (call);
2270	  if (GET_CODE (call_pat) == PARALLEL)
2271	    {
2272	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2273		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2274		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2275		  break;
2276
2277	      /* This may be a library call that is returning a value
2278		 via invisible pointer.  Do nothing special, since
2279		 ordinary death handling can understand these insns.  */
2280	      if (i < 0)
2281		return 0;
2282
2283	      call_pat = XVECEXP (call_pat, 0, i);
2284	    }
2285
2286	  return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
2287	}
2288    }
2289  return 1;
2290}
2291
2292/* Return 1 if register REGNO was used before it was set, i.e. if it is
2293   live at function entry.  Don't count global register variables, variables
2294   in registers that can be used for function arg passing, or variables in
2295   fixed hard registers.  */
2296
2297int
2298regno_uninitialized (regno)
2299     unsigned int regno;
2300{
2301  if (n_basic_blocks == 0
2302      || (regno < FIRST_PSEUDO_REGISTER
2303	  && (global_regs[regno]
2304	      || fixed_regs[regno]
2305	      || FUNCTION_ARG_REGNO_P (regno))))
2306    return 0;
2307
2308  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
2309}
2310
2311/* 1 if register REGNO was alive at a place where `setjmp' was called
2312   and was set more than once or is an argument.
2313   Such regs may be clobbered by `longjmp'.  */
2314
2315int
2316regno_clobbered_at_setjmp (regno)
2317     int regno;
2318{
2319  if (n_basic_blocks == 0)
2320    return 0;
2321
2322  return ((REG_N_SETS (regno) > 1
2323	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
2324	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2325}
2326
2327/* Add MEM to PBI->MEM_SET_LIST.  MEM should be canonical.  Respect the
2328   maximal list size; look for overlaps in mode and select the largest.  */
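/* For example (pseudo-register number invented), if the list already
   holds (mem:HI (reg 130)) and MEM is (mem:SI (reg 130)), the existing
   entry is widened to SImode rather than adding a second entry.  */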
2329static void
2330add_to_mem_set_list (pbi, mem)
2331     struct propagate_block_info *pbi;
2332     rtx mem;
2333{
2334  rtx i;
2335
2336  /* We don't know how large a BLKmode store is, so we must not
2337     take them into consideration.  */
2338  if (GET_MODE (mem) == BLKmode)
2339    return;
2340
2341  for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2342    {
2343      rtx e = XEXP (i, 0);
2344      if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2345	{
2346	  if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2347	    {
2348#ifdef AUTO_INC_DEC
2349	      /* If we must store a copy of the mem, we can just modify
2350		 the mode of the stored copy.  */
2351	      if (pbi->flags & PROP_AUTOINC)
2352	        PUT_MODE (e, GET_MODE (mem));
2353	      else
2354#endif
2355	        XEXP (i, 0) = mem;
2356	    }
2357	  return;
2358	}
2359    }
2360
2361  if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
2362    {
2363#ifdef AUTO_INC_DEC
2364      /* Store a copy of mem, otherwise the address may be
2365	 scrogged by find_auto_inc.  */
2366      if (pbi->flags & PROP_AUTOINC)
2367	mem = shallow_copy_rtx (mem);
2368#endif
2369      pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2370      pbi->mem_set_list_len++;
2371    }
2372}
2373
2374/* INSN references memory, possibly using autoincrement addressing modes.
2375   Find any entries on the mem_set_list that need to be invalidated due
2376   to an address change.  */
2377
2378static void
2379invalidate_mems_from_autoinc (pbi, insn)
2380     struct propagate_block_info *pbi;
2381     rtx insn;
2382{
2383  rtx note = REG_NOTES (insn);
2384  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2385    if (REG_NOTE_KIND (note) == REG_INC)
2386      invalidate_mems_from_set (pbi, XEXP (note, 0));
2387}
2388
2389/* EXP is a REG.  Remove any dependent entries from pbi->mem_set_list.  */
2390
2391static void
2392invalidate_mems_from_set (pbi, exp)
2393     struct propagate_block_info *pbi;
2394     rtx exp;
2395{
2396  rtx temp = pbi->mem_set_list;
2397  rtx prev = NULL_RTX;
2398  rtx next;
2399
2400  while (temp)
2401    {
2402      next = XEXP (temp, 1);
2403      if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2404	{
2405	  /* Splice this entry out of the list.  */
2406	  if (prev)
2407	    XEXP (prev, 1) = next;
2408	  else
2409	    pbi->mem_set_list = next;
2410	  free_EXPR_LIST_node (temp);
2411	  pbi->mem_set_list_len--;
2412	}
2413      else
2414	prev = temp;
2415      temp = next;
2416    }
2417}
2418
2419/* Process the registers that are set within X.  Since they are set
2420   here, they are dead before this insn and are cleared from PBI->REG_LIVE.
2421
2422   If INSN is nonzero, it is the insn being processed.
2423
2424   FLAGS is the set of operations to perform.  */
2425
2426static void
2427mark_set_regs (pbi, x, insn)
2428     struct propagate_block_info *pbi;
2429     rtx x, insn;
2430{
2431  rtx cond = NULL_RTX;
2432  rtx link;
2433  enum rtx_code code;
2434
2435  if (insn)
2436    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2437      {
2438	if (REG_NOTE_KIND (link) == REG_INC)
2439	  mark_set_1 (pbi, SET, XEXP (link, 0),
2440		      (GET_CODE (x) == COND_EXEC
2441		       ? COND_EXEC_TEST (x) : NULL_RTX),
2442		      insn, pbi->flags);
2443      }
2444 retry:
2445  switch (code = GET_CODE (x))
2446    {
2447    case SET:
2448    case CLOBBER:
2449      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
2450      return;
2451
2452    case COND_EXEC:
2453      cond = COND_EXEC_TEST (x);
2454      x = COND_EXEC_CODE (x);
2455      goto retry;
2456
2457    case PARALLEL:
2458      {
2459	int i;
2460
2461	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2462	  {
2463	    rtx sub = XVECEXP (x, 0, i);
2464	    switch (code = GET_CODE (sub))
2465	      {
2466	      case COND_EXEC:
2467		if (cond != NULL_RTX)
2468		  abort ();
2469
2470		cond = COND_EXEC_TEST (sub);
2471		sub = COND_EXEC_CODE (sub);
2472		if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
2473		  break;
2474		/* Fall through.  */
2475
2476	      case SET:
2477	      case CLOBBER:
2478		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
2479		break;
2480
2481	      default:
2482		break;
2483	      }
2484	  }
2485	break;
2486      }
2487
2488    default:
2489      break;
2490    }
2491}
2492
2493/* Process a single set, which appears in INSN.  REG (which may not
2494   actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2495   being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2496   If the set is conditional (because it appears in a COND_EXEC), COND
2497   will be the condition.  */
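/* For instance (pseudo-register number invented), a partial store such as
	(set (strict_low_part (subreg:QI (reg:SI 140) 0)) ...)
   does not kill the old value of (reg 140); the NOT_DEAD handling below
   keeps (reg 140) live across the set when it was live to begin with.  */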
2498
2499static void
2500mark_set_1 (pbi, code, reg, cond, insn, flags)
2501     struct propagate_block_info *pbi;
2502     enum rtx_code code;
2503     rtx reg, cond, insn;
2504     int flags;
2505{
2506  int regno_first = -1, regno_last = -1;
2507  unsigned long not_dead = 0;
2508  int i;
2509
2510  /* Modifying just one hardware register of a multi-reg value or just a
2511     byte field of a register does not mean the value from before this insn
2512     is now dead.  Of course, if it was dead afterward, it is unused now.  */
2513
2514  switch (GET_CODE (reg))
2515    {
2516    case PARALLEL:
2517      /* Some targets place small structures in registers for return values of
2518	 functions.  We have to detect this case specially here to get correct
2519	 flow information.  */
2520      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2521	if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2522	  mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2523		      flags);
2524      return;
2525
2526    case ZERO_EXTRACT:
2527    case SIGN_EXTRACT:
2528    case STRICT_LOW_PART:
2529      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
2530      do
2531	reg = XEXP (reg, 0);
2532      while (GET_CODE (reg) == SUBREG
2533	     || GET_CODE (reg) == ZERO_EXTRACT
2534	     || GET_CODE (reg) == SIGN_EXTRACT
2535	     || GET_CODE (reg) == STRICT_LOW_PART);
2536      if (GET_CODE (reg) == MEM)
2537	break;
2538      not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
2539      /* Fall through.  */
2540
2541    case REG:
2542      regno_last = regno_first = REGNO (reg);
2543      if (regno_first < FIRST_PSEUDO_REGISTER)
2544	regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
2545      break;
2546
2547    case SUBREG:
2548      if (GET_CODE (SUBREG_REG (reg)) == REG)
2549	{
2550	  enum machine_mode outer_mode = GET_MODE (reg);
2551	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2552
2553	  /* Identify the range of registers affected.  This is moderately
2554	     tricky for hard registers.  See alter_subreg.  */
2555
2556	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
2557	  if (regno_first < FIRST_PSEUDO_REGISTER)
2558	    {
2559	      regno_first += subreg_regno_offset (regno_first, inner_mode,
2560						  SUBREG_BYTE (reg),
2561						  outer_mode);
2562	      regno_last = (regno_first
2563			    + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
2564
2565	      /* Since we've just adjusted the register number ranges, make
2566		 sure REG matches.  Otherwise some_was_live will be clear
2567		 when it shouldn't have been, and we'll create incorrect
2568		 REG_UNUSED notes.  */
2569	      reg = gen_rtx_REG (outer_mode, regno_first);
2570	    }
2571	  else
2572	    {
2573	      /* If the number of words in the subreg is less than the number
2574		 of words in the full register, we have a well-defined partial
2575		 set.  Otherwise the high bits are undefined.
2576
2577		 This is only really applicable to pseudos, since we just took
2578		 care of multi-word hard registers.  */
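	      /* For example, on a 32-bit target, setting
		 (subreg:SI (reg:DI 145) 0) (pseudo number invented) writes
		 only the low word of the DImode pseudo, so the old value is
		 not treated as unconditionally dead.  */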
2579	      if (((GET_MODE_SIZE (outer_mode)
2580		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2581		  < ((GET_MODE_SIZE (inner_mode)
2582		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2583		not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2584							    regno_first);
2585
2586	      reg = SUBREG_REG (reg);
2587	    }
2588	}
2589      else
2590	reg = SUBREG_REG (reg);
2591      break;
2592
2593    default:
2594      break;
2595    }
2596
2597  /* If this set is a MEM, then it kills any aliased writes.
2598     If this set is a REG, then it kills any MEMs which use the reg.  */
2599  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2600    {
2601      if (GET_CODE (reg) == REG)
2602	invalidate_mems_from_set (pbi, reg);
2603
2604      /* If the memory reference had embedded side effects (autoincrement
2605		 address modes), then we may need to kill some entries on the
2606	 memory set list.  */
2607      if (insn && GET_CODE (reg) == MEM)
2608	invalidate_mems_from_autoinc (pbi, insn);
2609
2610      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2611	  /* ??? With more effort we could track conditional memory life.  */
2612	  && ! cond
2613	  /* There are no REG_INC notes for SP, so we can't assume we'll see
2614	     everything that invalidates it.  To be safe, don't eliminate any
2615	     stores through SP; none of them should be redundant anyway.  */
2616	  && ! reg_mentioned_p (stack_pointer_rtx, reg))
2617        add_to_mem_set_list (pbi, canon_rtx (reg));
2618    }
2619
2620  if (GET_CODE (reg) == REG
2621      && ! (regno_first == FRAME_POINTER_REGNUM
2622	    && (! reload_completed || frame_pointer_needed))
2623#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2624      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2625	    && (! reload_completed || frame_pointer_needed))
2626#endif
2627#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2628      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2629#endif
2630      )
2631    {
2632      int some_was_live = 0, some_was_dead = 0;
2633
2634      for (i = regno_first; i <= regno_last; ++i)
2635	{
2636	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2637	  if (pbi->local_set)
2638	    {
2639	      /* Order of the set operation matters here since both
2640		 sets may be the same.  */
2641	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2642	      if (cond != NULL_RTX
2643		  && ! REGNO_REG_SET_P (pbi->local_set, i))
2644		SET_REGNO_REG_SET (pbi->cond_local_set, i);
2645	      else
2646		SET_REGNO_REG_SET (pbi->local_set, i);
2647	    }
2648	  if (code != CLOBBER)
2649	    SET_REGNO_REG_SET (pbi->new_set, i);
2650
2651	  some_was_live |= needed_regno;
2652	  some_was_dead |= ! needed_regno;
2653	}
2654
2655#ifdef HAVE_conditional_execution
2656      /* Consider conditional death in deciding that the register needs
2657	 a death note.  */
2658      if (some_was_live && ! not_dead
2659	  /* The stack pointer is never dead.  Well, not strictly true,
2660	     but it's very difficult to tell from here.  Hopefully
2661	     combine_stack_adjustments will fix up the most egregious
2662	     errors.  */
2663	  && regno_first != STACK_POINTER_REGNUM)
2664	{
2665	  for (i = regno_first; i <= regno_last; ++i)
2666	    if (! mark_regno_cond_dead (pbi, i, cond))
2667	      not_dead |= ((unsigned long) 1) << (i - regno_first);
2668	}
2669#endif
2670
2671      /* Additional data to record if this is the final pass.  */
2672      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2673		   | PROP_DEATH_NOTES | PROP_AUTOINC))
2674	{
2675	  rtx y;
2676	  int blocknum = pbi->bb->index;
2677
2678	  y = NULL_RTX;
2679	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2680	    {
2681	      y = pbi->reg_next_use[regno_first];
2682
2683	      /* The next use is no longer next, since a store intervenes.  */
2684	      for (i = regno_first; i <= regno_last; ++i)
2685		pbi->reg_next_use[i] = 0;
2686	    }
2687
2688	  if (flags & PROP_REG_INFO)
2689	    {
2690	      for (i = regno_first; i <= regno_last; ++i)
2691		{
2692		  /* Count (weighted) references, stores, etc.  This counts a
2693		     register twice if it is modified, but that is correct.  */
2694		  REG_N_SETS (i) += 1;
2695		  REG_N_REFS (i) += 1;
2696		  REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2697
2698	          /* The insns where a reg is live are normally counted
2699		     elsewhere, but we want the count to include the insn
2700		     where the reg is set, and the normal counting mechanism
2701		     would not count it.  */
2702		  REG_LIVE_LENGTH (i) += 1;
2703		}
2704
2705	      /* If this is a hard reg, record this function uses the reg.  */
2706	      if (regno_first < FIRST_PSEUDO_REGISTER)
2707		{
2708		  for (i = regno_first; i <= regno_last; i++)
2709		    regs_ever_live[i] = 1;
2710		}
2711	      else
2712		{
2713		  /* Keep track of which basic blocks each reg appears in.  */
2714		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2715		    REG_BASIC_BLOCK (regno_first) = blocknum;
2716		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2717		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
2718		}
2719	    }
2720
2721	  if (! some_was_dead)
2722	    {
2723	      if (flags & PROP_LOG_LINKS)
2724		{
2725		  /* Make a logical link from the next following insn
2726		     that uses this register, back to this insn.
2727		     The following insns have already been processed.
2728
2729		     We don't build a LOG_LINK for hard registers contained
2730		     in ASM_OPERANDs.  If these registers get replaced,
2731		     we might wind up changing the semantics of the insn,
2732		     even if reload can make what appear to be valid
2733		     assignments later.  */
2734		  if (y && (BLOCK_NUM (y) == blocknum)
2735		      && (regno_first >= FIRST_PSEUDO_REGISTER
2736			  || asm_noperands (PATTERN (y)) < 0))
2737		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
2738		}
2739	    }
2740	  else if (not_dead)
2741	    ;
2742	  else if (! some_was_live)
2743	    {
2744	      if (flags & PROP_REG_INFO)
2745		REG_N_DEATHS (regno_first) += 1;
2746
2747	      if (flags & PROP_DEATH_NOTES)
2748		{
2749		  /* Note that dead stores have already been deleted
2750		     when possible.  If we get here, we have found a
2751		     dead store that cannot be eliminated (because the
2752		     same insn does something useful).  Indicate this
2753		     by marking the reg being set as dying here.  */
2754		  REG_NOTES (insn)
2755		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2756		}
2757	    }
2758	  else
2759	    {
2760	      if (flags & PROP_DEATH_NOTES)
2761		{
2762		  /* This is a case where we have a multi-word hard register
2763		     and some, but not all, of the words of the register are
2764		     needed in subsequent insns.  Write REG_UNUSED notes
2765		     for those parts that were not needed.  This case should
2766		     be rare.  */
2767
2768		  for (i = regno_first; i <= regno_last; ++i)
2769		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
2770		      REG_NOTES (insn)
2771			= alloc_EXPR_LIST (REG_UNUSED,
2772					   gen_rtx_REG (reg_raw_mode[i], i),
2773					   REG_NOTES (insn));
2774		}
2775	    }
2776	}
2777
2778      /* Mark the register as being dead.  */
2779      if (some_was_live
2780	  /* The stack pointer is never dead.  Well, not strictly true,
2781	     but it's very difficult to tell from here.  Hopefully
2782	     combine_stack_adjustments will fix up the most egregious
2783	     errors.  */
2784	  && regno_first != STACK_POINTER_REGNUM)
2785	{
2786	  for (i = regno_first; i <= regno_last; ++i)
2787	    if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2788	      CLEAR_REGNO_REG_SET (pbi->reg_live, i);
2789	}
2790    }
2791  else if (GET_CODE (reg) == REG)
2792    {
2793      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2794	pbi->reg_next_use[regno_first] = 0;
2795    }
2796
2797  /* If this is the last pass and this is a SCRATCH, show it will be dying
2798     here and count it.  */
2799  else if (GET_CODE (reg) == SCRATCH)
2800    {
2801      if (flags & PROP_DEATH_NOTES)
2802	REG_NOTES (insn)
2803	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2804    }
2805}
2806
2807#ifdef HAVE_conditional_execution
2808/* Mark REGNO conditionally dead.
2809   Return true if the register is now unconditionally dead.  */
2810
2811static int
2812mark_regno_cond_dead (pbi, regno, cond)
2813     struct propagate_block_info *pbi;
2814     int regno;
2815     rtx cond;
2816{
2817  /* If this is a store to a predicate register, the value of the
2818     predicate is changing; we don't know that the predicate as seen
2819     before is the same as that seen after.  Flush all dependent
2820     conditions from reg_cond_dead.  This will make all such
2821     conditionally live registers unconditionally live.  */
2822  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
2823    flush_reg_cond_reg (pbi, regno);
2824
2825  /* If this is an unconditional store, remove any conditional
2826     life that may have existed.  */
2827  if (cond == NULL_RTX)
2828    splay_tree_remove (pbi->reg_cond_dead, regno);
2829  else
2830    {
2831      splay_tree_node node;
2832      struct reg_cond_life_info *rcli;
2833      rtx ncond;
2834
2835      /* Otherwise this is a conditional set.  Record that fact.
2836	 It may have been conditionally used, or there may be a
2837	 subsequent set with a complementary condition.  */
2838
2839      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
2840      if (node == NULL)
2841	{
2842	  /* The register was unconditionally live previously.
2843	     Record the current condition as the condition under
2844	     which it is dead.  */
2845	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
2846	  rcli->condition = cond;
2847	  rcli->stores = cond;
2848	  rcli->orig_condition = const0_rtx;
2849	  splay_tree_insert (pbi->reg_cond_dead, regno,
2850			     (splay_tree_value) rcli);
2851
2852	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2853
2854	  /* Not unconditionally dead.  */
2855	  return 0;
2856	}
2857      else
2858	{
2859	  /* The register was conditionally live previously.
2860	     Add the new condition to the old.  */
2861	  rcli = (struct reg_cond_life_info *) node->value;
2862	  ncond = rcli->condition;
2863	  ncond = ior_reg_cond (ncond, cond, 1);
2864	  if (rcli->stores == const0_rtx)
2865	    rcli->stores = cond;
2866	  else if (rcli->stores != const1_rtx)
2867	    rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
2868
2869	  /* If the register is now unconditionally dead, remove the entry
2870	     in the splay_tree.  A register is unconditionally dead if the
2871	     dead condition ncond is true.  A register is also unconditionally
2872	     dead if the sum of all conditional stores is an unconditional
2873	     store (stores is true), and the dead condition is identically the
2874	     same as the original dead condition initialized at the end of
2875	     the block.  This is a pointer compare, not an rtx_equal_p
2876	     compare.  */
2877	  if (ncond == const1_rtx
2878	      || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
2879	    splay_tree_remove (pbi->reg_cond_dead, regno);
2880	  else
2881	    {
2882	      rcli->condition = ncond;
2883
2884	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2885
2886	      /* Not unconditionally dead.  */
2887	      return 0;
2888	    }
2889	}
2890    }
2891
2892  return 1;
2893}
2894
2895/* Called from splay_tree_delete for pbi->reg_cond_life.  */
2896
2897static void
2898free_reg_cond_life_info (value)
2899     splay_tree_value value;
2900{
2901  struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
2902  free (rcli);
2903}
2904
2905/* Helper function for flush_reg_cond_reg.  */
2906
2907static int
2908flush_reg_cond_reg_1 (node, data)
2909     splay_tree_node node;
2910     void *data;
2911{
2912  struct reg_cond_life_info *rcli;
2913  int *xdata = (int *) data;
2914  unsigned int regno = xdata[0];
2915
2916  /* Don't need to search if last flushed value was farther on in
2917     the in-order traversal.  */
2918  if (xdata[1] >= (int) node->key)
2919    return 0;
2920
2921  /* Splice out portions of the expression that refer to regno.  */
2922  rcli = (struct reg_cond_life_info *) node->value;
2923  rcli->condition = elim_reg_cond (rcli->condition, regno);
2924  if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
2925    rcli->stores = elim_reg_cond (rcli->stores, regno);
2926
2927  /* If the entire condition is now false, signal the node to be removed.  */
2928  if (rcli->condition == const0_rtx)
2929    {
2930      xdata[1] = node->key;
2931      return -1;
2932    }
2933  else if (rcli->condition == const1_rtx)
2934    abort ();
2935
2936  return 0;
2937}
2938
2939/* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.  */
2940
2941static void
2942flush_reg_cond_reg (pbi, regno)
2943     struct propagate_block_info *pbi;
2944     int regno;
2945{
2946  int pair[2];
2947
2948  pair[0] = regno;
2949  pair[1] = -1;
2950  while (splay_tree_foreach (pbi->reg_cond_dead,
2951			     flush_reg_cond_reg_1, pair) == -1)
2952    splay_tree_remove (pbi->reg_cond_dead, pair[1]);
2953
2954  CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
2955}
2956
2957/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
2958   For ior/and, the ADD flag determines whether we want to add the new
2959   condition X to the old one unconditionally.  If it is zero, we will
2960   only return a new expression if X allows us to simplify part of
2961   OLD, otherwise we return NULL to the caller.
2962   If ADD is nonzero, we will return a new condition in all cases.  The
2963   toplevel caller of one of these functions should always pass 1 for
2964   ADD.  */
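/* Two illustrative simplifications, with P standing for a hypothetical
   predicate register:
	ior_reg_cond ((ne P 0), (eq P 0), 1)  ==> const1_rtx
	and_reg_cond ((ne P 0), (eq P 0), 1)  ==> const0_rtx  */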
2965
2966static rtx
2967ior_reg_cond (old, x, add)
2968     rtx old, x;
2969     int add;
2970{
2971  rtx op0, op1;
2972
2973  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
2974    {
2975      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
2976	  && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
2977	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
2978	return const1_rtx;
2979      if (GET_CODE (x) == GET_CODE (old)
2980	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
2981	return old;
2982      if (! add)
2983	return NULL;
2984      return gen_rtx_IOR (0, old, x);
2985    }
2986
2987  switch (GET_CODE (old))
2988    {
2989    case IOR:
2990      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
2991      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
2992      if (op0 != NULL || op1 != NULL)
2993	{
2994	  if (op0 == const0_rtx)
2995	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
2996	  if (op1 == const0_rtx)
2997	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
2998	  if (op0 == const1_rtx || op1 == const1_rtx)
2999	    return const1_rtx;
3000	  if (op0 == NULL)
3001	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3002	  else if (rtx_equal_p (x, op0))
3003	    /* (x | A) | x ~ (x | A).  */
3004	    return old;
3005	  if (op1 == NULL)
3006	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3007	  else if (rtx_equal_p (x, op1))
3008	    /* (A | x) | x ~ (A | x).  */
3009	    return old;
3010	  return gen_rtx_IOR (0, op0, op1);
3011	}
3012      if (! add)
3013	return NULL;
3014      return gen_rtx_IOR (0, old, x);
3015
3016    case AND:
3017      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3018      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3019      if (op0 != NULL || op1 != NULL)
3020	{
3021	  if (op0 == const1_rtx)
3022	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3023	  if (op1 == const1_rtx)
3024	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3025	  if (op0 == const0_rtx || op1 == const0_rtx)
3026	    return const0_rtx;
3027	  if (op0 == NULL)
3028	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3029	  else if (rtx_equal_p (x, op0))
3030	    /* (x & A) | x ~ x.  */
3031	    return op0;
3032	  if (op1 == NULL)
3033	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3034	  else if (rtx_equal_p (x, op1))
3035	    /* (A & x) | x ~ x.  */
3036	    return op1;
3037	  return gen_rtx_AND (0, op0, op1);
3038	}
3039      if (! add)
3040	return NULL;
3041      return gen_rtx_IOR (0, old, x);
3042
3043    case NOT:
3044      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3045      if (op0 != NULL)
3046	return not_reg_cond (op0);
3047      if (! add)
3048	return NULL;
3049      return gen_rtx_IOR (0, old, x);
3050
3051    default:
3052      abort ();
3053    }
3054}
3055
3056static rtx
3057not_reg_cond (x)
3058     rtx x;
3059{
3060  enum rtx_code x_code;
3061
3062  if (x == const0_rtx)
3063    return const1_rtx;
3064  else if (x == const1_rtx)
3065    return const0_rtx;
3066  x_code = GET_CODE (x);
3067  if (x_code == NOT)
3068    return XEXP (x, 0);
3069  if (GET_RTX_CLASS (x_code) == '<'
3070      && GET_CODE (XEXP (x, 0)) == REG)
3071    {
3072      if (XEXP (x, 1) != const0_rtx)
3073	abort ();
3074
3075      return gen_rtx_fmt_ee (reverse_condition (x_code),
3076			     VOIDmode, XEXP (x, 0), const0_rtx);
3077    }
3078  return gen_rtx_NOT (0, x);
3079}
3080
3081static rtx
3082and_reg_cond (old, x, add)
3083     rtx old, x;
3084     int add;
3085{
3086  rtx op0, op1;
3087
3088  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
3089    {
3090      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3091	  && GET_CODE (x) == reverse_condition (GET_CODE (old))
3092	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3093	return const0_rtx;
3094      if (GET_CODE (x) == GET_CODE (old)
3095	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3096	return old;
3097      if (! add)
3098	return NULL;
3099      return gen_rtx_AND (0, old, x);
3100    }
3101
3102  switch (GET_CODE (old))
3103    {
3104    case IOR:
3105      op0 = and_reg_cond (XEXP (old, 0), x, 0);
3106      op1 = and_reg_cond (XEXP (old, 1), x, 0);
3107      if (op0 != NULL || op1 != NULL)
3108	{
3109	  if (op0 == const0_rtx)
3110	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3111	  if (op1 == const0_rtx)
3112	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3113	  if (op0 == const1_rtx || op1 == const1_rtx)
3114	    return const1_rtx;
3115	  if (op0 == NULL)
3116	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3117	  else if (rtx_equal_p (x, op0))
3118	    /* (x | A) & x ~ x.  */
3119	    return op0;
3120	  if (op1 == NULL)
3121	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3122	  else if (rtx_equal_p (x, op1))
3123	    /* (A | x) & x ~ x.  */
3124	    return op1;
3125	  return gen_rtx_IOR (0, op0, op1);
3126	}
3127      if (! add)
3128	return NULL;
3129      return gen_rtx_AND (0, old, x);
3130
3131    case AND:
3132      op0 = and_reg_cond (XEXP (old, 0), x, 0);
3133      op1 = and_reg_cond (XEXP (old, 1), x, 0);
3134      if (op0 != NULL || op1 != NULL)
3135	{
3136	  if (op0 == const1_rtx)
3137	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3138	  if (op1 == const1_rtx)
3139	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3140	  if (op0 == const0_rtx || op1 == const0_rtx)
3141	    return const0_rtx;
3142	  if (op0 == NULL)
3143	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3144	  else if (rtx_equal_p (x, op0))
3145	    /* (x & A) & x ~ (x & A).  */
3146	    return old;
3147	  if (op1 == NULL)
3148	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3149	  else if (rtx_equal_p (x, op1))
3150	    /* (A & x) & x ~ (A & x).  */
3151	    return old;
3152	  return gen_rtx_AND (0, op0, op1);
3153	}
3154      if (! add)
3155	return NULL;
3156      return gen_rtx_AND (0, old, x);
3157
3158    case NOT:
3159      op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3160      if (op0 != NULL)
3161	return not_reg_cond (op0);
3162      if (! add)
3163	return NULL;
3164      return gen_rtx_AND (0, old, x);
3165
3166    default:
3167      abort ();
3168    }
3169}
3170
3171/* Given a condition X, remove references to reg REGNO and return the
3172   new condition.  The removal will be done so that all conditions
3173   involving REGNO are considered to evaluate to false.  This function
3174   is used when the value of REGNO changes.  */
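/* For example, with P and Q hypothetical predicate registers,
	elim_reg_cond ((ior (ne P 0) (eq Q 0)), REGNO (P))
   yields (eq Q 0), since every condition mentioning P is treated as
   false.  */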
3175
3176static rtx
3177elim_reg_cond (x, regno)
3178     rtx x;
3179     unsigned int regno;
3180{
3181  rtx op0, op1;
3182
3183  if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3184    {
3185      if (REGNO (XEXP (x, 0)) == regno)
3186	return const0_rtx;
3187      return x;
3188    }
3189
3190  switch (GET_CODE (x))
3191    {
3192    case AND:
3193      op0 = elim_reg_cond (XEXP (x, 0), regno);
3194      op1 = elim_reg_cond (XEXP (x, 1), regno);
3195      if (op0 == const0_rtx || op1 == const0_rtx)
3196	return const0_rtx;
3197      if (op0 == const1_rtx)
3198	return op1;
3199      if (op1 == const1_rtx)
3200	return op0;
3201      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3202	return x;
3203      return gen_rtx_AND (0, op0, op1);
3204
3205    case IOR:
3206      op0 = elim_reg_cond (XEXP (x, 0), regno);
3207      op1 = elim_reg_cond (XEXP (x, 1), regno);
3208      if (op0 == const1_rtx || op1 == const1_rtx)
3209	return const1_rtx;
3210      if (op0 == const0_rtx)
3211	return op1;
3212      if (op1 == const0_rtx)
3213	return op0;
3214      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3215	return x;
3216      return gen_rtx_IOR (0, op0, op1);
3217
3218    case NOT:
3219      op0 = elim_reg_cond (XEXP (x, 0), regno);
3220      if (op0 == const0_rtx)
3221	return const1_rtx;
3222      if (op0 == const1_rtx)
3223	return const0_rtx;
3224      if (op0 != XEXP (x, 0))
3225	return not_reg_cond (op0);
3226      return x;
3227
3228    default:
3229      abort ();
3230    }
3231}
3232#endif /* HAVE_conditional_execution */
3233
3234#ifdef AUTO_INC_DEC
3235
3236/* Try to substitute the auto-inc expression INC as the address inside
3237   MEM which occurs in INSN.  Currently, the address of MEM is an expression
3238   involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
3239   that has a single set whose source is a PLUS of INCR_REG and something
3240   else.  */
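/* In the simple case (pseudo-register number invented), for
	INSN:  ... (mem:SI (reg 210)) ...
	INCR:  (set (reg 210) (plus (reg 210) (const_int 4)))
   the MEM is rewritten to use (post_inc (reg 210)) and INCR is reduced to
   a no-op copy, which is then turned into a deleted note.  */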
3241
3242static void
3243attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
3244     struct propagate_block_info *pbi;
3245     rtx inc, insn, mem, incr, incr_reg;
3246{
3247  int regno = REGNO (incr_reg);
3248  rtx set = single_set (incr);
3249  rtx q = SET_DEST (set);
3250  rtx y = SET_SRC (set);
3251  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
3252
3253  /* Make sure this reg appears only once in this insn.  */
3254  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
3255    return;
3256
3257  if (dead_or_set_p (incr, incr_reg)
3258      /* Mustn't autoinc an eliminable register.  */
3259      && (regno >= FIRST_PSEUDO_REGISTER
3260	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
3261    {
3262      /* This is the simple case.  Try to make the auto-inc.  If
3263	 we can't, we are done.  Otherwise, we will do any
3264	 needed updates below.  */
3265      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
3266	return;
3267    }
3268  else if (GET_CODE (q) == REG
3269	   /* PREV_INSN used here to check the semi-open interval
3270	      [insn,incr).  */
3271	   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3272	   /* We must also check for sets of q as q may be
3273	      a call clobbered hard register and there may
3274	      be a call between PREV_INSN (insn) and incr.  */
3275	   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3276    {
3277      /* We have *p followed sometime later by q = p+size.
3278	 Both p and q must be live afterward,
3279	 and q is not used between INSN and its assignment.
3280	 Change it to q = p, ...*q..., q = q+size.
3281	 Then fall into the usual case.  */
3282      rtx insns, temp;
3283
3284      start_sequence ();
3285      emit_move_insn (q, incr_reg);
3286      insns = get_insns ();
3287      end_sequence ();
3288
3289      /* If we can't make the auto-inc, or can't make the
3290	 replacement into Y, exit.  There's no point in making
3291	 the change below if we can't do the auto-inc and doing
3292	 so is not correct in the pre-inc case.  */
3293
3294      XEXP (inc, 0) = q;
3295      validate_change (insn, &XEXP (mem, 0), inc, 1);
3296      validate_change (incr, &XEXP (y, opnum), q, 1);
3297      if (! apply_change_group ())
3298	return;
3299
3300      /* We now know we'll be doing this change, so emit the
3301	 new insn(s) and do the updates.  */
3302      emit_insns_before (insns, insn);
3303
3304      if (pbi->bb->head == insn)
3305	pbi->bb->head = insns;
3306
3307      /* INCR will become a NOTE and INSN won't contain a
3308	 use of INCR_REG.  If a use of INCR_REG was just placed in
3309	 the insn before INSN, make that the next use.
3310	 Otherwise, invalidate it.  */
3311      if (GET_CODE (PREV_INSN (insn)) == INSN
3312	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3313	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
3314	pbi->reg_next_use[regno] = PREV_INSN (insn);
3315      else
3316	pbi->reg_next_use[regno] = 0;
3317
3318      incr_reg = q;
3319      regno = REGNO (q);
3320
3321      /* REGNO is now used in INCR which is below INSN, but
3322	 it previously wasn't live here.  If we don't mark
3323	 it as live, we'll put a REG_DEAD note for it
3324	 on this insn, which is incorrect.  */
3325      SET_REGNO_REG_SET (pbi->reg_live, regno);
3326
3327      /* If there are any calls between INSN and INCR, show
3328	 that REGNO now crosses them.  */
3329      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3330	if (GET_CODE (temp) == CALL_INSN)
3331	  REG_N_CALLS_CROSSED (regno)++;
3332
3333      /* Invalidate alias info for Q since we just changed its value.  */
3334      clear_reg_alias_info (q);
3335    }
3336  else
3337    return;
3338
3339  /* If we haven't returned, it means we were able to make the
3340     auto-inc, so update the status.  First, record that this insn
3341     has an implicit side effect.  */
3342
3343  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
3344
3345  /* Modify the old increment-insn to simply copy
3346     the already-incremented value of our register.  */
3347  if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
3348    abort ();
3349
3350  /* If that makes it a no-op (copying the register into itself), delete
3351     it so it won't appear to be a "use" and a "set" of this
3352     register.  */
3353  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
3354    {
3355      /* If the original source was dead, it's dead now.  */
3356      rtx note;
3357
3358      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
3359	{
3360	  remove_note (incr, note);
3361	  if (XEXP (note, 0) != incr_reg)
3362	    CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
3363	}
3364
3365      PUT_CODE (incr, NOTE);
3366      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3367      NOTE_SOURCE_FILE (incr) = 0;
3368    }
3369
3370  if (regno >= FIRST_PSEUDO_REGISTER)
3371    {
3372      /* Count an extra reference to the reg.  When a reg is
3373	 incremented, spilling it is worse, so we want to make
3374	 that less likely.  */
3375      REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3376
3377      /* Count the increment as a setting of the register,
3378	 even though it isn't a SET in rtl.  */
3379      REG_N_SETS (regno)++;
3380    }
3381}
3382
3383/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
3384   reference.  */
3385
3386static void
3387find_auto_inc (pbi, x, insn)
3388     struct propagate_block_info *pbi;
3389     rtx x;
3390     rtx insn;
3391{
3392  rtx addr = XEXP (x, 0);
3393  HOST_WIDE_INT offset = 0;
3394  rtx set, y, incr, inc_val;
3395  int regno;
3396  int size = GET_MODE_SIZE (GET_MODE (x));
3397
3398  if (GET_CODE (insn) == JUMP_INSN)
3399    return;
3400
3401  /* Here we detect use of an index register which might be good for
3402     postincrement, postdecrement, preincrement, or predecrement.  */
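  /* A sketch of the forms recognized below, assuming a 4-byte MEM and a
     target that provides the corresponding addressing modes:

	(mem (reg X))      ... X = X + 4  =>  (mem (post_inc X))
	(mem (reg X))      ... X = X - 4  =>  (mem (post_dec X))
	(mem (plus X 4))   ... X = X + 4  =>  (mem (pre_inc X))
	(mem (plus X -4))  ... X = X - 4  =>  (mem (pre_dec X))
	(mem (reg X))      ... X = X + Y  =>  (mem (post_modify X (plus X Y)))  */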
3403
3404  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3405    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3406
3407  if (GET_CODE (addr) != REG)
3408    return;
3409
3410  regno = REGNO (addr);
3411
3412  /* Is the next use an increment that we can convert into an auto-increment?  */
3413  incr = pbi->reg_next_use[regno];
3414  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
3415    return;
3416  set = single_set (incr);
3417  if (set == 0 || GET_CODE (set) != SET)
3418    return;
3419  y = SET_SRC (set);
3420
3421  if (GET_CODE (y) != PLUS)
3422    return;
3423
3424  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
3425    inc_val = XEXP (y, 1);
3426  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
3427    inc_val = XEXP (y, 0);
3428  else
3429    return;
3430
3431  if (GET_CODE (inc_val) == CONST_INT)
3432    {
3433      if (HAVE_POST_INCREMENT
3434	  && (INTVAL (inc_val) == size && offset == 0))
3435	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
3436			  incr, addr);
3437      else if (HAVE_POST_DECREMENT
3438	       && (INTVAL (inc_val) == -size && offset == 0))
3439	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
3440			  incr, addr);
3441      else if (HAVE_PRE_INCREMENT
3442	       && (INTVAL (inc_val) == size && offset == size))
3443	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
3444			  incr, addr);
3445      else if (HAVE_PRE_DECREMENT
3446	       && (INTVAL (inc_val) == -size && offset == -size))
3447	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
3448			  incr, addr);
3449      else if (HAVE_POST_MODIFY_DISP && offset == 0)
3450	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3451						    gen_rtx_PLUS (Pmode,
3452								  addr,
3453								  inc_val)),
3454			  insn, x, incr, addr);
3455    }
3456  else if (GET_CODE (inc_val) == REG
3457	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
3458				   NEXT_INSN (incr)))
3459
3460    {
3461      if (HAVE_POST_MODIFY_REG && offset == 0)
3462	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3463						    gen_rtx_PLUS (Pmode,
3464								  addr,
3465								  inc_val)),
3466			  insn, x, incr, addr);
3467    }
3468}
3469
3470#endif /* AUTO_INC_DEC */
3471
3472static void
3473mark_used_reg (pbi, reg, cond, insn)
3474     struct propagate_block_info *pbi;
3475     rtx reg;
3476     rtx cond ATTRIBUTE_UNUSED;
3477     rtx insn;
3478{
3479  unsigned int regno_first, regno_last, i;
3480  int some_was_live, some_was_dead, some_not_set;
3481
3482  regno_last = regno_first = REGNO (reg);
3483  if (regno_first < FIRST_PSEUDO_REGISTER)
3484    regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
3485
3486  /* Find out if any of this register is live after this instruction.  */
3487  some_was_live = some_was_dead = 0;
3488  for (i = regno_first; i <= regno_last; ++i)
3489    {
3490      int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
3491      some_was_live |= needed_regno;
3492      some_was_dead |= ! needed_regno;
3493    }
3494
3495  /* Find out if any part of the register was not set by this insn.  */
3496  some_not_set = 0;
3497  for (i = regno_first; i <= regno_last; ++i)
3498    some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
3499
3500  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3501    {
3502      /* Record where each reg is used, so when the reg is set we know
3503	 the next insn that uses it.  */
3504      pbi->reg_next_use[regno_first] = insn;
3505    }
3506
3507  if (pbi->flags & PROP_REG_INFO)
3508    {
3509      if (regno_first < FIRST_PSEUDO_REGISTER)
3510	{
3511	  /* If this is a register we are going to try to eliminate,
3512	     don't mark it live here.  If we are successful in
3513	     eliminating it, it need not be live unless it is used for
3514	     pseudos, in which case it will have been set live when it
3515	     was allocated to the pseudos.  If the register will not
3516	     be eliminated, reload will set it live at that point.
3517
3518	     Otherwise, record that this function uses this register.  */
3519	  /* ??? The PPC backend tries to "eliminate" the pic register
3520	     to itself.  This should be fixed.  In the meantime, hack
3521	     around it.  */
3522
3523	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
3524	         && (regno_first == FRAME_POINTER_REGNUM
3525		     || regno_first == ARG_POINTER_REGNUM)))
3526	    for (i = regno_first; i <= regno_last; ++i)
3527	      regs_ever_live[i] = 1;
3528	}
3529      else
3530	{
3531	  /* Keep track of which basic block each reg appears in.  */
3532
3533	  int blocknum = pbi->bb->index;
3534	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
3535	    REG_BASIC_BLOCK (regno_first) = blocknum;
3536	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
3537	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
3538
3539	  /* Count (weighted) number of uses of each reg.  */
3540	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
3541	  REG_N_REFS (regno_first)++;
3542	}
3543    }
3544
3545  /* Record and count the insns in which a reg dies.  If it is used in
3546     this insn and was dead below the insn then it dies in this insn.
3547     If it was set in this insn, we do not make a REG_DEAD note;
3548     likewise if we already made such a note.  */
3549  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
3550      && some_was_dead
3551      && some_not_set)
3552    {
3553      /* Check for the case where the register dying partially
3554	 overlaps the register set by this insn.  */
3555      if (regno_first != regno_last)
3556	for (i = regno_first; i <= regno_last; ++i)
3557	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
3558
3559      /* If none of the words in X is needed, make a REG_DEAD note.
3560	 Otherwise, we must make partial REG_DEAD notes.  */
3561      if (! some_was_live)
3562	{
3563	  if ((pbi->flags & PROP_DEATH_NOTES)
3564	      && ! find_regno_note (insn, REG_DEAD, regno_first))
3565	    REG_NOTES (insn)
3566	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3567
3568	  if (pbi->flags & PROP_REG_INFO)
3569	    REG_N_DEATHS (regno_first)++;
3570	}
3571      else
3572	{
3573	  /* Don't make a REG_DEAD note for a part of a register
3574	     that is set in the insn.  */
3575	  for (i = regno_first; i <= regno_last; ++i)
3576	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
3577		&& ! dead_or_set_regno_p (insn, i))
3578	      REG_NOTES (insn)
3579		= alloc_EXPR_LIST (REG_DEAD,
3580				   gen_rtx_REG (reg_raw_mode[i], i),
3581				   REG_NOTES (insn));
3582	}
3583    }
3584
3585  /* Mark the register as being live.  */
3586  for (i = regno_first; i <= regno_last; ++i)
3587    {
3588#ifdef HAVE_conditional_execution
3589      int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
3590#endif
3591
3592      SET_REGNO_REG_SET (pbi->reg_live, i);
3593
3594#ifdef HAVE_conditional_execution
3595      /* If this is a conditional use, record that fact.  If it is later
3596	 conditionally set, we'll know to kill the register.  */
3597      if (cond != NULL_RTX)
3598	{
3599	  splay_tree_node node;
3600	  struct reg_cond_life_info *rcli;
3601	  rtx ncond;
3602
3603	  if (this_was_live)
3604	    {
3605	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
3606	      if (node == NULL)
3607		{
3608		  /* The register was unconditionally live previously.
3609		     No need to do anything.  */
3610		}
3611	      else
3612		{
3613		  /* The register was conditionally live previously.
3614		     Subtract the new life cond from the old death cond.  */
3615		  rcli = (struct reg_cond_life_info *) node->value;
3616		  ncond = rcli->condition;
3617		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
3618
3619		  /* If the register is now unconditionally live,
3620		     remove the entry in the splay_tree.  */
3621		  if (ncond == const0_rtx)
3622		    splay_tree_remove (pbi->reg_cond_dead, i);
3623		  else
3624		    {
3625		      rcli->condition = ncond;
3626		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
3627					 REGNO (XEXP (cond, 0)));
3628		    }
3629		}
3630	    }
3631	  else
3632	    {
3633	      /* The register was not previously live at all.  Record
3634		 the condition under which it is still dead.  */
3635	      rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3636	      rcli->condition = not_reg_cond (cond);
3637	      rcli->stores = const0_rtx;
3638	      rcli->orig_condition = const0_rtx;
3639	      splay_tree_insert (pbi->reg_cond_dead, i,
3640				 (splay_tree_value) rcli);
3641
3642	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3643	    }
3644	}
3645      else if (this_was_live)
3646	{
3647	  /* The register may have been conditionally live previously, but
3648	     is now unconditionally live.  Remove it from the conditionally
3649	     dead list, so that a conditional set won't cause us to think
3650	     it dead.  */
3651	  splay_tree_remove (pbi->reg_cond_dead, i);
3652	}
3653#endif
3654    }
3655}
3656
3657/* Scan expression X and mark each register it uses as live by setting
3658   the corresponding bit in PBI->REG_LIVE, updating the other
3659   per-register information tracked in PBI as needed.
3660
3661   INSN is the containing instruction.  If INSN is dead, this function
3662   is not called.  */
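/* For instance (a sketch), scanning the pattern
   (set (reg A) (plus (reg B) (mem (reg C)))) marks B and C as used,
   but not A, since storing into A is not a use of its old value.  */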
3663
3664static void
3665mark_used_regs (pbi, x, cond, insn)
3666     struct propagate_block_info *pbi;
3667     rtx x, cond, insn;
3668{
3669  RTX_CODE code;
3670  int regno;
3671  int flags = pbi->flags;
3672
3673 retry:
3674  if (!x)
3675    return;
3676  code = GET_CODE (x);
3677  switch (code)
3678    {
3679    case LABEL_REF:
3680    case SYMBOL_REF:
3681    case CONST_INT:
3682    case CONST:
3683    case CONST_DOUBLE:
3684    case CONST_VECTOR:
3685    case PC:
3686    case ADDR_VEC:
3687    case ADDR_DIFF_VEC:
3688      return;
3689
3690#ifdef HAVE_cc0
3691    case CC0:
3692      pbi->cc0_live = 1;
3693      return;
3694#endif
3695
3696    case CLOBBER:
3697      /* If we are clobbering a MEM, mark any registers inside the address
3698	 as being used.  */
3699      if (GET_CODE (XEXP (x, 0)) == MEM)
3700	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
3701      return;
3702
3703    case MEM:
3704      /* Don't bother watching stores to mems if this is not the
3705	 final pass.  We'll not be deleting dead stores this round.  */
3706      if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3707	{
3708	  /* Invalidate the data for the last MEM stored, but only if MEM is
3709	     something that can be stored into.  */
3710	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3711	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3712	    /* Needn't clear the memory set list.  */
3713	    ;
3714	  else
3715	    {
3716	      rtx temp = pbi->mem_set_list;
3717	      rtx prev = NULL_RTX;
3718	      rtx next;
3719
3720	      while (temp)
3721		{
3722		  next = XEXP (temp, 1);
3723		  if (anti_dependence (XEXP (temp, 0), x))
3724		    {
3725		      /* Splice temp out of the list.  */
3726		      if (prev)
3727			XEXP (prev, 1) = next;
3728		      else
3729			pbi->mem_set_list = next;
3730		      free_EXPR_LIST_node (temp);
3731		      pbi->mem_set_list_len--;
3732		    }
3733		  else
3734		    prev = temp;
3735		  temp = next;
3736		}
3737	    }
3738
3739	  /* If the memory reference has embedded side effects (autoincrement
3740	     address modes), then we may need to kill some entries on the
3741	     memory set list.  */
3742	  if (insn)
3743	    invalidate_mems_from_autoinc (pbi, insn);
3744	}
3745
3746#ifdef AUTO_INC_DEC
3747      if (flags & PROP_AUTOINC)
3748        find_auto_inc (pbi, x, insn);
3749#endif
3750      break;
3751
3752    case SUBREG:
3753#ifdef CLASS_CANNOT_CHANGE_MODE
3754      if (GET_CODE (SUBREG_REG (x)) == REG
3755	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3756	  && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
3757					 GET_MODE (SUBREG_REG (x))))
3758	REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
3759#endif
3760
3761      /* While we're here, optimize this case.  */
3762      x = SUBREG_REG (x);
3763      if (GET_CODE (x) != REG)
3764	goto retry;
3765      /* Fall through.  */
3766
3767    case REG:
3768      /* We are looking at a use of a register (not a set); mark it as needed.  */
3769      mark_used_reg (pbi, x, cond, insn);
3770      return;
3771
3772    case SET:
3773      {
3774	rtx testreg = SET_DEST (x);
3775	int mark_dest = 0;
3776
3777	/* If storing into MEM, don't show it as being used.  But do
3778	   show the address as being used.  */
3779	if (GET_CODE (testreg) == MEM)
3780	  {
3781#ifdef AUTO_INC_DEC
3782	    if (flags & PROP_AUTOINC)
3783	      find_auto_inc (pbi, testreg, insn);
3784#endif
3785	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
3786	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
3787	    return;
3788	  }
3789
3790	/* Storing in STRICT_LOW_PART is like storing in a reg
3791	   in that this SET might be dead, so ignore it in TESTREG,
3792	   but in some other ways it is like using the reg.
3793
3794	   Storing in a SUBREG or a bit field is like storing the entire
3795	   register in that if the register's value is not used
3796	   then this SET is not needed.  */
3797	while (GET_CODE (testreg) == STRICT_LOW_PART
3798	       || GET_CODE (testreg) == ZERO_EXTRACT
3799	       || GET_CODE (testreg) == SIGN_EXTRACT
3800	       || GET_CODE (testreg) == SUBREG)
3801	  {
3802#ifdef CLASS_CANNOT_CHANGE_MODE
3803	    if (GET_CODE (testreg) == SUBREG
3804		&& GET_CODE (SUBREG_REG (testreg)) == REG
3805		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3806		&& CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
3807					       GET_MODE (testreg)))
3808	      REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
3809#endif
3810
3811	    /* Modifying a single register in an alternate mode
3812	       does not use any of the old value.  But these other
3813	       ways of storing in a register do use the old value.  */
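	    /* For example (a sketch): storing into (subreg:SI (reg:DI X) 0)
	       replaces only one word of X, so the old value of X still
	       counts as used, whereas storing into a SUBREG at least as
	       wide as the inner register replaces all of it.  */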
3814	    if (GET_CODE (testreg) == SUBREG
3815		&& !((REG_BYTES (SUBREG_REG (testreg))
3816		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD
3817		     > (REG_BYTES (testreg)
3818			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD))
3819	      ;
3820	    else
3821	      mark_dest = 1;
3822
3823	    testreg = XEXP (testreg, 0);
3824	  }
3825
3826	/* If this is a store into a register or group of registers,
3827	   recursively scan the value being stored.  */
3828
3829	if ((GET_CODE (testreg) == PARALLEL
3830	     && GET_MODE (testreg) == BLKmode)
3831	    || (GET_CODE (testreg) == REG
3832		&& (regno = REGNO (testreg),
3833		    ! (regno == FRAME_POINTER_REGNUM
3834		       && (! reload_completed || frame_pointer_needed)))
3835#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3836		&& ! (regno == HARD_FRAME_POINTER_REGNUM
3837		      && (! reload_completed || frame_pointer_needed))
3838#endif
3839#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3840		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3841#endif
3842		))
3843	  {
3844	    if (mark_dest)
3845	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
3846	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
3847	    return;
3848	  }
3849      }
3850      break;
3851
3852    case ASM_OPERANDS:
3853    case UNSPEC_VOLATILE:
3854    case TRAP_IF:
3855    case ASM_INPUT:
3856      {
3857	/* Traditional and volatile asm instructions must be considered to use
3858	   and clobber all hard registers, all pseudo-registers and all of
3859	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
3860
3861	   Consider for instance a volatile asm that changes the fpu rounding
3862	   mode.  An insn should not be moved across this even if it only uses
3863	   pseudo-regs because it might give an incorrectly rounded result.
3864
3865	   ?!? Unfortunately, marking all hard registers as live causes massive
3866	   problems for the register allocator and marking all pseudos as live
3867	   creates mountains of uninitialized variable warnings.
3868
3869	   So for now, just clear the memory set list and mark any regs
3870	   we can find in ASM_OPERANDS as used.  */
3871	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3872	  {
3873	    free_EXPR_LIST_list (&pbi->mem_set_list);
3874	    pbi->mem_set_list_len = 0;
3875	  }
3876
3877	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
3878	   We cannot just fall through here, since then we would be confused
3879	   by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
3880	   traditional asms as they do when they appear on their own.  */
3881	if (code == ASM_OPERANDS)
3882	  {
3883	    int j;
3884
3885	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3886	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
3887	  }
3888	break;
3889      }
3890
3891    case COND_EXEC:
3892      if (cond != NULL_RTX)
3893	abort ();
3894
3895      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
3896
3897      cond = COND_EXEC_TEST (x);
3898      x = COND_EXEC_CODE (x);
3899      goto retry;
3900
3901    case PHI:
3902      /* We _do_not_ want to scan operands of phi nodes.  Operands of
3903	 a phi function are evaluated only when control reaches this
3904	 block along a particular edge.  Therefore, regs that appear
3905	 as arguments to phi should not be added to the global live at
3906	 start.  */
3907      return;
3908
3909    default:
3910      break;
3911    }
3912
3913  /* Recursively scan the operands of this expression.  */
3914
3915  {
3916    const char * const fmt = GET_RTX_FORMAT (code);
3917    int i;
3918
3919    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3920      {
3921	if (fmt[i] == 'e')
3922	  {
3923	    /* Tail recursive case: save a function call level.  */
3924	    if (i == 0)
3925	      {
3926		x = XEXP (x, 0);
3927		goto retry;
3928	      }
3929	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
3930	  }
3931	else if (fmt[i] == 'E')
3932	  {
3933	    int j;
3934	    for (j = 0; j < XVECLEN (x, i); j++)
3935	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
3936	  }
3937      }
3938  }
3939}
3940
3941#ifdef AUTO_INC_DEC
3942
3943static int
3944try_pre_increment_1 (pbi, insn)
3945     struct propagate_block_info *pbi;
3946     rtx insn;
3947{
3948  /* Find the next use of this reg.  If it is in the same basic block,
3949     make it do pre-increment or pre-decrement if appropriate.  */
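  /* For example (a sketch), on a target with HAVE_PRE_INCREMENT, given

	r1 = r1 + 4			(INSN)
	... (mem:SI (reg r1)) ...	(Y, the next use)

     the use in Y becomes (mem:SI (pre_inc r1)) and INSN is deleted.  */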
3950  rtx x = single_set (insn);
3951  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
3952			  * INTVAL (XEXP (SET_SRC (x), 1)));
3953  int regno = REGNO (SET_DEST (x));
3954  rtx y = pbi->reg_next_use[regno];
3955  if (y != 0
3956      && SET_DEST (x) != stack_pointer_rtx
3957      && BLOCK_NUM (y) == BLOCK_NUM (insn)
3958      /* Don't do this if the reg dies, or gets set in y; a standard addressing
3959	 mode would be better.  */
3960      && ! dead_or_set_p (y, SET_DEST (x))
3961      && try_pre_increment (y, SET_DEST (x), amount))
3962    {
3963      /* We have found a suitable auto-increment and already changed
3964	 insn Y to do it.  So flush this increment instruction.  */
3965      propagate_block_delete_insn (pbi->bb, insn);
3966
3967      /* Count a reference to this reg for the increment insn we are
3968	 deleting.  When a reg is incremented, spilling it is worse,
3969	 so we want to make that less likely.  */
3970      if (regno >= FIRST_PSEUDO_REGISTER)
3971	{
3972	  REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3973	  REG_N_SETS (regno)++;
3974	}
3975
3976      /* Flush any remembered memories depending on the value of
3977	 the incremented register.  */
3978      invalidate_mems_from_set (pbi, SET_DEST (x));
3979
3980      return 1;
3981    }
3982  return 0;
3983}
3984
3985/* Try to change INSN so that it does pre-increment or pre-decrement
3986   addressing on register REG in order to add AMOUNT to REG.
3987   AMOUNT is negative for pre-decrement.
3988   Returns 1 if the change could be made.
3989   This checks the validity of the resulting modified INSN in all respects.  */
3990
3991static int
3992try_pre_increment (insn, reg, amount)
3993     rtx insn, reg;
3994     HOST_WIDE_INT amount;
3995{
3996  rtx use;
3997
3998  /* Nonzero if we can try to make a pre-increment or pre-decrement.
3999     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4000  int pre_ok = 0;
4001  /* Nonzero if we can try to make a post-increment or post-decrement.
4002     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4003     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4004     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4005  int post_ok = 0;
4006
4007  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4008  int do_post = 0;
4009
4010  /* From the sign of increment, see which possibilities are conceivable
4011     on this target machine.  */
4012  if (HAVE_PRE_INCREMENT && amount > 0)
4013    pre_ok = 1;
4014  if (HAVE_POST_INCREMENT && amount > 0)
4015    post_ok = 1;
4016
4017  if (HAVE_PRE_DECREMENT && amount < 0)
4018    pre_ok = 1;
4019  if (HAVE_POST_DECREMENT && amount < 0)
4020    post_ok = 1;
4021
4022  if (! (pre_ok || post_ok))
4023    return 0;
4024
4025  /* It is not safe to add a side effect to a jump insn
4026     because if the incremented register is spilled and must be reloaded
4027     there would be no way to store the incremented value back in memory.  */
4028
4029  if (GET_CODE (insn) == JUMP_INSN)
4030    return 0;
4031
4032  use = 0;
4033  if (pre_ok)
4034    use = find_use_as_address (PATTERN (insn), reg, 0);
4035  if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
4036    {
4037      use = find_use_as_address (PATTERN (insn), reg, -amount);
4038      do_post = 1;
4039    }
4040
4041  if (use == 0 || use == (rtx) (size_t) 1)
4042    return 0;
4043
4044  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4045    return 0;
4046
4047  /* See if this combination of instruction and addressing mode exists.  */
4048  if (! validate_change (insn, &XEXP (use, 0),
4049			 gen_rtx_fmt_e (amount > 0
4050					? (do_post ? POST_INC : PRE_INC)
4051					: (do_post ? POST_DEC : PRE_DEC),
4052					Pmode, reg), 0))
4053    return 0;
4054
4055  /* Record that this insn now has an implicit side effect on REG.  */
4056  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4057  return 1;
4058}
4059
4060#endif /* AUTO_INC_DEC */
4061
4062/* Find the place in the rtx X where REG is used as a memory address.
4063   Return the MEM rtx that so uses it.
4064   If PLUSCONST is nonzero, search instead for a memory address equivalent to
4065   (plus REG (const_int PLUSCONST)).
4066
4067   If such an address does not appear, return 0.
4068   If REG appears more than once, or is used other than in such an address,
4069   return (rtx) 1.  */
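/* For instance (a sketch), if the pattern of an insn is
   (set (reg Y) (mem:SI (plus (reg R) (const_int 4)))), then
   find_use_as_address (pattern, R, 4) returns the MEM, while
   find_use_as_address (pattern, R, 0) returns (rtx) 1, because R is
   used but not as a bare address.  */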
4070
4071rtx
4072find_use_as_address (x, reg, plusconst)
4073     rtx x;
4074     rtx reg;
4075     HOST_WIDE_INT plusconst;
4076{
4077  enum rtx_code code = GET_CODE (x);
4078  const char * const fmt = GET_RTX_FORMAT (code);
4079  int i;
4080  rtx value = 0;
4081  rtx tem;
4082
4083  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4084    return x;
4085
4086  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4087      && XEXP (XEXP (x, 0), 0) == reg
4088      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4089      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4090    return x;
4091
4092  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4093    {
4094      /* If REG occurs inside a MEM used in a bit-field reference,
4095	 that is unacceptable.  */
4096      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4097	return (rtx) (size_t) 1;
4098    }
4099
4100  if (x == reg)
4101    return (rtx) (size_t) 1;
4102
4103  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4104    {
4105      if (fmt[i] == 'e')
4106	{
4107	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4108	  if (value == 0)
4109	    value = tem;
4110	  else if (tem != 0)
4111	    return (rtx) (size_t) 1;
4112	}
4113      else if (fmt[i] == 'E')
4114	{
4115	  int j;
4116	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4117	    {
4118	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4119	      if (value == 0)
4120		value = tem;
4121	      else if (tem != 0)
4122		return (rtx) (size_t) 1;
4123	    }
4124	}
4125    }
4126
4127  return value;
4128}
4129
4130/* Write the registers contained in register set R into OUTF.
4131   This is part of making a debugging dump.  */
4132
4133void
4134dump_regset (r, outf)
4135     regset r;
4136     FILE *outf;
4137{
4138  int i;
4139  if (r == NULL)
4140    {
4141      fputs (" (nil)", outf);
4142      return;
4143    }
4144
4145  EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4146    {
4147      fprintf (outf, " %d", i);
4148      if (i < FIRST_PSEUDO_REGISTER)
4149	fprintf (outf, " [%s]",
4150		 reg_names[i]);
4151    });
4152}
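
/* For example (register names are target-specific; this is only an
   illustration), a regset containing hard register 0 and pseudo 70
   might be dumped as " 0 [ax] 70".  */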
4153
4154/* Print a human-readable representation of R on the standard error
4155   stream.  This function is designed to be used from within the
4156   debugger.  */
4157
4158void
4159debug_regset (r)
4160     regset r;
4161{
4162  dump_regset (r, stderr);
4163  putc ('\n', stderr);
4164}
4165
4166/* Recompute register set/reference counts immediately prior to register
4167   allocation.
4168
4169   This avoids problems with set/reference counts changing to/from values
4170   which have special meanings to the register allocators.
4171
4172   Additionally, the reference counts are the primary component used by the
4173   register allocators to prioritize pseudos for allocation to hard regs.
4174   More accurate reference counts generally lead to better register allocation.
4175
4176   F is the first insn to be scanned.
4177
4178   LOOP_STEP denotes how much loop_depth should be incremented per
4179   loop nesting level in order to increase the ref count more for
4180   references in a loop.
4181
4182   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4183   possibly other information which is used by the register allocators.  */
4184
4185void
4186recompute_reg_usage (f, loop_step)
4187     rtx f ATTRIBUTE_UNUSED;
4188     int loop_step ATTRIBUTE_UNUSED;
4189{
4190  allocate_reg_life_data ();
4191  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
4192}
4193
4194/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4195   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
4196   of the number of registers that died.  */
4197
4198int
4199count_or_remove_death_notes (blocks, kill)
4200     sbitmap blocks;
4201     int kill;
4202{
4203  int i, count = 0;
4204
4205  for (i = n_basic_blocks - 1; i >= 0; --i)
4206    {
4207      basic_block bb;
4208      rtx insn;
4209
4210      if (blocks && ! TEST_BIT (blocks, i))
4211	continue;
4212
4213      bb = BASIC_BLOCK (i);
4214
4215      for (insn = bb->head;; insn = NEXT_INSN (insn))
4216	{
4217	  if (INSN_P (insn))
4218	    {
4219	      rtx *pprev = &REG_NOTES (insn);
4220	      rtx link = *pprev;
4221
4222	      while (link)
4223		{
4224		  switch (REG_NOTE_KIND (link))
4225		    {
4226		    case REG_DEAD:
4227		      if (GET_CODE (XEXP (link, 0)) == REG)
4228			{
4229			  rtx reg = XEXP (link, 0);
4230			  int n;
4231
4232			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
4233			    n = 1;
4234			  else
4235			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
4236			  count += n;
4237			}
4238		      /* Fall through.  */
4239
4240		    case REG_UNUSED:
4241		      if (kill)
4242			{
4243			  rtx next = XEXP (link, 1);
4244			  free_EXPR_LIST_node (link);
4245			  *pprev = link = next;
4246			  break;
4247			}
4248		      /* Fall through.  */
4249
4250		    default:
4251		      pprev = &XEXP (link, 1);
4252		      link = *pprev;
4253		      break;
4254		    }
4255		}
4256	    }
4257
4258	  if (insn == bb->end)
4259	    break;
4260	}
4261    }
4262
4263  return count;
4264}
4265/* Clear the LOG_LINKS fields of the insns in the selected blocks, or of
4266   the whole insn chain if BLOCKS is NULL.  */
4267
4268static void
4269clear_log_links (blocks)
4270     sbitmap blocks;
4271{
4272  rtx insn;
4273  int i;
4274
4275  if (!blocks)
4276    {
4277      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4278	if (INSN_P (insn))
4279	  free_INSN_LIST_list (&LOG_LINKS (insn));
4280    }
4281  else
4282    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4283      {
4284	basic_block bb = BASIC_BLOCK (i);
4285
4286	for (insn = bb->head; insn != NEXT_INSN (bb->end);
4287	     insn = NEXT_INSN (insn))
4288	  if (INSN_P (insn))
4289	    free_INSN_LIST_list (&LOG_LINKS (insn));
4290      });
4291}
4292
4293/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4294   correspond to the hard registers, if any, set in that map.  This
4295   could be done far more efficiently by having all sorts of special cases
4296   for moving single words, but it probably isn't worth the trouble.  */
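
/* For instance (a sketch, assuming register 70 is a pseudo on the target),
   a bitmap with bits 1, 3 and 70 set adds hard registers 1 and 3 to TO;
   the walk stops at the first pseudo because hard registers are numbered
   first.  */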
4297
4298void
4299reg_set_to_hard_reg_set (to, from)
4300     HARD_REG_SET *to;
4301     bitmap from;
4302{
4303  int i;
4304
4305  EXECUTE_IF_SET_IN_BITMAP
4306    (from, 0, i,
4307     {
4308       if (i >= FIRST_PSEUDO_REGISTER)
4309	 return;
4310       SET_HARD_REG_BIT (*to, i);
4311     });
4312}
4313