alias.c revision 96263
191094Sdes/* Alias analysis for GNU C
2115619Sdes   Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
391094Sdes   Contributed by John Carr (jfc@mit.edu).
491094Sdes
591094SdesThis file is part of GCC.
699158Sdes
799158SdesGCC is free software; you can redistribute it and/or modify it under
899158Sdesthe terms of the GNU General Public License as published by the Free
991094SdesSoftware Foundation; either version 2, or (at your option) any later
1091094Sdesversion.
1191094Sdes
1291094SdesGCC is distributed in the hope that it will be useful, but WITHOUT ANY
1391094SdesWARRANTY; without even the implied warranty of MERCHANTABILITY or
1491094SdesFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1591094Sdesfor more details.
1691094Sdes
1791094SdesYou should have received a copy of the GNU General Public License
1891094Sdesalong with GCC; see the file COPYING.  If not, write to the Free
1991094SdesSoftware Foundation, 59 Temple Place - Suite 330, Boston, MA
2091094Sdes02111-1307, USA.  */
2191094Sdes
2291094Sdes#include "config.h"
2391094Sdes#include "system.h"
2491094Sdes#include "rtl.h"
2591094Sdes#include "tree.h"
2691094Sdes#include "tm_p.h"
2791094Sdes#include "function.h"
2891094Sdes#include "expr.h"
2991094Sdes#include "regs.h"
3091094Sdes#include "hard-reg-set.h"
3191094Sdes#include "basic-block.h"
3291094Sdes#include "flags.h"
3391094Sdes#include "output.h"
34115619Sdes#include "toplev.h"
3591094Sdes#include "cselib.h"
3691094Sdes#include "splay-tree.h"
3791094Sdes#include "ggc.h"
3891094Sdes#include "langhooks.h"
3993982Sdes
4096364Sdes/* The alias sets assigned to MEMs assist the back-end in determining
4193982Sdes   which MEMs can alias which other MEMs.  In general, two MEMs in
4291094Sdes   different alias sets cannot alias each other, with one important
4391094Sdes   exception.  Consider something like:
4491094Sdes
4591094Sdes     struct S {int i; double d; };
4691094Sdes
4793982Sdes   a store to an `S' can alias something of either type `int' or type
4893982Sdes   `double'.  (However, a store to an `int' cannot alias a `double'
4993982Sdes   and vice versa.)  We indicate this via a tree structure that looks
5093982Sdes   like:
5191094Sdes           struct S
5291094Sdes            /   \
5391094Sdes	   /     \
5491094Sdes         |/_     _\|
5591094Sdes         int    double
5691094Sdes
5791094Sdes   (The arrows are directed and point downwards.)
5891094Sdes    In this situation we say the alias set for `struct S' is the
5993982Sdes   `superset' and that those for `int' and `double' are `subsets'.
6091094Sdes
6191094Sdes   To see whether two alias sets can point to the same memory, we must
6291094Sdes   see if either alias set is a subset of the other. We need not trace
6393982Sdes   past immediate descendents, however, since we propagate all
6493982Sdes   grandchildren up one level.
6593982Sdes
6693982Sdes   Alias set zero is implicitly a superset of all other alias sets.
6791094Sdes   However, this is no actual entry for alias set zero.  It is an
68107937Sdes   error to attempt to explicitly construct a subset of zero.  */
6991094Sdes
/* An entry in the splay tree of alias sets.  Note that the typedef
   names the POINTER type; entries are heap-allocated and live for the
   whole compilation.  */
typedef struct alias_set_entry
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendents.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree children;

  /* Nonzero if this set would have a child of alias set zero: this
     effectively makes this alias set the same as alias set zero, i.e.,
     it conflicts with everything.  */
  int has_zero_child;
} *alias_set_entry;
8893982Sdes
/* Forward declarations for the static helpers defined below.  Note that
   get_addr is deliberately non-static: it is used by the scheduler.  */
static int rtx_equal_for_memref_p	PARAMS ((rtx, rtx));
static rtx find_symbolic_term		PARAMS ((rtx));
rtx get_addr				PARAMS ((rtx));
static int memrefs_conflict_p		PARAMS ((int, rtx, int, rtx,
						 HOST_WIDE_INT));
static void record_set			PARAMS ((rtx, rtx, void *));
static rtx find_base_term		PARAMS ((rtx));
static int base_alias_check		PARAMS ((rtx, rtx, enum machine_mode,
						 enum machine_mode));
static rtx find_base_value		PARAMS ((rtx));
static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
static int insert_subset_children       PARAMS ((splay_tree_node, void*));
static tree find_base_decl		PARAMS ((tree));
static alias_set_entry get_alias_set_entry PARAMS ((HOST_WIDE_INT));
static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
						      int (*) (rtx, int)));
static int aliases_everything_p         PARAMS ((rtx));
static bool nonoverlapping_component_refs_p PARAMS ((tree, tree));
static tree decl_for_component_ref	PARAMS ((tree));
static rtx adjust_offset_for_component_ref PARAMS ((tree, rtx));
static int nonoverlapping_memrefs_p	PARAMS ((rtx, rtx));
static int write_dependence_p           PARAMS ((rtx, rtx, int));
static int nonlocal_mentioned_p         PARAMS ((rtx));
112115619Sdes
/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.   10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS is not normally used, the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

static rtx *reg_base_value;
static rtx *new_reg_base_value;
static unsigned int reg_base_value_size; /* size of reg_base_value array */

/* The base value of register X, or 0 when X is out of range of the
   tracked registers.  */
#define REG_BASE_VALUE(X) \
  (REGNO (X) < reg_base_value_size \
   ? reg_base_value[REGNO (X)] : 0)

/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number, if nonzero the value
   is an expression describing this register in terms of another.

   The length of this array is REG_BASE_VALUE_SIZE.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static rtx *alias_invariant;

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in
   init_alias_analysis, and does not change until end_alias_analysis
   is called.  */
rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
char *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static int copying_arguments;

/* The splay-tree used to store the various alias set entries.  */
static splay_tree alias_sets;
195
196/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
197   such an entry, or NULL otherwise.  */
198
199static alias_set_entry
200get_alias_set_entry (alias_set)
201     HOST_WIDE_INT alias_set;
202{
203  splay_tree_node sn
204    = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
205
206  return sn != 0 ? ((alias_set_entry) sn->value) : 0;
207}
208
/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static int
mems_in_disjoint_alias_sets_p (mem1, mem2)
     rtx mem1;
     rtx mem2;
{
#ifdef ENABLE_CHECKING
/* Perform a basic sanity check.  Namely, that there are no alias sets
   if we're not using strict aliasing.  This helps to catch bugs
   whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
   where a MEM is allocated in some way other than by the use of
   gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
   use alias sets to indicate that spilled registers cannot alias each
   other, we might need to remove this check.  */
  if (! flag_strict_aliasing
      && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
    abort ();
#endif

  /* Disjoint is the negation of "may conflict".  */
  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}
232
233/* Insert the NODE into the splay tree given by DATA.  Used by
234   record_alias_subset via splay_tree_foreach.  */
235
236static int
237insert_subset_children (node, data)
238     splay_tree_node node;
239     void *data;
240{
241  splay_tree_insert ((splay_tree) data, node->key, node->value);
242
243  return 0;
244}
245
246/* Return 1 if the two specified alias sets may conflict.  */
247
248int
249alias_sets_conflict_p (set1, set2)
250     HOST_WIDE_INT set1, set2;
251{
252  alias_set_entry ase;
253
254  /* If have no alias set information for one of the operands, we have
255     to assume it can alias anything.  */
256  if (set1 == 0 || set2 == 0
257      /* If the two alias sets are the same, they may alias.  */
258      || set1 == set2)
259    return 1;
260
261  /* See if the first alias set is a subset of the second.  */
262  ase = get_alias_set_entry (set1);
263  if (ase != 0
264      && (ase->has_zero_child
265	  || splay_tree_lookup (ase->children,
266				(splay_tree_key) set2)))
267    return 1;
268
269  /* Now do the same, but with the alias sets reversed.  */
270  ase = get_alias_set_entry (set2);
271  if (ase != 0
272      && (ase->has_zero_child
273	  || splay_tree_lookup (ase->children,
274				(splay_tree_key) set1)))
275    return 1;
276
277  /* The two alias sets are distinct and neither one is the
278     child of the other.  Therefore, they cannot alias.  */
279  return 0;
280}
281
282/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and has
283   has any readonly fields.  If any of the fields have types that
284   contain readonly fields, return true as well.  */
285
286int
287readonly_fields_p (type)
288     tree type;
289{
290  tree field;
291
292  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
293      && TREE_CODE (type) != QUAL_UNION_TYPE)
294    return 0;
295
296  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
297    if (TREE_CODE (field) == FIELD_DECL
298	&& (TREE_READONLY (field)
299	    || readonly_fields_p (TREE_TYPE (field))))
300      return 1;
301
302  return 0;
303}
304
305/* Return 1 if any MEM object of type T1 will always conflict (using the
306   dependency routines in this file) with any MEM object of type T2.
307   This is used when allocating temporary storage.  If T1 and/or T2 are
308   NULL_TREE, it means we know nothing about the storage.  */
309
310int
311objects_must_conflict_p (t1, t2)
312     tree t1, t2;
313{
314  /* If neither has a type specified, we don't know if they'll conflict
315     because we may be using them to store objects of various types, for
316     example the argument and local variables areas of inlined functions.  */
317  if (t1 == 0 && t2 == 0)
318    return 0;
319
320  /* If one or the other has readonly fields or is readonly,
321     then they may not conflict.  */
322  if ((t1 != 0 && readonly_fields_p (t1))
323      || (t2 != 0 && readonly_fields_p (t2))
324      || (t1 != 0 && TYPE_READONLY (t1))
325      || (t2 != 0 && TYPE_READONLY (t2)))
326    return 0;
327
328  /* If they are the same type, they must conflict.  */
329  if (t1 == t2
330      /* Likewise if both are volatile.  */
331      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
332    return 1;
333
334  /* If one is aggregate and the other is scalar then they may not
335     conflict.  */
336  if ((t1 != 0 && AGGREGATE_TYPE_P (t1))
337      != (t2 != 0 && AGGREGATE_TYPE_P (t2)))
338    return 0;
339
340  /* Otherwise they conflict only if the alias sets conflict.  */
341  return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
342				t2 ? get_alias_set (t2) : 0);
343}
344
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (t)
     tree t;
{
  tree d0, d1, d2;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
    return t;

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case '1':
      /* Unary expression: recurse into the single operand.  */
      return find_base_decl (TREE_OPERAND (t, 0));

    case '2':
      /* Binary expression: return 0 if found in neither operand or
	 both resolve to distinct decls; otherwise the unique decl.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    case '3':
      /* Ternary expression: all three operands must agree on a single
	 decl (ignoring operands that yield none).  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      d2 = find_base_decl (TREE_OPERAND (t, 2));

      /* Set any nonzero values from the last, then from the first.
	 The order of these assignments matters: it fills every zero
	 slot with a neighbor's value so the final comparison works.  */
      if (d1 == 0) d1 = d2;
      if (d0 == 0) d0 = d1;
      if (d1 == 0) d1 = d0;
      if (d2 == 0) d2 = d1;

      /* At this point all are nonzero or all are zero.  If all three are the
	 same, return it.  Otherwise, return zero.  */
      return (d0 == d1 && d1 == d2) ? d0 : 0;

    default:
      return 0;
    }
}
403
404/* Return 1 if all the nested component references handled by
405   get_inner_reference in T are such that we can address the object in T.  */
406
407int
408can_address_p (t)
409     tree t;
410{
411  /* If we're at the end, it is vacuously addressable.  */
412  if (! handled_component_p (t))
413    return 1;
414
415  /* Bitfields are never addressable.  */
416  else if (TREE_CODE (t) == BIT_FIELD_REF)
417    return 0;
418
419  /* Fields are addressable unless they are marked as nonaddressable or
420     the containing type has alias set 0.  */
421  else if (TREE_CODE (t) == COMPONENT_REF
422	   && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
423	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
424	   && can_address_p (TREE_OPERAND (t, 0)))
425    return 1;
426
427  /* Likewise for arrays.  */
428  else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
429	   && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
430	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
431	   && can_address_p (TREE_OPERAND (t, 0)))
432    return 1;
433
434  return 0;
435}
436
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (t)
     tree t;
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;
      tree placeholder_ptr = 0;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = (*lang_hooks.get_alias_set) (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  Replace
	 PLACEHOLDER_EXPRs.  */
      while (TREE_CODE (inner) == PLACEHOLDER_EXPR
	     || handled_component_p (inner))
	{
	  if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	    inner = find_placeholder (inner, &placeholder_ptr);
	  else
	    inner = TREE_OPERAND (inner, 0);

	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (TREE_CODE (inner) == INDIRECT_REF)
	{
	  tree decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.
		 -2 is the "not yet computed" marker.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  HOST_WIDE_INT pointed_to_alias_set
		    = get_alias_set (TREE_TYPE (TREE_TYPE (decl)));

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    ;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset  (pointed_to_alias_set,
					    DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE)
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversion and PLACEHOLDER_EXPR as above.  */
      placeholder_ptr = 0;
      while (TREE_CODE (t) == PLACEHOLDER_EXPR
	     || (handled_component_p (t) && ! can_address_p (t)))
	{
	  if (TREE_CODE (t) == PLACEHOLDER_EXPR)
	    t = find_placeholder (t, &placeholder_ptr);
	  else
	    t = TREE_OPERAND (t, 0);

	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && GET_CODE (DECL_RTL (t)) == MEM)
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant. If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = (*lang_hooks.get_alias_set) (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;
  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
580
581/* Return a brand-new alias set.  */
582
583HOST_WIDE_INT
584new_alias_set ()
585{
586  static HOST_WIDE_INT last_alias_set;
587
588  if (flag_strict_aliasing)
589    return ++last_alias_set;
590  else
591    return 0;
592}
593
/* Indicate that things in SUBSET can alias things in SUPERSET, but
   not vice versa.  For example, in C, a store to an `int' can alias a
   structure containing an `int', but not vice versa.  Here, the
   structure would be the SUPERSET and `int' the SUBSET.  This
   function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (superset, subset)
     HOST_WIDE_INT superset;
     HOST_WIDE_INT subset;
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  if (superset == 0)
    abort ();

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry
	= (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new (splay_tree_compare_ints, 0, 0);
      superset_entry->has_zero_child = 0;
      splay_tree_insert (alias_sets, (splay_tree_key) superset,
			 (splay_tree_value) superset_entry);
    }

  if (subset == 0)
    /* A zero subset makes the superset conflict with everything; no
       child entry is stored for it.  */
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.
	 This keeps the tree flattened: all descendents are direct
	 children, so conflict checks need not walk more than one level.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}
655
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual component aren't addressable.  */

void
record_component_aliases (type)
     tree type;
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  /* Alias set zero cannot have subsets; nothing to record.  */
  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
	record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Only addressable fields can be accessed through an lvalue of
	 the field's own type, so only those become subsets.  */
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      /* The real and imaginary parts are accessed as the element type.  */
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}
695
696/* Allocate an alias set for use in storing and reading from the varargs
697   spill area.  */
698
699HOST_WIDE_INT
700get_varargs_alias_set ()
701{
702  static HOST_WIDE_INT set = -1;
703
704  if (set == -1)
705    set = new_alias_set ();
706
707  return set;
708}
709
710/* Likewise, but used for the fixed portions of the frame, e.g., register
711   save areas.  */
712
713HOST_WIDE_INT
714get_frame_alias_set ()
715{
716  static HOST_WIDE_INT set = -1;
717
718  if (set == -1)
719    set = new_alias_set ();
720
721  return set;
722}
723
/* Inside SRC, the source of a SET, find a base address: a SYMBOL_REF,
   LABEL_REF, or ADDRESS rtx identifying the top-level object SRC
   points into, or 0 if none can be determined.  */

static rtx
find_base_value (src)
     rtx src;
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < reg_base_value_size
	  && reg_base_value[regno])
	return reg_base_value[regno];

      return src;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* AND with a nonzero constant is used for alignment masking; the
	 first operand carries the base address.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      /* A truncation to narrower than a pointer loses base information.  */
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* These wrap an address without changing which object it names.  */
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      {
	rtx temp = find_base_value (XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
	if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
	  temp = convert_memory_address (Pmode, temp);
#endif

	return temp;
      }

    default:
      break;
    }

  return 0;
}
878
879/* Called from init_alias_analysis indirectly through note_stores.  */
880
/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  Indexed by register
   number; allocated elsewhere (see init_alias_analysis).  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  record_set increments this each time it
   invents a fresh (address N) for a register set in an opaque way.  */
static int unique_id;
888
static void
record_set (dest, set, data)
     rtx dest, set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned regno;
  rtx src;

  /* Only stores into registers carry base-value information.  */
  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  /* The tables are sized for every register in the function; anything
     out of range indicates an internal inconsistency.  */
  if (regno >= reg_base_value_size)
    abort ();

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      /* No SET rtx is available, so the register is set in some way we
	 cannot analyze (NOTE(review): presumably via note_stores on a
	 non-SET store -- confirm against init_alias_analysis).  On the
	 first such set, give it a brand-new unique ADDRESS as its base;
	 any later set invalidates the base value entirely.  */
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value. Note that the following code is
     not detected:
     extern int x, y;  int *p = &x; p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	/* Keep the old base only if the register itself appears as an
	   operand (i.e. this looks like an adjustment of DEST).  */
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	/* Masking DEST with a constant (an alignment operation) keeps
	   the base; anything else invalidates it.  */
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
975
/* Called from loop optimization when a new pseudo-register is
   created.  It indicates that REGNO is being set to VAL.  If INVARIANT
   is true then this value also describes an invariant relationship
   which can be used to deduce that two registers with unknown values
   are different.  */
981
982void
983record_base_value (regno, val, invariant)
984     unsigned int regno;
985     rtx val;
986     int invariant;
987{
988  if (regno >= reg_base_value_size)
989    return;
990
991  if (invariant && alias_invariant)
992    alias_invariant[regno] = val;
993
994  if (GET_CODE (val) == REG)
995    {
996      if (REGNO (val) < reg_base_value_size)
997	reg_base_value[regno] = reg_base_value[REGNO (val)];
998
999      return;
1000    }
1001
1002  reg_base_value[regno] = find_base_value (val);
1003}
1004
1005/* Clear alias info for a register.  This is used if an RTL transformation
1006   changes the value of a register.  This is used in flow by AUTO_INC_DEC
1007   optimizations.  We don't need to clear reg_base_value, since flow only
1008   changes the offset.  */
1009
1010void
1011clear_reg_alias_info (reg)
1012     rtx reg;
1013{
1014  unsigned int regno = REGNO (reg);
1015
1016  if (regno < reg_known_value_size && regno >= FIRST_PSEUDO_REGISTER)
1017    reg_known_value[regno] = reg;
1018}
1019
1020/* Returns a canonical version of X, from the point of view alias
1021   analysis.  (For example, if X is a MEM whose address is a register,
1022   and the register has a known value (say a SYMBOL_REF), then a MEM
1023   whose address is the SYMBOL_REF is returned.)  */
1024
1025rtx
1026canon_rtx (x)
1027     rtx x;
1028{
1029  /* Recursively look for equivalences.  */
1030  if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
1031      && REGNO (x) < reg_known_value_size)
1032    return reg_known_value[REGNO (x)] == x
1033      ? x : canon_rtx (reg_known_value[REGNO (x)]);
1034  else if (GET_CODE (x) == PLUS)
1035    {
1036      rtx x0 = canon_rtx (XEXP (x, 0));
1037      rtx x1 = canon_rtx (XEXP (x, 1));
1038
1039      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1040	{
1041	  if (GET_CODE (x0) == CONST_INT)
1042	    return plus_constant (x1, INTVAL (x0));
1043	  else if (GET_CODE (x1) == CONST_INT)
1044	    return plus_constant (x0, INTVAL (x1));
1045	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1046	}
1047    }
1048
1049  /* This gives us much better alias analysis when called from
1050     the loop optimizer.   Note we want to leave the original
1051     MEM alone, but need to return the canonicalized MEM with
1052     all the flags with their original values.  */
1053  else if (GET_CODE (x) == MEM)
1054    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1055
1056  return x;
1057}
1058
1059/* Return 1 if X and Y are identical-looking rtx's.
1060
1061   We use the data in reg_known_value above to see if two registers with
1062   different numbers are, in fact, equivalent.  */
1063
static int
rtx_equal_for_memref_p (x, y)
     rtx x, y;
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  /* Two null references are equal; a null and a non-null are not.  */
  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  /* Compare canonical forms so that register equivalences recorded in
     reg_known_value are taken into account.  */
  x = canon_rtx (x);
  y = canon_rtx (y);

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case VALUE:
      return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      /* Compare names by pointer identity.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    case ADDRESSOF:
      return (XINT (x, 1) == XINT (y, 1)
	      && rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0)));

    default:
      break;
    }

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
1189
1190/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
1191   X and return it, or return 0 if none found.  */
1192
1193static rtx
1194find_symbolic_term (x)
1195     rtx x;
1196{
1197  int i;
1198  enum rtx_code code;
1199  const char *fmt;
1200
1201  code = GET_CODE (x);
1202  if (code == SYMBOL_REF || code == LABEL_REF)
1203    return x;
1204  if (GET_RTX_CLASS (code) == 'o')
1205    return 0;
1206
1207  fmt = GET_RTX_FORMAT (code);
1208  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1209    {
1210      rtx t;
1211
1212      if (fmt[i] == 'e')
1213	{
1214	  t = find_symbolic_term (XEXP (x, i));
1215	  if (t != 0)
1216	    return t;
1217	}
1218      else if (fmt[i] == 'E')
1219	break;
1220    }
1221  return 0;
1222}
1223
/* Return the base term of address X (a SYMBOL_REF, LABEL_REF or
   ADDRESS rtx), or 0 if no base can be determined.  */

static rtx
find_base_term (x)
     rtx x;
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      /* Truncating to narrower than a pointer loses the base.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
        return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* These wrappers do not change which object is addressed.  */
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      {
	rtx temp = find_base_term (XEXP (x, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
	if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
	  temp = convert_memory_address (Pmode, temp);
#endif

	return temp;
      }

    case VALUE:
      /* A cselib value: try each of its known locations in turn.  */
      val = CSELIB_VAL_PTR (x);
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* fall through */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      /* An AND with a nonzero constant mask is an alignment operation;
	 the base is that of the first operand.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case ADDRESSOF:
      /* (address-of reg) refers to the stack frame.  */
      return REG_BASE_VALUE (frame_pointer_rtx);

    default:
      return 0;
    }
}
1353
1354/* Return 0 if the addresses X and Y are known to point to different
1355   objects, 1 if they might be pointers to the same object.  */
1356
static int
base_alias_check (x, y, x_mode, y_mode)
     rtx x, y;
     enum machine_mode x_mode, y_mode;
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      /* Two AND accesses may reach outside their nominal objects.  */
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      /* A single AND access may still conflict when the mask is not a
	 constant, or the other reference is smaller than the
	 alignment the mask implies.  */
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}
1433
1434/* Convert the address X into something we can use.  This is done by returning
1435   it unchanged unless it is a value; in the latter case we call cselib to get
1436   a more useful rtx.  */
1437
1438rtx
1439get_addr (x)
1440     rtx x;
1441{
1442  cselib_val *v;
1443  struct elt_loc_list *l;
1444
1445  if (GET_CODE (x) != VALUE)
1446    return x;
1447  v = CSELIB_VAL_PTR (x);
1448  for (l = v->locs; l; l = l->next)
1449    if (CONSTANT_P (l->loc))
1450      return l->loc;
1451  for (l = v->locs; l; l = l->next)
1452    if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
1453      return l->loc;
1454  if (v->locs)
1455    return v->locs->loc;
1456  return x;
1457}
1458
1459/*  Return the address of the (N_REFS + 1)th memory reference to ADDR
1460    where SIZE is the size in bytes of the memory reference.  If ADDR
1461    is not modified by the memory reference then ADDR is returned.  */
1462
1463rtx
1464addr_side_effect_eval (addr, size, n_refs)
1465     rtx addr;
1466     int size;
1467     int n_refs;
1468{
1469  int offset = 0;
1470
1471  switch (GET_CODE (addr))
1472    {
1473    case PRE_INC:
1474      offset = (n_refs + 1) * size;
1475      break;
1476    case PRE_DEC:
1477      offset = -(n_refs + 1) * size;
1478      break;
1479    case POST_INC:
1480      offset = n_refs * size;
1481      break;
1482    case POST_DEC:
1483      offset = -n_refs * size;
1484      break;
1485
1486    default:
1487      return addr;
1488    }
1489
1490  if (offset)
1491    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
1492  else
1493    addr = XEXP (addr, 0);
1494
1495  return addr;
1496}
1497
1498/* Return nonzero if X and Y (memory addresses) could reference the
1499   same location in memory.  C is an offset accumulator.  When
1500   C is nonzero, we are testing aliases between X and Y + C.
1501   XSIZE is the size in bytes of the X reference,
1502   similarly YSIZE is the size in bytes for Y.
1503
1504   If XSIZE or YSIZE is zero, we do not know the amount of memory being
1505   referenced (the reference was BLKmode), so make the most pessimistic
1506   assumptions.
1507
1508   If XSIZE or YSIZE is negative, we may access memory outside the object
1509   being referenced as a side effect.  This can happen when using AND to
1510   align memory references, as is done on the Alpha.
1511
1512   Nice to notice that varying addresses cannot conflict with fp if no
1513   local variables had their addresses taken, but that's too hard now.  */
1514
static int
memrefs_conflict_p (xsize, x, ysize, y, c)
     rtx x, y;
     int xsize, ysize;
     HOST_WIDE_INT c;
{
  /* Resolve VALUEs to concrete addresses, look through HIGH/LO_SUM
     pairs, and apply auto-modify side effects before canonicalizing.  */
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = canon_rtx (addr_side_effect_eval (y, ysize, 0));

  /* Identical addresses: overlap is decided purely by the sizes and
     the offset C.  */
  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  /* Matching second (or first) operands: recurse on the other
	     pair; constant operands fold into the offset C.  */
	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  /* Scale sizes and offset down by the common multiplier and
	     recurse on the first operands.  */
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
	  {
	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
	    rtx i_x, i_y;	/* invariant relationships of X and Y */

	    i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
	    i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];

	    if (i_x == 0 && i_y == 0)
	      break;

	    /* Re-run the check with the invariant values substituted.  */
	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
				      ysize, i_y ? i_y : y, c))
	      return 0;
	  }
	break;

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
    }

  /* An ADDRESSOF can only overlap the frame pointer or another
     ADDRESSOF, and then only when a size is unknown.  */
  if (GET_CODE (x) == ADDRESSOF)
    {
      if (y == frame_pointer_rtx
	  || GET_CODE (y) == ADDRESSOF)
	return xsize <= 0 || ysize <= 0;
    }
  if (GET_CODE (y) == ADDRESSOF)
    {
      if (x == frame_pointer_rtx)
	return xsize <= 0 || ysize <= 0;
    }

  if (CONSTANT_P (x))
    {
      /* Two constant integers: fold the difference into C and compare
	 against the sizes.  */
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      /* Strip CONST wrappers and recurse.  */
      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  return 1;
}
1715
1716/* Functions to compute memory dependencies.
1717
1718   Since we process the insns in execution order, we can build tables
1719   to keep track of what registers are fixed (and not aliased), what registers
1720   are varying in known ways, and what registers are varying in unknown
1721   ways.
1722
1723   If both memory references are volatile, then there must always be a
1724   dependence between the two references, since their order can not be
1725   changed.  A volatile and non-volatile reference can be interchanged
1726   though.
1727
1728   A MEM_IN_STRUCT reference at a non-AND varying address can never
1729   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
1730   also must allow AND addresses, because they may generate accesses
1731   outside the object being referenced.  This is used to generate
1732   aligned addresses from unaligned addresses, for instance, the alpha
1733   storeqi_unaligned pattern.  */
1734
1735/* Read dependence: X is read after read in MEM takes place.  There can
1736   only be a dependence here if both reads are volatile.  */
1737
1738int
1739read_dependence (mem, x)
1740     rtx mem;
1741     rtx x;
1742{
1743  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1744}
1745
1746/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1747   MEM2 is a reference to a structure at a varying address, or returns
1748   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
1749   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
1750   to decide whether or not an address may vary; it should return
1751   nonzero whenever variation is possible.
1752   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */
1753
1754static rtx
1755fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1756     rtx mem1, mem2;
1757     rtx mem1_addr, mem2_addr;
1758     int (*varies_p) PARAMS ((rtx, int));
1759{
1760  if (! flag_strict_aliasing)
1761    return NULL_RTX;
1762
1763  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1764      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1765    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1766       varying address.  */
1767    return mem1;
1768
1769  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1770      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1771    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1772       varying address.  */
1773    return mem2;
1774
1775  return NULL_RTX;
1776}
1777
1778/* Returns nonzero if something about the mode or address format MEM1
1779   indicates that it might well alias *anything*.  */
1780
1781static int
1782aliases_everything_p (mem)
1783     rtx mem;
1784{
1785  if (GET_CODE (XEXP (mem, 0)) == AND)
1786    /* If the address is an AND, its very hard to know at what it is
1787       actually pointing.  */
1788    return 1;
1789
1790  return 0;
1791}
1792
1793/* Return true if we can determine that the fields referenced cannot
1794   overlap for any pair of objects.  */
1795
static bool
nonoverlapping_component_refs_p (x, y)
     tree x, y;
{
  tree fieldx, fieldy, typex, typey, orig_y;

  do
    {
      /* The comparison has to be done at a common type, since we don't
	 know how the inheritance hierarchy works.  */
      orig_y = y;
      do
	{
	  /* Walk X outward one COMPONENT_REF level at a time...  */
	  fieldx = TREE_OPERAND (x, 1);
	  typex = DECL_FIELD_CONTEXT (fieldx);

	  /* ...and for each X level, rescan every level of Y looking
	     for a field whose containing type matches.  */
	  y = orig_y;
	  do
	    {
	      fieldy = TREE_OPERAND (y, 1);
	      typey = DECL_FIELD_CONTEXT (fieldy);

	      if (typex == typey)
		goto found;

	      y = TREE_OPERAND (y, 0);
	    }
	  while (y && TREE_CODE (y) == COMPONENT_REF);

	  x = TREE_OPERAND (x, 0);
	}
      while (x && TREE_CODE (x) == COMPONENT_REF);

      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
	 then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
	  && fieldx != fieldy)
	return true;

      /* The comparison on the current field failed.  If we're accessing
	 a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
	 && TREE_CODE (x) == COMPONENT_REF
	 && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
1850
1851/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */
1852
1853static tree
1854decl_for_component_ref (x)
1855     tree x;
1856{
1857  do
1858    {
1859      x = TREE_OPERAND (x, 0);
1860    }
1861  while (x && TREE_CODE (x) == COMPONENT_REF);
1862
1863  return x && DECL_P (x) ? x : NULL_TREE;
1864}
1865
1866/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
1867   offset of the field reference.  */
1868
1869static rtx
1870adjust_offset_for_component_ref (x, offset)
1871     tree x;
1872     rtx offset;
1873{
1874  HOST_WIDE_INT ioffset;
1875
1876  if (! offset)
1877    return NULL_RTX;
1878
1879  ioffset = INTVAL (offset);
1880  do
1881    {
1882      tree field = TREE_OPERAND (x, 1);
1883
1884      if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
1885	return NULL_RTX;
1886      ioffset += (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1887		  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1888		     / BITS_PER_UNIT));
1889
1890      x = TREE_OPERAND (x, 0);
1891    }
1892  while (x && TREE_CODE (x) == COMPONENT_REF);
1893
1894  return GEN_INT (ioffset);
1895}
1896
/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */
1899
1900static int
1901nonoverlapping_memrefs_p (x, y)
1902     rtx x, y;
1903{
1904  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
1905  rtx rtlx, rtly;
1906  rtx basex, basey;
1907  rtx moffsetx, moffsety;
1908  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
1909
1910  /* Unless both have exprs, we can't tell anything.  */
1911  if (exprx == 0 || expry == 0)
1912    return 0;
1913
1914  /* If both are field references, we may be able to determine something.  */
1915  if (TREE_CODE (exprx) == COMPONENT_REF
1916      && TREE_CODE (expry) == COMPONENT_REF
1917      && nonoverlapping_component_refs_p (exprx, expry))
1918    return 1;
1919
1920  /* If the field reference test failed, look at the DECLs involved.  */
1921  moffsetx = MEM_OFFSET (x);
1922  if (TREE_CODE (exprx) == COMPONENT_REF)
1923    {
1924      tree t = decl_for_component_ref (exprx);
1925      if (! t)
1926	return 0;
1927      moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
1928      exprx = t;
1929    }
1930  moffsety = MEM_OFFSET (y);
1931  if (TREE_CODE (expry) == COMPONENT_REF)
1932    {
1933      tree t = decl_for_component_ref (expry);
1934      if (! t)
1935	return 0;
1936      moffsety = adjust_offset_for_component_ref (expry, moffsety);
1937      expry = t;
1938    }
1939
1940  if (! DECL_P (exprx) || ! DECL_P (expry))
1941    return 0;
1942
1943  rtlx = DECL_RTL (exprx);
1944  rtly = DECL_RTL (expry);
1945
1946  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
1947     can't overlap unless they are the same because we never reuse that part
1948     of the stack frame used for locals for spilled pseudos.  */
1949  if ((GET_CODE (rtlx) != MEM || GET_CODE (rtly) != MEM)
1950      && ! rtx_equal_p (rtlx, rtly))
1951    return 1;
1952
1953  /* Get the base and offsets of both decls.  If either is a register, we
1954     know both are and are the same, so use that as the base.  The only
1955     we can avoid overlap is if we can deduce that they are nonoverlapping
1956     pieces of that decl, which is very rare.  */
1957  basex = GET_CODE (rtlx) == MEM ? XEXP (rtlx, 0) : rtlx;
1958  if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
1959    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
1960
1961  basey = GET_CODE (rtly) == MEM ? XEXP (rtly, 0) : rtly;
1962  if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
1963    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
1964
1965  /* If the bases are different, we know they do not overlap if both
1966     are constants or if one is a constant and the other a pointer into the
1967     stack frame.  Otherwise a different base means we can't tell if they
1968     overlap or not.  */
1969  if (! rtx_equal_p (basex, basey))
1970      return ((CONSTANT_P (basex) && CONSTANT_P (basey))
1971	      || (CONSTANT_P (basex) && REG_P (basey)
1972		  && REGNO_PTR_FRAME_P (REGNO (basey)))
1973	      || (CONSTANT_P (basey) && REG_P (basex)
1974		  && REGNO_PTR_FRAME_P (REGNO (basex))));
1975
1976  sizex = (GET_CODE (rtlx) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
1977	   : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
1978	   : -1);
1979  sizey = (GET_CODE (rtly) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtly))
1980	   : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
1981	   -1);
1982
1983  /* If we have an offset for either memref, it can update the values computed
1984     above.  */
1985  if (moffsetx)
1986    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
1987  if (moffsety)
1988    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
1989
1990  /* If a memref has both a size and an offset, we can use the smaller size.
1991     We can't do this if the offset isn't known because we must view this
1992     memref as being anywhere inside the DECL's MEM.  */
1993  if (MEM_SIZE (x) && moffsetx)
1994    sizex = INTVAL (MEM_SIZE (x));
1995  if (MEM_SIZE (y) && moffsety)
1996    sizey = INTVAL (MEM_SIZE (y));
1997
1998  /* Put the values of the memref with the lower offset in X's values.  */
1999  if (offsetx > offsety)
2000    {
2001      tem = offsetx, offsetx = offsety, offsety = tem;
2002      tem = sizex, sizex = sizey, sizey = tem;
2003    }
2004
2005  /* If we don't know the size of the lower-offset value, we can't tell
2006     if they conflict.  Otherwise, we do the test.  */
2007  return sizex >= 0 && offsety > offsetx + sizex;
2008}
2009
/* True dependence: X is read after store in MEM takes place.
   MEM_MODE is the mode of MEM (VOIDmode means use GET_MODE (MEM)) and
   VARIES is a callback used to decide whether an address can vary.
   Returns nonzero if the read may depend on the store.  */

int
true_dependence (mem, mem_mode, x, varies)
     rtx mem;
     enum machine_mode mem_mode;
     rtx x;
     int (*varies) PARAMS ((rtx, int));
{
  rtx x_addr, mem_addr;
  rtx base;

  /* Two volatile references always conflict.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  /* References in provably distinct alias sets cannot conflict.  */
  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Unchanging memory can't conflict with non-unchanging memory.
     A non-unchanging read can conflict with a non-unchanging write.
     An unchanging read can conflict with an unchanging write since
     there may be a single store to this address to initialize it.
     Note that an unchanging store can conflict with a non-unchanging read
     since we have to make conservative assumptions when we have a
     record with readonly fields and we are copying the whole thing.
     Just fall through to the code below to resolve potential conflicts.
     This won't handle all cases optimally, but the possible performance
     loss should be negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  /* Disjoint pieces of declared objects cannot conflict.  */
  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  /* A read whose address is based on a label or on the constant pool
     cannot be the target of any store.  */
  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
	       || (GET_CODE (base) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
			    SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  /* As a last resort, a fixed scalar and a varying structure reference
     are known not to conflict.  */
  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					      varies);
}
2089
/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (mem, mem_mode, mem_addr, x, varies)
     rtx mem, mem_addr, x;
     enum machine_mode mem_mode;
     int (*varies) PARAMS ((rtx, int));
{
  rtx x_addr;

  /* Two volatile references always conflict.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  /* References in provably distinct alias sets cannot conflict.  */
  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* If X is an unchanging read, then it can't possibly conflict with any
     non-unchanging store.  It may conflict with an unchanging write though,
     because there may be a single store to this address to initialize it.
     Just fall through to the code below to resolve the case where we have
     both an unchanging read and an unchanging write.  This won't handle all
     cases optimally, but the possible performance loss should be
     negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  /* Disjoint pieces of declared objects cannot conflict.  */
  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  /* Only X's address needs canonicalizing; MEM_ADDR already is.  */
  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
			    SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					      varies);
}
2156
/* Returns non-zero if a write to X might alias a previous read from
   (or, if WRITEP is non-zero, a write to) MEM.  Shared worker for
   anti_dependence and output_dependence.  */

static int
write_dependence_p (mem, x, writep)
     rtx mem;
     rtx x;
     int writep;
{
  rtx x_addr, mem_addr;
  rtx fixed_scalar;
  rtx base;

  /* Two volatile references always conflict.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  /* References in provably distinct alias sets cannot conflict.  */
  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Unchanging memory can't conflict with non-unchanging memory.  */
  if (RTX_UNCHANGING_P (x) != RTX_UNCHANGING_P (mem))
    return 0;

  /* If MEM is an unchanging read, then it can't possibly conflict with
     the store to X, because there is at most one store to MEM, and it must
     have occurred somewhere before MEM.  */
  if (! writep && RTX_UNCHANGING_P (mem))
    return 0;

  /* Disjoint pieces of declared objects cannot conflict.  */
  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      /* A read from a label or from the constant pool can never be
	 clobbered by a subsequent store.  */
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
		   || (GET_CODE (base) == SYMBOL_REF
		       && CONSTANT_POOL_ADDRESS_P (base))))
	return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
			  GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
			   SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  /* A fixed scalar does not conflict with a varying structure reference
     unless the other memref can alias anything.  */
  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					 rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
	  && !(fixed_scalar == x && !aliases_everything_p (mem)));
}
2226
2227/* Anti dependence: X is written after read in MEM takes place.  */
2228
2229int
2230anti_dependence (mem, x)
2231     rtx mem;
2232     rtx x;
2233{
2234  return write_dependence_p (mem, x, /*writep=*/0);
2235}
2236
2237/* Output dependence: X is written after store in MEM takes place.  */
2238
2239int
2240output_dependence (mem, x)
2241     rtx mem;
2242     rtx x;
2243{
2244  return write_dependence_p (mem, x, /*writep=*/1);
2245}
2246
/* Returns non-zero if X mentions something which is not
   local to the function and is not constant.  Recurses over the
   operands of X; used by mark_constant_function to decide whether
   the current function may be marked TREE_READONLY.  */

static int
nonlocal_mentioned_p (x)
     rtx x;
{
  rtx base;
  RTX_CODE code;
  int regno;

  code = GET_CODE (x);

  /* For an insn, examine its pattern rather than the insn itself.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      /* Constant functions can be constant if they don't use
         scratch memory used to mark function w/o side effects.  */
      if (code == CALL_INSN && CONST_OR_PURE_CALL_P (x))
        {
	  x = CALL_INSN_FUNCTION_USAGE (x);
	  if (x == 0)
	    return 0;
        }
      else
        x = PATTERN (x);
      code = GET_CODE (x);
    }

  switch (code)
    {
    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  /* Global registers are not local.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      /* Global registers are not local.  */
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
      /* These are always local/constant.  */
      return 0;

    case SYMBOL_REF:
      /* Constants in the function's constants pool are constant.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	return 0;
      return 1;

    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case MEM:
      /* Be overly conservative and consider any volatile memory
	 reference as not local.  */
      if (MEM_VOLATILE_P (x))
	return 1;
      base = find_base_term (XEXP (x, 0));
      if (base)
	{
	  /* A Pmode ADDRESS could be a reference via the structure value
	     address or static chain.  Such memory references are nonlocal.

	     Thus, we have to examine the contents of the ADDRESS to find
	     out if this is a local reference or not.  */
	  if (GET_CODE (base) == ADDRESS
	      && GET_MODE (base) == Pmode
	      && (XEXP (base, 0) == stack_pointer_rtx
		  || XEXP (base, 0) == arg_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  || XEXP (base, 0) == hard_frame_pointer_rtx
#endif
		  || XEXP (base, 0) == frame_pointer_rtx))
	    return 0;
	  /* Constants in the function's constant pool are constant.  */
	  if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
	    return 0;
	}
      /* Any other memory reference is conservatively nonlocal.  */
      return 1;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;

    /* FALLTHROUGH */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e' && XEXP (x, i))
	  {
	    if (nonlocal_mentioned_p (XEXP (x, i)))
	      return 1;
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (nonlocal_mentioned_p (XVECEXP (x, i, j)))
		return 1;
	  }
      }
  }

  return 0;
}
2382
2383/* Mark the function if it is constant.  */
2384
2385void
2386mark_constant_function ()
2387{
2388  rtx insn;
2389  int nonlocal_mentioned;
2390
2391  if (TREE_PUBLIC (current_function_decl)
2392      || TREE_READONLY (current_function_decl)
2393      || DECL_IS_PURE (current_function_decl)
2394      || TREE_THIS_VOLATILE (current_function_decl)
2395      || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
2396    return;
2397
2398  /* A loop might not return which counts as a side effect.  */
2399  if (mark_dfs_back_edges ())
2400    return;
2401
2402  nonlocal_mentioned = 0;
2403
2404  init_alias_analysis ();
2405
2406  /* Determine if this is a constant function.  */
2407
2408  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2409    if (INSN_P (insn) && nonlocal_mentioned_p (insn))
2410      {
2411	nonlocal_mentioned = 1;
2412	break;
2413      }
2414
2415  end_alias_analysis ();
2416
2417  /* Mark the function.  */
2418
2419  if (! nonlocal_mentioned)
2420    TREE_READONLY (current_function_decl) = 1;
2421}
2422
2423
/* Set of hard registers that may carry an incoming pointer argument;
   computed once by init_alias_once and consulted on every pass of
   init_alias_analysis.  */
static HARD_REG_SET argument_registers;

/* One-time initialization of the alias machinery: record which hard
   registers can hold a Pmode argument and create the splay tree that
   records alias-set relationships.  */

void
init_alias_once ()
{
  int i;

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
	&& HARD_REGNO_MODE_OK (i, Pmode))
      SET_HARD_REG_BIT (argument_registers, i);

  alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
}
2444
/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  Runs an iterative dataflow-like propagation over the insn
   stream to fill in reg_base_value, reg_known_value and
   reg_known_equiv_p for the current function.  */

void
init_alias_analysis ()
{
  int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  reg_known_value_size = maxreg;

  /* Only pseudo registers have known values, so bias the array bases by
     -FIRST_PSEUDO_REGISTER; indexing is always done with a pseudo regno.
     end_alias_analysis undoes the bias before freeing.  */
  reg_known_value
    = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
    - FIRST_PSEUDO_REGISTER;
  reg_known_equiv_p
    = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
    - FIRST_PSEUDO_REGISTER;

  /* Overallocate reg_base_value to allow some growth during loop
     optimization.  Loop unrolling can create a large number of
     registers.  */
  reg_base_value_size = maxreg * 2;
  reg_base_value = (rtx *) xcalloc (reg_base_value_size, sizeof (rtx));
  ggc_add_rtx_root (reg_base_value, reg_base_value_size);

  new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
  reg_seen = (char *) xmalloc (reg_base_value_size);
  if (! reload_completed && flag_unroll_loops)
    {
      /* ??? Why are we realloc'ing if we're just going to zero it?  */
      alias_invariant = (rtx *)xrealloc (alias_invariant,
					 reg_base_value_size * sizeof (rtx));
      memset ((char *)alias_invariant, 0, reg_base_value_size * sizeof (rtx));
    }

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of REG_N_SETS to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
	 start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
	 loop, so we're copying arguments.  */
      copying_arguments = 1;

      /* Wipe the potential alias information clean for this pass.  */
      memset ((char *) new_reg_base_value, 0, reg_base_value_size * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset ((char *) reg_seen, 0, reg_base_value_size);

      /* Mark all hard registers which may contain an address.
	 The stack, frame and argument pointers may contain an address.
	 An argument register which can hold a Pmode value may contain
	 an address even if it is not in BASE_REGS.

	 The address expression is VOIDmode for an argument and
	 Pmode for other registers.  */

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (argument_registers, i))
	  new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
						   gen_rtx_REG (Pmode, i));

      new_reg_base_value[STACK_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
      new_reg_base_value[ARG_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
      new_reg_base_value[FRAME_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
	      /* The prologue/epilogue insns are not threaded onto the
		 insn chain until after reload has completed.  Thus,
		 there is no sense wasting time checking if INSN is in
		 the prologue/epilogue until after reload has completed.  */
	      if (reload_completed
		  && prologue_epilogue_contains (insn))
		continue;
#endif

	      /* If this insn has a noalias note, process it,  Otherwise,
	         scan for sets.  A simple set will have no side effects
	         which could change the base value of any other register.  */

	      if (GET_CODE (PATTERN (insn)) == SET
		  && REG_NOTES (insn) != 0
		  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
		record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
	      else
		note_stores (PATTERN (insn), record_set, NULL);

	      set = single_set (insn);

	      /* For a single set of a pseudo, try to record a known
		 value for the destination register.  */
	      if (set != 0
		  && GET_CODE (SET_DEST (set)) == REG
		  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
		{
		  unsigned int regno = REGNO (SET_DEST (set));
		  rtx src = SET_SRC (set);

		  /* Prefer a REG_EQUAL (only valid with a single set) or
		     REG_EQUIV note, provided its value does not vary and
		     does not mention the destination itself.  */
		  if (REG_NOTES (insn) != 0
		      && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
			   && REG_N_SETS (regno) == 1)
			  || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
		      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
		      && ! rtx_varies_p (XEXP (note, 0), 1)
		      && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
		    {
		      reg_known_value[regno] = XEXP (note, 0);
		      reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
		    }
		  /* Fold (plus (reg with known value) const_int).  */
		  else if (REG_N_SETS (regno) == 1
			   && GET_CODE (src) == PLUS
			   && GET_CODE (XEXP (src, 0)) == REG
			   && REGNO (XEXP (src, 0)) >= FIRST_PSEUDO_REGISTER
			   && (reg_known_value[REGNO (XEXP (src, 0))])
			   && GET_CODE (XEXP (src, 1)) == CONST_INT)
		    {
		      rtx op0 = XEXP (src, 0);
		      op0 = reg_known_value[REGNO (op0)];
		      reg_known_value[regno]
			= plus_constant (op0, INTVAL (XEXP (src, 1)));
		      reg_known_equiv_p[regno] = 0;
		    }
		  /* Otherwise a non-varying source is itself the value.  */
		  else if (REG_N_SETS (regno) == 1
			   && ! rtx_varies_p (src, 1))
		    {
		      reg_known_value[regno] = src;
		      reg_known_equiv_p[regno] = 0;
		    }
		}
	    }
	  else if (GET_CODE (insn) == NOTE
		   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	    copying_arguments = 0;
	}

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      for (ui = 0; ui < reg_base_value_size; ui++)
	{
	  if (new_reg_base_value[ui]
	      && new_reg_base_value[ui] != reg_base_value[ui]
	      && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
	    {
	      reg_base_value[ui] = new_reg_base_value[ui];
	      changed = 1;
	    }
	}
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i];

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.

     This loop may not be needed any longer now that the main loop does
     a better job at propagating alias information.  */
  pass = 0;
  do
    {
      changed = 0;
      pass++;
      for (ui = 0; ui < reg_base_value_size; ui++)
	{
	  rtx base = reg_base_value[ui];
	  if (base && GET_CODE (base) == REG)
	    {
	      unsigned int base_regno = REGNO (base);
	      if (base_regno == ui)		/* register set from itself */
		reg_base_value[ui] = 0;
	      else
		reg_base_value[ui] = reg_base_value[base_regno];
	      changed = 1;
	    }
	}
    }
  while (changed && pass < MAX_ALIAS_LOOP_PASSES);

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
}
2676
2677void
2678end_alias_analysis ()
2679{
2680  free (reg_known_value + FIRST_PSEUDO_REGISTER);
2681  reg_known_value = 0;
2682  reg_known_value_size = 0;
2683  free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
2684  reg_known_equiv_p = 0;
2685  if (reg_base_value)
2686    {
2687      ggc_del_root (reg_base_value);
2688      free (reg_base_value);
2689      reg_base_value = 0;
2690    }
2691  reg_base_value_size = 0;
2692  if (alias_invariant)
2693    {
2694      free (alias_invariant);
2695      alias_invariant = 0;
2696    }
2697}
2698