/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "splay-tree.h"

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets do not alias each other.  There is one
   exception, however.  Consider something like:

     struct S {int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
           struct S
            /   \
	   /     \
         |/_     _\|
         int    double

   (The arrows are directed and point downwards.)  If, when comparing
   two alias sets, we can hold one set fixed, and trace the other set
   downwards, and at some point find the first set, the two MEMs can
   alias one another.  In this situation we say the alias set for
   `struct S' is the `superset' and that those for `int' and `double'
   are `subsets'.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */

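/* To continue the `struct S' example above: a front end that assigned
   the (hypothetical) alias set numbers S_SET, INT_SET and DOUBLE_SET
   would record the tree shown above roughly like this (illustrative
   sketch only; the actual numbers come from the language front end):

     record_alias_subset (S_SET, INT_SET);
     record_alias_subset (S_SET, DOUBLE_SET);

   after which mems_in_disjoint_alias_sets_p reports that an `S'
   reference may conflict with an `int' or `double' reference, while
   plain `int' and `double' references remain disjoint.  */
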
typedef struct alias_set_entry {
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  int alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all children.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree children;
}* alias_set_entry;

static rtx canon_rtx			PROTO((rtx));
static int rtx_equal_for_memref_p	PROTO((rtx, rtx));
static rtx find_symbolic_term		PROTO((rtx));
static int memrefs_conflict_p		PROTO((int, rtx, int, rtx,
					       HOST_WIDE_INT));
static void record_set			PROTO((rtx, rtx));
static rtx find_base_term		PROTO((rtx));
static int base_alias_check		PROTO((rtx, rtx, enum machine_mode,
					       enum machine_mode));
static rtx find_base_value		PROTO((rtx));
static int mems_in_disjoint_alias_sets_p PROTO((rtx, rtx));
static int insert_subset_children       PROTO((splay_tree_node,
					       void*));
static alias_set_entry get_alias_set_entry PROTO((int));
static rtx fixed_scalar_and_varying_struct_p PROTO((rtx, rtx, int (*)(rtx)));
static int aliases_everything_p         PROTO((rtx));
static int write_dependence_p           PROTO((rtx, rtx, int));

/* Set up all info needed to perform alias analysis on memory references.  */

#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.

   10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.  The contents of an address
   expression are not used (but they are descriptive for debugging);
   only the address and mode matter.  Pointer equality, not rtx_equal_p,
   determines whether two ADDRESS expressions refer to the same base
   address.  The mode determines whether it is a function argument or
   other special value.  */

rtx *reg_base_value;
rtx *new_reg_base_value;
unsigned int reg_base_value_size;	/* size of reg_base_value array */
#define REG_BASE_VALUE(X) \
  ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
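
/* As an illustration (added commentary, not original code): for a
   fragment such as

     extern int x;
     int *p = &x;
     int *q = p + 1;

   the pseudos holding `p' and `q' typically both end up with the
   SYMBOL_REF for `x' as their base value once the propagation passes
   in init_alias_analysis have run, so references through them can be
   compared against references with unrelated bases.  Which register
   numbers are involved depends entirely on the RTL generator.  */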

/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number, if nonzero the value
   is an expression describing this register in terms of another.

   The length of this array is REG_BASE_VALUE_SIZE.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static rtx *alias_invariant;

/* Vector indexed by N giving the initial (unchanging) value known
   for pseudo-register N.  */
rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.  */
/* ??? This is a problem only on the Convex.  The REG_EQUIV notes created in
   assign_parms mention the arg pointer, and there are explicit insns in the
   RTL that modify the arg pointer.  Thus we must ensure that such insns don't
   get scheduled across each other because that would invalidate the REG_EQUIV
   notes.  One could argue that the REG_EQUIV notes are wrong, but solving
   the problem in the scheduler will likely give better code, so we do it
   here.  */
char *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */

static int copying_arguments;

/* The splay-tree used to store the various alias set entries.  */

static splay_tree alias_sets;

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static alias_set_entry
get_alias_set_entry (alias_set)
     int alias_set;
{
  splay_tree_node sn =
    splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);

  return sn ? ((alias_set_entry) sn->value) : ((alias_set_entry) 0);
}

/* Returns nonzero value if the alias sets for MEM1 and MEM2 are such
   that the two MEMs cannot alias each other.  */

static int
mems_in_disjoint_alias_sets_p (mem1, mem2)
     rtx mem1;
     rtx mem2;
{
  alias_set_entry ase;

#ifdef ENABLE_CHECKING
/* Perform a basic sanity check.  Namely, that there are no alias sets
   if we're not using strict aliasing.  This helps to catch bugs
   whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
   where a MEM is allocated in some way other than by the use of
   gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
   use alias sets to indicate that spilled registers cannot alias each
   other, we might need to remove this check.  */
  if (!flag_strict_aliasing &&
      (MEM_ALIAS_SET (mem1) || MEM_ALIAS_SET (mem2)))
    abort ();
#endif

  /* The code used in varargs macros is often not conforming ANSI C,
     which can trick the compiler into making incorrect aliasing
     assumptions in these functions.  So, we don't use alias sets in
     such a function.  FIXME: This should be moved into the front-end;
     it is a language-dependent notion, and there's no reason not to
     still use these checks to handle globals.  */
  if (current_function_stdarg || current_function_varargs)
    return 0;

  if (!MEM_ALIAS_SET (mem1) || !MEM_ALIAS_SET (mem2))
    /* We have no alias set information for one of the MEMs, so we
       have to assume it can alias anything.  */
    return 0;

  if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
    /* The two alias sets are the same, so they may alias.  */
    return 0;

  /* Iterate through each of the children of the first alias set,
     comparing it with the second alias set.  */
  ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
  if (ase && splay_tree_lookup (ase->children,
				(splay_tree_key) MEM_ALIAS_SET (mem2)))
    return 0;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
  if (ase && splay_tree_lookup (ase->children,
				(splay_tree_key) MEM_ALIAS_SET (mem1)))
    return 0;

  /* The two MEMs are in distinct alias sets, and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 1;
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (node, data)
     splay_tree_node node;
     void *data;
{
  splay_tree_insert ((splay_tree) data,
		     node->key,
		     node->value);

  return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but
   not vice versa.  For example, in C, a store to an `int' can alias a
   structure containing an `int', but not vice versa.  Here, the
   structure would be the SUPERSET and `int' the SUBSET.  This
   function should be called only once per SUPERSET/SUBSET pair.  At
   present any given alias set may only be a subset of one superset.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (superset, subset)
     int superset;
     int subset;
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  if (superset == 0)
    abort ();

  superset_entry = get_alias_set_entry (superset);
  if (!superset_entry)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry =
	(alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new (splay_tree_compare_ints, 0, 0);
      splay_tree_insert (alias_sets,
			 (splay_tree_key) superset,
			 (splay_tree_value) superset_entry);

    }

  subset_entry = get_alias_set_entry (subset);
  if (subset_entry)
    /* There is an entry for the subset.  Enter all of its children
       (if they are not already present) as children of the SUPERSET.  */
    splay_tree_foreach (subset_entry->children,
			insert_subset_children,
			superset_entry->children);

  /* Enter the SUBSET itself as a child of the SUPERSET.  */
  splay_tree_insert (superset_entry->children,
		     (splay_tree_key) subset,
		     /*value=*/0);
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (src)
     register rtx src;
{
  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      /* At the start of a function argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[REGNO (src)];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for hard regs since it can result in a circular dependency
	 chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if (REGNO (src) >= FIRST_PSEUDO_REGISTER
	  && (unsigned) REGNO (src) < reg_base_value_size
	  && reg_base_value[REGNO (src)])
	return reg_base_value[REGNO (src)];

      return src;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;
      /* fall through */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (GET_CODE (src_0) == REG)
	  {
	    temp = find_base_value (src_0);
	    if (temp)
	      src_0 = temp;
	  }

	if (GET_CODE (src_1) == REG)
	  {
	    temp = find_base_value (src_1);
	    if (temp)
	      src_1 = temp;
	  }

	/* Guess which operand is the base address.

	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */

	if (GET_CODE (src_1) == CONST_INT
	    || GET_CODE (src_0) == SYMBOL_REF
	    || GET_CODE (src_0) == LABEL_REF
	    || GET_CODE (src_0) == CONST)
	  return find_base_value (src_0);

	if (GET_CODE (src_0) == CONST_INT
	    || GET_CODE (src_1) == SYMBOL_REF
	    || GET_CODE (src_1) == LABEL_REF
	    || GET_CODE (src_1) == CONST)
	  return find_base_value (src_1);

	/* This might not be necessary anymore.

	   If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
	  return find_base_value (src_0);

	if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant, set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
    case HIGH:
      return find_base_value (XEXP (src, 0));

    default:
      break;
    }

  return 0;
}

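/* A couple of illustrative cases (added commentary, not original code):
   find_base_value applied to (symbol_ref "x") returns the SYMBOL_REF
   itself, while applied to (plus (reg N) (const_int 4)) it guesses that
   the REG is the base and returns whatever base is already recorded for
   register N, or the register itself if nothing is known yet.  */
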
/* Called from init_alias_analysis indirectly through note_stores.  */

/* while scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (dest, set)
     rtx dest, set;
{
  register int regno;
  rtx src;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value.  Note that the following code is
     not detected:
     extern int x, y;  int *p = &x; p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case PLUS:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Called from loop optimization when a new pseudo-register is created.  */
void
record_base_value (regno, val, invariant)
     int regno;
     rtx val;
     int invariant;
{
  if ((unsigned) regno >= reg_base_value_size)
    return;

  /* If INVARIANT is true then this value also describes an invariant
     relationship which can be used to deduce that two registers with
     unknown values are different.  */
  if (invariant && alias_invariant)
    alias_invariant[regno] = val;

  if (GET_CODE (val) == REG)
    {
      if ((unsigned) REGNO (val) < reg_base_value_size)
	{
	  reg_base_value[regno] = reg_base_value[REGNO (val)];
	}
      return;
    }
  reg_base_value[regno] = find_base_value (val);
}

static rtx
canon_rtx (x)
     rtx x;
{
  /* Recursively look for equivalences.  */
  if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && REGNO (x) < reg_known_value_size)
    return reg_known_value[REGNO (x)] == x
      ? x : canon_rtx (reg_known_value[REGNO (x)]);
  else if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  /* We can tolerate LO_SUMs being offset here; these
	     rtl are used for nothing other than comparisons.  */
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant_for_output (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant_for_output (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }
  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (GET_CODE (x) == MEM)
    {
      rtx addr = canon_rtx (XEXP (x, 0));
      if (addr != XEXP (x, 0))
	{
	  rtx new = gen_rtx_MEM (GET_MODE (x), addr);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
	  x = new;
	}
    }
  return x;
}

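/* For illustration (added commentary, with made-up register numbers):
   if reg_known_value[70] is (plus (reg 65) (const_int 8)), then
   canon_rtx applied to (plus (reg 70) (const_int 4)) folds the two
   constants and yields (plus (reg 65) (const_int 12)), so that equal
   addresses spelled through different pseudos compare equal in
   rtx_equal_for_memref_p.  */
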
/* Return 1 if X and Y are identical-looking rtx's.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (x, y)
     rtx x, y;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;
  x = canon_rtx (x);
  y = canon_rtx (y);

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively.  */

  if (code == REG)
    return REGNO (x) == REGNO (y);
  if (code == LABEL_REF)
    return XEXP (x, 0) == XEXP (y, 0);
  if (code == SYMBOL_REF)
    return XSTR (x, 0) == XSTR (y, 0);
  if (code == CONST_INT)
    return INTVAL (x) == INTVAL (y);
  if (code == ADDRESSOF)
    return REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0)) && XINT (x, 1) == XINT (y, 1);

  /* For commutative operations, the RTX's match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (x)
     rtx x;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (GET_RTX_CLASS (code) == 'o')
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
	{
	  t = find_symbolic_term (XEXP (x, i));
	  if (t != 0)
	    return t;
	}
      else if (fmt[i] == 'E')
	break;
    }
  return 0;
}

static rtx
find_base_term (x)
     register rtx x;
{
  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
      return find_base_term (XEXP (x, 0));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* fall through */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
	return REG_BASE_VALUE (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}

/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (x, y, x_mode, y_mode)
     rtx x, y;
     enum machine_mode x_mode, y_mode;
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;
      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;
      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;
      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}

/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

rtx
addr_side_effect_eval (addr, size, n_refs)
     rtx addr;
     int size;
     int n_refs;
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
  else
    addr = XEXP (addr, 0);

  return addr;
}

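/* For example (added commentary): with a 4-byte reference whose address
   is (post_inc (reg P)), the first reference (N_REFS == 0) uses the
   unmodified address, so (reg P) is returned, while for
   (pre_inc (reg P)) the same reference uses (plus (reg P) (const_int 4)).  */
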
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */

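/* A short worked example (added commentary, not original code): for a
   4-byte reference at (plus (reg P) (const_int 8)) against a 4-byte
   reference at (reg P), the PLUS case below recurses with C = -8;
   since the remaining addresses compare equal and YSIZE + C = -4 is
   not positive, the references are disjoint and 0 is returned.  Had
   the offset been 2 instead, YSIZE + C = 2 would be positive and the
   accesses would be reported as conflicting.  */
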
static int
memrefs_conflict_p (xsize, x, ysize, y, c)
     register rtx x, y;
     int xsize, ysize;
     HOST_WIDE_INT c;
{
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = canon_rtx (addr_side_effect_eval (y, ysize, 0));

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
	  {
	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
	    rtx i_x, i_y;	/* invariant relationships of X and Y */

	    i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
	    i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];

	    if (i_x == 0 && i_y == 0)
	      break;

	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
				      ysize, i_y ? i_y : y, c))
	      return 0;
	  }
	break;

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize < 0 || ysize < 0
		|| (rtx_equal_for_memref_p (x, y)
		    && (xsize == 0 || ysize == 0
		        || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  return 1;
}

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We must
   allow QImode aliasing because the ANSI C standard allows character
   pointers to alias anything.  We are assuming that characters are
   always QImode here.  We also must allow AND addresses, because they may
   generate accesses outside the object being referenced.  This is used to
   generate aligned addresses from unaligned addresses, for instance, the
   alpha storeqi_unaligned pattern.  */

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (mem, x)
     rtx mem;
     rtx x;
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.  */

static rtx
fixed_scalar_and_varying_struct_p (mem1, mem2, varies_p)
     rtx mem1;
     rtx mem2;
     int (*varies_p) PROTO((rtx));
{
  rtx mem1_addr = XEXP (mem1, 0);
  rtx mem2_addr = XEXP (mem2, 0);

  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr) && varies_p (mem2_addr))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr) && !varies_p (mem2_addr))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}

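/* For instance (added commentary): a direct reference to a file-scope
   `int' variable is a scalar at a fixed address, while `p->field',
   with `p' reassigned inside a loop, is a structure reference at a
   varying address; the test above lets those two references be
   reordered even when nothing else is known about `p'.  */
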
/* Returns nonzero if something about the mode or address format MEM1
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (mem)
     rtx mem;
{
  if (GET_MODE (mem) == QImode)
    /* ANSI C says that a `char*' can point to anything.  */
    return 1;

  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}

/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (mem, mem_mode, x, varies)
     rtx mem;
     enum machine_mode mem_mode;
     rtx x;
     int (*varies) PROTO((rtx));
{
  register rtx x_addr, mem_addr;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* If X is an unchanging read, then it can't possibly conflict with any
     non-unchanging store.  It may conflict with an unchanging write though,
     because there may be a single store to this address to initialize it.
     Just fall through to the code below to resolve the case where we have
     both an unchanging read and an unchanging write.  This won't handle all
     cases optimally, but the possible performance loss should be
     negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (XEXP (x, 0));
  mem_addr = canon_rtx (XEXP (mem, 0));

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
			    SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return !fixed_scalar_and_varying_struct_p (mem, x, varies);
}

/* Returns non-zero if a write to X might alias a previous read from
   (or, if WRITEP is non-zero, a write to) MEM.  */

static int
write_dependence_p (mem, x, writep)
     rtx mem;
     rtx x;
     int writep;
{
  rtx x_addr, mem_addr;
  rtx fixed_scalar;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* If MEM is an unchanging read, then it can't possibly conflict with
     the store to X, because there is at most one store to MEM, and it must
     have occurred somewhere before MEM.  */
  if (!writep && RTX_UNCHANGING_P (mem))
    return 0;

  if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x),
			  GET_MODE (mem)))
    return 0;

  x = canon_rtx (x);
  mem = canon_rtx (mem);

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  x_addr = XEXP (x, 0);
  mem_addr = XEXP (mem, 0);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
			   SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
	  && !(fixed_scalar == x && !aliases_everything_p (mem)));
}

/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (mem, x)
     rtx mem;
     rtx x;
{
  return write_dependence_p (mem, x, /*writep=*/0);
}

/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (mem, x)
     register rtx mem;
     register rtx x;
{
  return write_dependence_p (mem, x, /*writep=*/1);
}


static HARD_REG_SET argument_registers;

void
init_alias_once ()
{
  register int i;

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
	&& HARD_REGNO_MODE_OK (i, Pmode))
      SET_HARD_REG_BIT (argument_registers, i);

  alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
}

void
init_alias_analysis ()
{
  int maxreg = max_reg_num ();
  int changed, pass;
  register int i;
  register unsigned int ui;
  register rtx insn;

  reg_known_value_size = maxreg;

  reg_known_value
    = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
    - FIRST_PSEUDO_REGISTER;
  reg_known_equiv_p =
    oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
  bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
	 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
  bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
	 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));

  /* Overallocate reg_base_value to allow some growth during loop
     optimization.  Loop unrolling can create a large number of
     registers.  */
  reg_base_value_size = maxreg * 2;
  reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
  new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
  reg_seen = (char *)alloca (reg_base_value_size);
  bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
  if (! reload_completed && flag_unroll_loops)
    {
      alias_invariant = (rtx *)xrealloc (alias_invariant,
					 reg_base_value_size * sizeof (rtx));
      bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
    }


  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of REG_N_SETS to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
	 start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
	 loop, so we're copying arguments.  */
      copying_arguments = 1;

      /* Wipe the potential alias information clean for this pass.  */
      bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      bzero ((char *) reg_seen, reg_base_value_size);

      /* Mark all hard registers which may contain an address.
	 The stack, frame and argument pointers may contain an address.
	 An argument register which can hold a Pmode value may contain
	 an address even if it is not in BASE_REGS.

	 The address expression is VOIDmode for an argument and
	 Pmode for other registers.  */

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (argument_registers, i))
	  new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
						   gen_rtx_REG (Pmode, i));

      new_reg_base_value[STACK_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
      new_reg_base_value[ARG_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
      new_reg_base_value[FRAME_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
      if (struct_value_incoming_rtx
	  && GET_CODE (struct_value_incoming_rtx) == REG)
	new_reg_base_value[REGNO (struct_value_incoming_rtx)]
	  = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);

      if (static_chain_rtx
	  && GET_CODE (static_chain_rtx) == REG)
	new_reg_base_value[REGNO (static_chain_rtx)]
	  = gen_rtx_ADDRESS (Pmode, static_chain_rtx);

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      rtx note, set;
	      /* If this insn has a noalias note, process it.  Otherwise,
	         scan for sets.  A simple set will have no side effects
	         which could change the base value of any other register.  */

	      if (GET_CODE (PATTERN (insn)) == SET
		  && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
		record_set (SET_DEST (PATTERN (insn)), NULL_RTX);
	      else
		note_stores (PATTERN (insn), record_set);

	      set = single_set (insn);

	      if (set != 0
		  && GET_CODE (SET_DEST (set)) == REG
		  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
		  && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
		       && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
		      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
		  && GET_CODE (XEXP (note, 0)) != EXPR_LIST
		  && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
		{
		  int regno = REGNO (SET_DEST (set));
		  reg_known_value[regno] = XEXP (note, 0);
		  reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
		}
	    }
	  else if (GET_CODE (insn) == NOTE
		   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	    copying_arguments = 0;
	}

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      for (ui = 0; ui < reg_base_value_size; ui++)
	{
	  if (new_reg_base_value[ui]
	      && new_reg_base_value[ui] != reg_base_value[ui]
	      && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
	    {
	      reg_base_value[ui] = new_reg_base_value[ui];
	      changed = 1;
	    }
	}
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i];

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.

     This loop may not be needed any longer now that the main loop does
     a better job at propagating alias information.  */
  pass = 0;
  do
    {
      changed = 0;
      pass++;
      for (ui = 0; ui < reg_base_value_size; ui++)
	{
	  rtx base = reg_base_value[ui];
	  if (base && GET_CODE (base) == REG)
	    {
	      unsigned int base_regno = REGNO (base);
	      if (base_regno == ui)		/* register set from itself */
		reg_base_value[ui] = 0;
	      else
		reg_base_value[ui] = reg_base_value[base_regno];
	      changed = 1;
	    }
	}
    }
  while (changed && pass < MAX_ALIAS_LOOP_PASSES);

  new_reg_base_value = 0;
  reg_seen = 0;
}

void
end_alias_analysis ()
{
  reg_known_value = 0;
  reg_base_value = 0;
  reg_base_value_size = 0;
  if (alias_invariant)
    {
      free ((char *)alias_invariant);
      alias_invariant = 0;
    }
}