/* Alias analysis for trees.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "timevar.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "tree-ssa-structalias.h"
#include "convert.h"
#include "params.h"
#include "ipa-type-escape.h"
#include "vec.h"
#include "bitmap.h"
#include "pointer-set.h"

/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   aliasing.  */
static bitmap_obstack alias_obstack;

/* 'true' after aliases have been computed (see compute_may_aliases).  */
bool aliases_computed_p;

/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  bitmap may_aliases;
};


/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};


/* Local variables.  */
static struct alias_stats_d alias_stats;

/* Local functions.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);

/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.  */
tree global_var;


/* Compute may-alias information for every variable referenced in function
   FNDECL.

   Alias analysis proceeds in 3 main phases:

   1- Points-to and escape analysis.

   This phase walks the use-def chains in the SSA web looking for three
   things:

	* Assignments of the form P_i = &VAR
	* Assignments of the form P_i = malloc()
	* Pointers and ADDR_EXPR that escape the current function.

   The concept of 'escaping' is the same one used in the Java world.  When
   a pointer or an ADDR_EXPR escapes, it means that it has been exposed
   outside of the current function.  So, assignment to global variables,
   function arguments and returning a pointer are all escape sites, as are
   conversions between pointers and integers.
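
   For illustration only (the names below are made up, they are not part
   of this pass), every statement in the following fragment is an escape
   site for the pointer P: the store into a global, the call argument,
   the conversion to an integer and the return statement.

	    int *global_ptr;
	    long a_long;

	    int *
	    escape_example (int *p)
	    {
	      global_ptr = p;
	      some_extern_fn (p);
	      a_long = (long) p;
	      return p;
	    }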

   This is where we are currently limited.  Since not everything is renamed
   into SSA, we lose track of escape properties when a pointer is stashed
   inside a field in a structure, for instance.  In those cases, we are
   assuming that the pointer does escape.

   We use escape analysis to determine whether a variable is
   call-clobbered.  Simply put, if an ADDR_EXPR escapes, then the variable
   is call-clobbered.  If a pointer P_i escapes, then all the variables
   pointed-to by P_i (and its memory tag) also escape.

   2- Compute flow-sensitive aliases

   We have two classes of memory tags.  The first class is associated with
   the pointed-to data type of the pointers in the program; these tags are
   called "type memory tags" (TMT).  The other class is associated with
   SSA_NAMEs and is called "name memory tags" (NMT).  The basic idea is
   that when adding operands for an INDIRECT_REF *P_i, we first check
   whether P_i has a name tag; if it does, we use it, because it will have
   more precise aliasing information.  Otherwise, we use the standard type
   tag.

   In this phase, we go through all the pointers we found in points-to
   analysis and create alias sets for the name memory tags associated with
   each pointer P_i.  If P_i escapes, we mark call-clobbered the variables
   it points to and its tag.


   3- Compute flow-insensitive aliases

   This pass will compare the alias set of every type memory tag and every
   addressable variable found in the program.  Given a type memory tag TMT
   and an addressable variable V, if the alias sets of TMT and V conflict
   (as computed by may_alias_p), then V is marked as an alias tag and added
   to the alias set of TMT.

   For instance, consider the following function:

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i > 10)
	        p = &a;
	      else
	        p = &b;

	      *p = 3;
	      a = b + 2;
	      return *p;
	    }

   After aliasing analysis has finished, the type memory tag for pointer
   'p' will have two aliases, namely variables 'a' and 'b'.  Every time
   pointer 'p' is dereferenced, we want to mark the operation as a
   potential reference to 'a' and 'b'.

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i_2 > 10)
		p_4 = &a;
	      else
		p_6 = &b;
	      # p_1 = PHI <p_4(1), p_6(2)>;

	      # a_7 = V_MAY_DEF <a_3>;
	      # b_8 = V_MAY_DEF <b_5>;
	      *p_1 = 3;

	      # a_9 = V_MAY_DEF <a_7>
	      # VUSE <b_8>
	      a_9 = b_8 + 2;

	      # VUSE <a_9>;
	      # VUSE <b_8>;
	      return *p_1;
	    }

   In certain cases, the list of may aliases for a pointer may grow too
   large.  This may cause an explosion in the number of virtual operands
   inserted in the code, resulting in increased memory consumption and
   compilation time.

   When the number of virtual operands needed to represent aliased
   loads and stores grows too large (configurable with @option{--param
   max-aliased-vops}), alias sets are grouped to avoid severe
   compile-time slow downs and memory consumption.  See group_aliases.  */

static void
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if it is passed
     as a function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered type tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            update_stmt_if_modified (bsi_stmt (bsi));
          }
      }
  }

}

struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};


/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned count;
};


/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct count_ptr_d *count_p = (struct count_ptr_d *) data;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
    count_p->count++;

  return NULL_TREE;
}


/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The two counts are stored in *NUM_USES_P and
   *NUM_DEREFS_P respectively.  *IS_STORE_P is set to 'true' if at
   least one of those dereferences is a store operation.  */
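
/* For example (an illustrative sketch only, not taken from a particular
   test case), for the GIMPLE statement

	*x_1 = foo (x_1, *x_1);

   calling count_uses_and_derefs with PTR == x_1 would report three uses
   (*NUM_USES_P == 3), two dereferences (*NUM_DEREFS_P == 2) and
   *IS_STORE == true, because the dereference on the left-hand side is a
   store.  */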

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
		       unsigned *num_derefs_p, bool *is_store)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_derefs_p = 0;
  *is_store = false;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      if (TREE_CODE (stmt) == MODIFY_EXPR)
	{
	  lhs = TREE_OPERAND (stmt, 0);
	  rhs = TREE_OPERAND (stmt, 1);
	}
      else if (TREE_CODE (stmt) == RETURN_EXPR)
	{
	  tree e = TREE_OPERAND (stmt, 0);
	  lhs = TREE_OPERAND (e, 0);
	  rhs = TREE_OPERAND (e, 1);
	}
      else if (TREE_CODE (stmt) == ASM_EXPR)
	{
	  lhs = ASM_OUTPUTS (stmt);
	  rhs = ASM_INPUTS (stmt);
	}
      else
	{
	  lhs = NULL_TREE;
	  rhs = stmt;
	}

      if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&lhs, count_ptr_derefs, &count, NULL);
	  *is_store = true;
	  *num_derefs_p = count.count;
	}

      if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&rhs, count_ptr_derefs, &count, NULL);
	  *num_derefs_p += count.count;
	}
    }

  gcc_assert (*num_uses_p >= *num_derefs_p);
}

/* Initialize the data structures used for alias analysis.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  bitmap_obstack_initialize (&alias_obstack);
  ai = xcalloc (1, sizeof (struct alias_info));
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
  ai->written_vars = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      unsigned i;

      /* Clear the set of addressable variables.  In this case, we can
	 just clear the set because addressability is only computed
	 here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	{
	  var_ann_t ann = var_ann (var);

	  ann->is_alias_tag = 0;
	  ann->may_aliases = NULL;
	  NUM_REFERENCES_CLEAR (ann);

	  /* Since we are about to re-discover call-clobbered
	     variables, clear the call-clobbered flag.  Variables that
	     are intrinsically call-clobbered (globals, local statics,
	     etc) will not be marked by the aliasing code, so we can't
	     remove them from CALL_CLOBBERED_VARS.

	     NB: STRUCT_FIELDS are still call clobbered if they are for
	     a global variable, so we *don't* clear their call clobberedness
	     just because they are tags, though we will clear it if they
	     aren't for global variables.  */
	  if (ann->mem_tag_kind == NAME_TAG
	      || ann->mem_tag_kind == TYPE_TAG
	      || !is_global_var (var))
	    clear_call_clobbered (var);
	}

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_null = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}


/* Deallocate memory used by alias analysis.  */

static void
delete_alias_info (struct alias_info *ai)
{
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  sbitmap_free (ai->ssa_names_visited);
  ai->processed_ptrs = NULL;

  for (i = 0; i < ai->num_addressable_vars; i++)
    free (ai->addressable_vars[i]);

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      var_ann_t ann = var_ann (var);
      NUM_REFERENCES_CLEAR (ann);
    }

  free (ai->addressable_vars);

  for (i = 0; i < ai->num_pointers; i++)
    free (ai->pointers[i]);
  free (ai->pointers);

  BITMAP_FREE (ai->written_vars);
  BITMAP_FREE (ai->dereferenced_ptrs_store);
  BITMAP_FREE (ai->dereferenced_ptrs_load);
  bitmap_obstack_release (&alias_obstack);
  free (ai);

  delete_points_to_sets ();
}

/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.  */
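
/* For instance (hypothetical SSA names, used only to illustrate the
   sharing rule above), if points-to(p_1) = { a, b } and
   points-to(q_3) = { a, b }, then p_1 and q_3 receive the same name
   memory tag, say NMT.5.  Copy-propagating p_1 into q_3 (or vice
   versa) then never forces the name tags themselves in and out of
   SSA form.  */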

static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (!ptr
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || !SSA_NAME_PTR_INFO (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      /* Set pt_anything on the pointers without pt_vars filled in so
	 that they are assigned a type tag.  */

      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
	VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
	set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      size_t j;
      tree ptr2;
      tree old_name_tag = pi->name_mem_tag;

      /* If PTR points to a set of variables, check if we don't
	 have another pointer Q with the same points-to set before
	 creating a tag.  If so, use Q's tag instead of creating a
	 new one.

	 This is important for not creating unnecessary symbols
	 and also for copy propagation.  If we ever need to
	 propagate PTR into Q or vice-versa, we would run into
	 problems if they both had different name tags because
	 they would have different SSA version numbers (which
	 would force us to take the name tags in and out of SSA).  */
      for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
	{
	  struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);

	  if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
	    {
	      pi->name_mem_tag = qi->name_mem_tag;
	      break;
	    }
	}

      /* If we didn't find a pointer with the same points-to set
	 as PTR, create a new name tag if needed.  */
      if (pi->name_mem_tag == NULL_TREE)
	pi->name_mem_tag = get_nmt_for (ptr);

      /* If the new name tag computed for PTR is different than
	 the old name tag that it used to have, then the old tag
	 needs to be removed from the IL, so we mark it for
	 renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
	mark_sym_for_renaming (old_name_tag);

      TREE_THIS_VOLATILE (pi->name_mem_tag)
	|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }

  VEC_free (tree, heap, with_ptvars);
}


/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */
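
/* For example (a purely illustrative sketch), if points-to(p_1) = { a, b }
   and p_1 escapes through a call, then p_1's name memory tag, the type
   memory tag of the base pointer 'p', and the variables 'a' and 'b' are
   all marked call-clobbered, and 'a' and 'b' are added to the may-alias
   sets of both tags.  */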

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      if (!find_what_p_points_to (ptr))
	set_pt_anything (ptr);
    }

  create_name_tags ();

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      unsigned j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
      bitmap_iterator bi;

      if (pi->value_escapes_p || pi->pt_anything)
	{
	  /* If PTR escapes or may point to anything, then its associated
	     memory tags and pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag);

	  if (v_ann->type_mem_tag)
	    mark_call_clobbered (v_ann->type_mem_tag);

	  if (pi->pt_vars)
	    EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
	      mark_call_clobbered (referenced_var (j));
	}

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
	  {
	    add_may_alias (pi->name_mem_tag, referenced_var (j));
	    add_may_alias (v_ann->type_mem_tag, referenced_var (j));
	  }

      /* If the name tag is call clobbered, so is the type tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && v_ann->type_mem_tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (v_ann->type_mem_tag);
    }
}


/* Compute type-based alias sets.  Traverse all the pointers and
   addressable variables found in setup_pointers_and_addressables.

   For every pointer P in AI->POINTERS and addressable variable V in
   AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's type
   memory tag (TMT) if their alias sets conflict.  V is then marked as
   an alias tag so that the operand scanner knows that statements
   containing V have aliased operands.  */

static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --param max-aliased-vops, we enable alias
     grouping.  */
  ai->total_alias_vops = 0;

  /* For every pointer P, determine which addressable variables may alias
     with P's type memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = var_ann (p_map->var)->type_mem_tag;
      var_ann_t tag_ann = var_ann (tag);
      tree var;

      p_map->total_alias_vops = 0;
      p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);

      /* Add any pre-existing may_aliases to the bitmap used to represent
	 TAG's alias set in case we need to group aliases.  */
      if (tag_ann->may_aliases)
	for (j = 0; j < VARRAY_ACTIVE_SIZE (tag_ann->may_aliases); ++j)
	  bitmap_set_bit (p_map->may_aliases,
			  DECL_UID (VARRAY_TREE (tag_ann->may_aliases, j)));

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  bool tag_stored_p, var_stored_p;

	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.

	     Note that this effectively amounts to randomly accessing
	     elements of the sparse bitset, which can be highly
	     inefficient.  So we first check the call_clobbered status
	     of the tag and variable before querying the bitmap.  */
	  tag_stored_p = is_call_clobbered (tag)
	                 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
	  var_stored_p = is_call_clobbered (var)
	                 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
	  if (!tag_stored_p && !var_stored_p)
	    continue;

	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
	    {
	      size_t num_tag_refs, num_var_refs;

	      num_tag_refs = NUM_REFERENCES (tag_ann);
	      num_var_refs = NUM_REFERENCES (v_ann);

	      /* Add VAR to TAG's may-aliases set.  */

	      /* We should never have a var with subvars here, because
	         they shouldn't get into the set of addressable vars.  */
	      gcc_assert (!var_can_have_subvars (var)
			  || get_subvars_for_var (var) == NULL);

	      add_may_alias (tag, var);
	      /* Update the bitmap used to represent TAG's alias set
		 in case we need to group aliases.  */
	      bitmap_set_bit (p_map->may_aliases, DECL_UID (var));

	      /* Update the total number of virtual operands due to
		 aliasing.  Since we are adding one more alias to TAG's
		 may-aliases set, the total number of virtual operands due
		 to aliasing will be increased by the number of references
		 made to VAR and TAG (every reference to TAG will also
		 count as a reference to VAR).  */
	      ai->total_alias_vops += (num_var_refs + num_tag_refs);
	      p_map->total_alias_vops += (num_var_refs + num_tag_refs);
	    }
	}
    }

  /* Since this analysis is based exclusively on symbols, it fails to
     handle cases where two pointers P and Q have different memory
     tags with conflicting alias set numbers but no aliased symbols in
     common.

     For example, suppose that we have two memory tags TMT.1 and TMT.2
     such that

     		may-aliases (TMT.1) = { a }
		may-aliases (TMT.2) = { b }

     and the alias set number of TMT.1 conflicts with that of TMT.2.
     Since they don't have symbols in common, loads and stores from
     TMT.1 and TMT.2 will seem independent of each other, which will
     lead to the optimizers making invalid transformations (see
     testsuite/gcc.c-torture/execute/pr15262-[12].c).

     To avoid this problem, we do a final traversal of AI->POINTERS
     looking for pairs of pointers that have no aliased symbols in
     common and yet have conflicting alias set numbers.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map1 = ai->pointers[i];
      tree tag1 = var_ann (p_map1->var)->type_mem_tag;
      bitmap may_aliases1 = p_map1->may_aliases;

      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  struct alias_map_d *p_map2 = ai->pointers[j];
	  tree tag2 = var_ann (p_map2->var)->type_mem_tag;
	  bitmap may_aliases2 = p_map2->may_aliases;

	  /* If the pointers may not point to each other, do nothing.  */
	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
	    continue;

	  /* The two pointers may alias each other.  If they already have
	     symbols in common, do nothing.  */
	  if (bitmap_intersect_p (may_aliases1, may_aliases2))
	    continue;

	  if (!bitmap_empty_p (may_aliases2))
	    {
	      unsigned int k;
	      bitmap_iterator bi;

	      /* Add all the aliases for TAG2 into TAG1's alias set.
		 FIXME, update grouping heuristic counters.  */
	      EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
		add_may_alias (tag1, referenced_var (k));
	      bitmap_ior_into (may_aliases1, may_aliases2);
	    }
	  else
	    {
	      /* Since TAG2 does not have any aliases of its own, add
		 TAG2 itself to the alias set of TAG1.  */
	      add_may_alias (tag1, tag2);
	      bitmap_set_bit (may_aliases1, DECL_UID (tag2));
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);
}


/* Comparison function for qsort used in group_aliases.  */

static int
total_alias_vops_cmp (const void *p, const void *q)
{
  const struct alias_map_d **p1 = (const struct alias_map_d **)p;
  const struct alias_map_d **p2 = (const struct alias_map_d **)q;
  long n1 = (*p1)->total_alias_vops;
  long n2 = (*p2)->total_alias_vops;

  /* We want to sort in descending order.  */
  return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
}

/* Group all the aliases for TAG to make TAG represent all the
   variables in its alias set.  Update the total number
   of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS).  This
   function will make TAG be the unique alias tag for all the
   variables in its may-aliases.  So, given:

   	may-aliases(TAG) = { V1, V2, V3 }

   This function will group the variables into:

   	may-aliases(V1) = { TAG }
	may-aliases(V2) = { TAG }
	may-aliases(V3) = { TAG }  */

static void
group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
{
  unsigned int i;
  var_ann_t tag_ann = var_ann (tag);
  size_t num_tag_refs = NUM_REFERENCES (tag_ann);
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
    {
      tree var = referenced_var (i);
      var_ann_t ann = var_ann (var);

      /* Make TAG the unique alias of VAR.  */
      ann->is_alias_tag = 0;
      ann->may_aliases = NULL;

      /* Note that VAR and TAG may be the same if the function has no
	 addressable variables (see the discussion at the end of
	 setup_pointers_and_addressables).  */
      if (var != tag)
	add_may_alias (var, tag);

      /* Reduce total number of virtual operands contributed
	 by TAG on behalf of VAR.  Notice that the references to VAR
	 itself won't be removed.  We will merely replace them with
	 references to TAG.  */
      ai->total_alias_vops -= num_tag_refs;
    }

  /* We have reduced the number of virtual operands that TAG makes on
     behalf of all the variables formerly aliased with it.  However,
     we have also "removed" all the virtual operands for TAG itself,
     so we add them back.  */
  ai->total_alias_vops += num_tag_refs;

  /* TAG no longer has any aliases.  */
  tag_ann->may_aliases = NULL;
}


/* Group may-aliases sets to reduce the number of virtual operands due
   to aliasing.

     1- Sort the list of pointers in decreasing number of contributed
	virtual operands.

     2- Take the first entry in AI->POINTERS and reverse the roles of
	the memory tag and its aliases.  Usually, whenever an aliased
	variable Vi is found to alias with a memory tag T, we add Vi
	to the may-aliases set for T, meaning that after alias
	analysis, we will have:

		may-aliases(T) = { V1, V2, V3, ..., Vn }

	This means that every statement that references T will get 'n'
	virtual operands, one for each of the Vi variables.  But, when alias
	grouping is enabled, we make T an alias tag and add it to the
	alias set of all the Vi variables:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		...
		may-aliases(Vn) = { T }

	This has two effects: (a) statements referencing T will only get
	a single virtual operand, and, (b) all the variables Vi will now
	appear to alias each other.  So, we lose alias precision to
	improve compile time.  But, in theory, a program with such a high
	level of aliasing should not be very optimizable in the first
	place.

     3- Since variables may be in the alias set of more than one
	memory tag, the grouping done in step (2) needs to be extended
	to all the memory tags that have a non-empty intersection with
	the may-aliases set of tag T.  For instance, if we originally
	had these may-aliases sets:

		may-aliases(T) = { V1, V2, V3 }
		may-aliases(R) = { V2, V4 }

	In step (2) we would have reversed the aliases for T as:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(V3) = { T }

	But note that now V2 is no longer aliased with R.  We could
	add R to may-aliases(V2), but we are in the process of
	grouping aliases to reduce virtual operands so what we do is
	add V4 to the grouping to obtain:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(V3) = { T }
		may-aliases(V4) = { T }

     4- If the total number of virtual operands due to aliasing is
	still above the threshold set by max-aliased-vops, go back to (2).  */

static void
group_aliases (struct alias_info *ai)
{
  size_t i;

  /* Sort the POINTERS array in descending order of contributed
     virtual operands.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
         total_alias_vops_cmp);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
      bitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)
	continue;

      /* See if TAG1 had any aliases in common with other type tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  bitmap tag2_aliases = ai->pointers[j]->may_aliases;

          if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
	    {
	      tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;

	      bitmap_ior_into (tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      bitmap_clear (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;
	    }
	}

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)
	break;
    }

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = TMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to TMT.20 ...
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'TMT.20' in between.  To prevent this we have to
     replace 'a' with 'TMT.20' in the name tag of p_5.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      size_t j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      varray_type aliases;

      if (name_tag == NULL_TREE)
	continue;

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; aliases && j < VARRAY_ACTIVE_SIZE (aliases); j++)
	{
	  tree alias = VARRAY_TREE (aliases, j);
	  var_ann_t ann = var_ann (alias);

	  if ((ann->mem_tag_kind == NOT_A_TAG
	       || ann->mem_tag_kind == STRUCT_FIELD)
	      && ann->may_aliases)
	    {
	      tree new_alias;

	      gcc_assert (VARRAY_ACTIVE_SIZE (ann->may_aliases) == 1);

	      new_alias = VARRAY_TREE (ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file,
	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}


/* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS.  */

static void
create_alias_map_for (tree var, struct alias_info *ai)
{
  struct alias_map_d *alias_map;
  alias_map = xcalloc (1, sizeof (*alias_map));
  alias_map->var = var;
  alias_map->set = get_alias_set (var);
  ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
}


/* Create memory tags for all the dereferenced pointers and build the
   ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
   sets.  Based on the address escape and points-to information collected
   earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
   variables whose address is not needed anymore.  */
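
/* For example (an illustrative sketch only), if the early cleanups have
   turned

	p = &a;
	*p = 1;

   into a direct store 'a = 1' and removed the now-dead assignment to
   'p', then the address of 'a' is no longer taken anywhere, so its
   TREE_ADDRESSABLE flag can be cleared and 'a' can be rewritten into
   SSA form as a regular GIMPLE register.  */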

static void
setup_pointers_and_addressables (struct alias_info *ai)
{
  size_t n_vars, num_addressable_vars, num_pointers;
  referenced_var_iterator rvi;
  tree var;
  VEC (tree, heap) *varvec = NULL;
  safe_referenced_var_iterator srvi;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));

	  num_pointers++;
	}
    }

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique type tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = xcalloc (num_addressable_vars,
				  sizeof (struct alias_map_d *));
  ai->pointers = xcalloc (num_pointers, sizeof (struct alias_map_d *));
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Since we will be creating type memory tags within this loop, cache the
     value of NUM_REFERENCED_VARS to avoid processing the additional tags
     unnecessarily.  */
  n_vars = num_referenced_vars;

  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
    {
      var_ann_t v_ann = var_ann (var);
      subvar_t svars;

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_flow_insensitive_aliasing.  Similarly, type memory
	 tags are already accounted for when we process their
	 associated pointer.

         Structure fields, on the other hand, have to have some of this
         information processed for them, but it's pointless to mark them
         non-addressable (since they are fake variables anyway).  */
      if (v_ann->mem_tag_kind != NOT_A_TAG
	  && v_ann->mem_tag_kind != STRUCT_FIELD)
	continue;

      /* Remove the ADDRESSABLE flag from every addressable variable whose
         address is not needed anymore.  This is caused by the propagation
         of ADDR_EXPR constants into INDIRECT_REF expressions and the
         removal of dead pointer assignments done by the early scalar
         cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	{
	  if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
	      && TREE_CODE (var) != RESULT_DECL
	      && !is_global_var (var))
	    {
	      bool okay_to_mark = true;

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      mark_sym_for_renaming (var);

	      /* If VAR can have sub-variables, and any of its
		 sub-variables has its address taken, then we cannot
		 remove the addressable flag from VAR.  */
	      if (var_can_have_subvars (var)
		  && (svars = get_subvars_for_var (var)))
		{
		  subvar_t sv;

		  for (sv = svars; sv; sv = sv->next)
		    {
		      if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
			okay_to_mark = false;
		      mark_sym_for_renaming (sv->var);
		    }
		}

	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
	      if (okay_to_mark)
		mark_non_addressable (var);
	    }
	}

      /* Global variables and addressable locals may be aliased.  Create an
         entry in ADDRESSABLE_VARS for VAR.  */
      if (may_be_aliased (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	{
	  create_alias_map_for (var, ai);
	  mark_sym_for_renaming (var);
	}

      /* Add pointer variables that have been dereferenced to the POINTERS
         array and create a type memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
	       || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
	    {
	      tree tag;
	      var_ann_t t_ann;

	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_tmt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The type tag will need to be renamed into SSA
		 afterwards.  Note that we cannot do this inside
		 get_tmt_for because aliasing may run multiple times
		 and we only create type tags the first time.  */
	      mark_sym_for_renaming (tag);

	      /* Similarly, if pointer VAR used to have another type
		 tag, we will need to process it in the renamer to
		 remove the stale virtual operands.  */
	      if (v_ann->type_mem_tag)
		mark_sym_for_renaming (v_ann->type_mem_tag);

	      /* Associate the tag with pointer VAR.  */
	      v_ann->type_mem_tag = tag;

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
		bitmap_set_bit (ai->written_vars, DECL_UID (tag));

	      /* If pointer VAR is a global variable or a PARM_DECL,
		 then its memory tag should be considered a global
		 variable.  */
	      if (TREE_CODE (var) == PARM_DECL || is_global_var (var))
		mark_call_clobbered (tag);

	      /* All the dereferences of pointer VAR count as
		 references of TAG.  Since TAG can be associated with
		 several pointers, add the dereferences of VAR to the
		 TAG.  */
	      NUM_REFERENCES_SET (t_ann,
				  NUM_REFERENCES (t_ann)
				  + NUM_REFERENCES (v_ann));
	    }
	  else
	    {
	      /* The pointer has not been dereferenced.  If it had a
		 type memory tag, remove it and mark the old tag for
		 renaming to remove it from the IL.  */
	      var_ann_t ann = var_ann (var);
	      tree tag = ann->type_mem_tag;
	      if (tag)
		{
		  mark_sym_for_renaming (tag);
		  ann->type_mem_tag = NULL_TREE;
		}
	    }
	}
    }
  VEC_free (tree, heap, varvec);
}


/* Determine whether to use .GLOBAL_VAR to model call clobbering semantics.
   At every call site, we need to emit V_MAY_DEF expressions to represent
   the clobbering effects of the call for variables whose address escapes
   the current function.

   One approach is to group all call-clobbered variables into a single
   representative that is used as an alias of every call-clobbered variable
   (.GLOBAL_VAR).  This works well, but it ties the optimizer's hands because
   every reference to any call-clobbered variable becomes a reference to
   .GLOBAL_VAR.

   The second approach is to emit a clobbering V_MAY_DEF for every
   call-clobbered variable at call sites.  This is the preferred way in terms
   of optimization opportunities but it may create too many V_MAY_DEF operands
   if there are many call clobbered variables and function calls in the
   function.

   To decide whether or not to use .GLOBAL_VAR we multiply the number of
   function calls found by the number of call-clobbered variables.  If that
   product is beyond a certain threshold, as determined by the parameterized
   values shown below, we use .GLOBAL_VAR.

   FIXME.  This heuristic should be improved.  One idea is to use several
   .GLOBAL_VARs of different types instead of a single one.  The thresholds
   have been derived from a typical bootstrap cycle, including all target
   libraries.  Compile times were found to increase by ~1% compared to using
   .GLOBAL_VAR.  */
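
/* As a purely illustrative example (the figures below are made up; the
   real cutoff is GLOBAL_VAR_THRESHOLD), if the threshold were 500000,
   then a function with 1000 call sites and 600 call-clobbered variables
   would give 1000 * 600 = 600000 >= 500000, so .GLOBAL_VAR would be
   created and used to model clobbering at those call sites.  */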

static void
maybe_create_global_var (struct alias_info *ai)
{
  unsigned i, n_clobbered;
  bitmap_iterator bi;

  /* No need to create it, if we have one already.  */
  if (global_var == NULL_TREE)
    {
      /* Count all the call-clobbered variables.  */
      n_clobbered = 0;
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  n_clobbered++;
	}

      /* If the number of virtual operands that would be needed to
	 model all the call-clobbered variables is larger than
	 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.

	 Also create .GLOBAL_VAR if there are no call-clobbered
	 variables and the program contains a mixture of pure/const
	 and regular function calls.  This is to avoid the problem
	 described in PR 20115:

	      int X;
	      int func_pure (void) { return X; }
	      int func_non_pure (int a) { X += a; }
	      int foo ()
	      {
	 	int a = func_pure ();
		func_non_pure (a);
		a = func_pure ();
		return a;
	      }

	 Since foo() has no call-clobbered variables, there is
	 no relationship between the calls to func_pure and
	 func_non_pure.  Since func_pure has no side-effects, value
	 numbering optimizations elide the second call to func_pure.
	 So, if we have some pure/const and some regular calls in the
	 program we create .GLOBAL_VAR to avoid missing these
	 relations.  */
      if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
	  || (n_clobbered == 0
	      && ai->num_calls_found > 0
	      && ai->num_pure_const_calls_found > 0
	      && ai->num_calls_found > ai->num_pure_const_calls_found))
	create_global_var ();
    }

  /* Mark all call-clobbered symbols for renaming.  Since the initial
     rewrite into SSA ignored all call sites, we may need to rename
     .GLOBAL_VAR and the call-clobbered variables.   */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);

      /* If the function has calls to clobbering functions and
	 .GLOBAL_VAR has been created, make it an alias for all
	 call-clobbered variables.  */
      if (global_var && var != global_var)
	{
	  subvar_t svars;
	  add_may_alias (var, global_var);
	  if (var_can_have_subvars (var)
	      && (svars = get_subvars_for_var (var)))
	    {
	      subvar_t sv;
	      for (sv = svars; sv; sv = sv->next)
		mark_sym_for_renaming (sv->var);
	    }
	}

      mark_sym_for_renaming (var);
    }
}


/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
	This is needed because when checking for type conflicts we are
	interested in the alias set of the memory location pointed-to by
	PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.  */
1468
1469static bool
1470may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1471	     tree var, HOST_WIDE_INT var_alias_set,
1472	     bool alias_set_only)
1473{
1474  tree mem;
1475  var_ann_t m_ann;
1476
1477  alias_stats.alias_queries++;
1478  alias_stats.simple_queries++;
1479
1480  /* By convention, a variable cannot alias itself.  */
1481  mem = var_ann (ptr)->type_mem_tag;
1482  if (mem == var)
1483    {
1484      alias_stats.alias_noalias++;
1485      alias_stats.simple_resolved++;
1486      return false;
1487    }
1488
1489  /* If flag_argument_noalias is > 1 (i.e. -fargument-noalias-global
1490     was given), pointer arguments may not point to global variables.  */
1491  if (flag_argument_noalias > 1 && is_global_var (var)
1492      && TREE_CODE (ptr) == PARM_DECL)
1493    {
1494      alias_stats.alias_noalias++;
1495      alias_stats.simple_resolved++;
1496      return false;
1497    }
1498
1499  /* If either MEM or VAR is a read-only global and the other one
1500     isn't, then PTR cannot point to VAR.  */
1501  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1502      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1503    {
1504      alias_stats.alias_noalias++;
1505      alias_stats.simple_resolved++;
1506      return false;
1507    }
1508
1509  m_ann = var_ann (mem);
1510
1511  gcc_assert (m_ann->mem_tag_kind == TYPE_TAG);
1512
1513  alias_stats.tbaa_queries++;
1514
1515  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
1516  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1517    {
1518      alias_stats.alias_noalias++;
1519      alias_stats.tbaa_resolved++;
1520      return false;
1521    }
1522
1523  /* If var is a record or union type, ptr cannot point into var
1524     unless there is some explicit address operation in the
1525     program that can reference a field of the ptr's dereferenced
1526     type.  This also assumes that the types of both var and ptr are
1527     contained within the compilation unit, and that there is no fancy
1528     addressing arithmetic associated with any of the types
1529     involved.  */
1530
1531  if ((mem_alias_set != 0) && (var_alias_set != 0))
1532    {
1533      tree ptr_type = TREE_TYPE (ptr);
1534      tree var_type = TREE_TYPE (var);
1535
1536      /* The star count is -1 if the type at the end of the pointer_to
1537	 chain is not a record or union type. */
1538      if ((!alias_set_only) &&
1539	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1540	{
1541	  int ptr_star_count = 0;
1542
1543	  /* Ipa_type_escape_star_count_of_interesting_type is a little too
1544	     restrictive for the pointer type; we need to allow pointers to
1545	     primitive types as long as those types cannot be pointers
1546	     to everything.  */
1547	  while (POINTER_TYPE_P (ptr_type))
1548	    /* Strip the *'s off.  */
1549	    {
1550	      ptr_type = TREE_TYPE (ptr_type);
1551	      ptr_star_count++;
1552	    }
1553
1554	  /* There does not appear to be a better test to see if the
1555	     pointer type was one of the pointer to everything
1556	     types.  */
1557
1558	  if (ptr_star_count > 0)
1559	    {
1560	      alias_stats.structnoaddress_queries++;
1561	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
1562							    TREE_TYPE (ptr)))
1563		{
1564		  alias_stats.structnoaddress_resolved++;
1565		  alias_stats.alias_noalias++;
1566		  return false;
1567		}
1568	    }
1569	  else if (ptr_star_count == 0)
1570	    {
1571	      /* If ptr_type was not really a pointer type, it cannot
1572		 alias.  */
1573	      alias_stats.structnoaddress_queries++;
1574	      alias_stats.structnoaddress_resolved++;
1575	      alias_stats.alias_noalias++;
1576	      return false;
1577	    }
1578	}
1579    }
1580
1581  alias_stats.alias_mayalias++;
1582  return true;
1583}
1584
1585
1586/* Add ALIAS to the set of variables that may alias VAR.  */
1587
1588static void
1589add_may_alias (tree var, tree alias)
1590{
1591  size_t i;
1592  var_ann_t v_ann = get_var_ann (var);
1593  var_ann_t a_ann = get_var_ann (alias);
1594
1595  /* Don't allow self-referential aliases.  */
1596  gcc_assert (var != alias);
1597
1598  /* ALIAS must be addressable if it's being added to an alias set.  */
1599#if 1
1600  TREE_ADDRESSABLE (alias) = 1;
1601#else
1602  gcc_assert (may_be_aliased (alias));
1603#endif
1604
1605  if (v_ann->may_aliases == NULL)
1606    VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
1607
1608  /* Avoid adding duplicates.  */
1609  for (i = 0; i < VARRAY_ACTIVE_SIZE (v_ann->may_aliases); i++)
1610    if (alias == VARRAY_TREE (v_ann->may_aliases, i))
1611      return;
1612
1613  /* If VAR is a call-clobbered variable, so is its new ALIAS.
1614     FIXME, call-clobbering should only depend on whether an address
1615     escapes.  It should be independent of aliasing.  */
1616  if (is_call_clobbered (var))
1617    mark_call_clobbered (alias);
1618
1619  /* Likewise.  If ALIAS is call-clobbered, so is VAR.  */
1620  else if (is_call_clobbered (alias))
1621    mark_call_clobbered (var);
1622
1623  VARRAY_PUSH_TREE (v_ann->may_aliases, alias);
1624  a_ann->is_alias_tag = 1;
1625}
1626
1627
1628/* Replace alias I in the alias sets of VAR with NEW_ALIAS.  */
1629
1630static void
1631replace_may_alias (tree var, size_t i, tree new_alias)
1632{
1633  var_ann_t v_ann = var_ann (var);
1634  VARRAY_TREE (v_ann->may_aliases, i) = new_alias;
1635
1636  /* If VAR is a call-clobbered variable, so is NEW_ALIAS.
1637     FIXME, call-clobbering should only depend on whether an address
1638     escapes.  It should be independent of aliasing.  */
1639  if (is_call_clobbered (var))
1640    mark_call_clobbered (new_alias);
1641
1642  /* Likewise.  If NEW_ALIAS is call-clobbered, so is VAR.  */
1643  else if (is_call_clobbered (new_alias))
1644    mark_call_clobbered (var);
1645}
1646
1647
1648/* Mark pointer PTR as pointing to an arbitrary memory location.  */
1649
1650static void
1651set_pt_anything (tree ptr)
1652{
1653  struct ptr_info_def *pi = get_ptr_info (ptr);
1654
1655  pi->pt_anything = 1;
1656  pi->pt_vars = NULL;
1657
1658  /* The pointer used to have a name tag, but we now found it pointing
1659     to an arbitrary location.  The name tag needs to be renamed and
1660     disassociated from PTR.  */
1661  if (pi->name_mem_tag)
1662    {
1663      mark_sym_for_renaming (pi->name_mem_tag);
1664      pi->name_mem_tag = NULL_TREE;
1665    }
1666}
1667
1668
1669/* Return true if STMT is an "escape" site from the current function.  Escape
1670   sites are those statements that might expose the address of a variable
1671   outside the current function.  STMT is an escape site iff:
1672
1673   	1- STMT is a function call, or
1674	2- STMT is an __asm__ expression, or
1675	3- STMT is an assignment to a non-local variable, or
1676	4- STMT is a return statement.
1677
1678   AI points to the alias information collected so far.  */
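/* A small illustrative fragment (not taken from any testcase) in which
   each statement is an escape site of the kind listed above:

	foo (&a);			<-- 1: function call
	__asm__ ("" : : "r" (&b));	<-- 2: __asm__ expression
	global_p = &c;			<-- 3: store to a non-local variable
	return p;			<-- 4: return statement  */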
1679
1680bool
1681is_escape_site (tree stmt, struct alias_info *ai)
1682{
1683  tree call = get_call_expr_in (stmt);
1684  if (call != NULL_TREE)
1685    {
1686      ai->num_calls_found++;
1687
1688      if (!TREE_SIDE_EFFECTS (call))
1689	ai->num_pure_const_calls_found++;
1690
1691      return true;
1692    }
1693  else if (TREE_CODE (stmt) == ASM_EXPR)
1694    return true;
1695  else if (TREE_CODE (stmt) == MODIFY_EXPR)
1696    {
1697      tree lhs = TREE_OPERAND (stmt, 0);
1698
1699      /* Get to the base of _REF nodes.  */
1700      if (TREE_CODE (lhs) != SSA_NAME)
1701	lhs = get_base_address (lhs);
1702
1703      /* If we couldn't recognize the LHS of the assignment, assume that it
1704	 is a non-local store.  */
1705      if (lhs == NULL_TREE)
1706	return true;
1707
1708      /* If the RHS is a conversion between a pointer and an integer, the
1709	 pointer escapes since we can't track the integer.  */
1710      if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
1711	   || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
1712	   || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
1713	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
1714					(TREE_OPERAND (stmt, 1), 0)))
1715	  && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
1716	return true;
1717
1718      /* If the LHS is an SSA name, it can't possibly represent a non-local
1719	 memory store.  */
1720      if (TREE_CODE (lhs) == SSA_NAME)
1721	return false;
1722
1723      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
1724	 local variable, we cannot be sure if it will escape, because we
1725	 don't have information about objects not in SSA form.  Need to
1726	 implement something along the lines of
1727
1728	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
1729	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
1730	 Conference on Object-Oriented Programming Systems, Languages, and
1731	 Applications (OOPSLA), pp. 1-19, 1999.  */
1732      return true;
1733    }
1734  else if (TREE_CODE (stmt) == RETURN_EXPR)
1735    return true;
1736
1737  return false;
1738}
1739
1740
1741/* Create a new memory tag of type TYPE.  If IS_TYPE_TAG is true, the tag
1742   is considered to represent all the pointers whose pointed-to types are
1743   in the same alias set class.  Otherwise, the tag represents a single
1744   SSA_NAME pointer variable.  */
1745
1746static tree
1747create_memory_tag (tree type, bool is_type_tag)
1748{
1749  var_ann_t ann;
1750  tree tag = create_tmp_var_raw (type, (is_type_tag) ? "TMT" : "NMT");
1751
1752  /* By default, memory tags are local variables.  Alias analysis will
1753     determine whether they should be considered globals.  */
1754  DECL_CONTEXT (tag) = current_function_decl;
1755
1756  /* Memory tags are by definition addressable.  */
1757  TREE_ADDRESSABLE (tag) = 1;
1758
1759  ann = get_var_ann (tag);
1760  ann->mem_tag_kind = (is_type_tag) ? TYPE_TAG : NAME_TAG;
1761  ann->type_mem_tag = NULL_TREE;
1762
1763  /* Add the tag to the symbol table.  */
1764  add_referenced_tmp_var (tag);
1765
1766  return tag;
1767}
1768
1769
1770/* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
1771   This is used if P_i has been found to point to a specific set of
1772   variables or to a non-aliased memory location like the address returned
1773   by malloc functions.  */
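/* For instance (a sketch, not an exhaustive description): after

	p_1 = malloc (n);
	q_2 = &some_global;

   both p_1 and q_2 could be given their own name tags, because
   flow-sensitive analysis knows exactly what each of them can reach,
   whereas an arbitrary 'int *' dereference would have to go through
   the shared type tag for its alias set.  */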
1774
1775static tree
1776get_nmt_for (tree ptr)
1777{
1778  struct ptr_info_def *pi = get_ptr_info (ptr);
1779  tree tag = pi->name_mem_tag;
1780
1781  if (tag == NULL_TREE)
1782    tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
1783
1784  /* If PTR is a PARM_DECL, or if it has been found to point to global
1785     memory, then its name tag should be considered a global variable.  */
1786  if (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
1787      || pi->pt_global_mem)
1788    mark_call_clobbered (tag);
1789
1790  return tag;
1791}
1792
1793
1794/* Return the type memory tag associated with pointer PTR.  A memory tag is an
1795   artificial variable that represents the memory location pointed-to by
1796   PTR.  It is used to model the effects of pointer de-references on
1797   addressable variables.
1798
1799   AI points to the data gathered during alias analysis.  This function
1800   populates the array AI->POINTERS.  */
1801
1802static tree
1803get_tmt_for (tree ptr, struct alias_info *ai)
1804{
1805  size_t i;
1806  tree tag;
1807  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
1808  HOST_WIDE_INT tag_set = get_alias_set (tag_type);
1809
1810  /* To avoid creating unnecessary memory tags, only create one memory tag
1811     per alias set class.  Note that it may be tempting to group
1812     memory tags based on conflicting alias sets instead of
1813     equivalence.  That would be wrong because alias sets are not
1814     necessarily transitive (as demonstrated by the libstdc++ test
1815     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
1816     such that conflicts (A, B) == true and conflicts (A, C) == true,
1817     it does not necessarily follow that conflicts (B, C) == true.  */
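  /* An illustrative instance of that non-transitivity: the alias set
     of 'char' conflicts with both the alias set of 'int' and the
     alias set of 'double' (char-typed accesses may alias anything),
     yet the alias sets of 'int' and 'double' do not conflict with
     each other.  */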
1818  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
1819    {
1820      struct alias_map_d *curr = ai->pointers[i];
1821      tree curr_tag = var_ann (curr->var)->type_mem_tag;
1822      if (tag_set == curr->set)
1823	{
1824	  tag = curr_tag;
1825	  break;
1826	}
1827    }
1828
1829  /* If no existing memory tag matched the alias set of PTR's pointed-to
1830     type, create a new tag for PTR and add it to the POINTERS array.  */
1831  if (tag == NULL_TREE)
1832    {
1833      struct alias_map_d *alias_map;
1834
1835      /* If PTR did not have a type tag already, create a new TMT.*
1836	 artificial variable representing the memory location
1837	 pointed-to by PTR.  */
1838      if (var_ann (ptr)->type_mem_tag == NULL_TREE)
1839	tag = create_memory_tag (tag_type, true);
1840      else
1841	tag = var_ann (ptr)->type_mem_tag;
1842
1843      /* Add PTR to the POINTERS array.  Note that we are not interested in
1844	 PTR's alias set.  Instead, we cache the alias set for the memory that
1845	 PTR points to.  */
1846      alias_map = xcalloc (1, sizeof (*alias_map));
1847      alias_map->var = ptr;
1848      alias_map->set = tag_set;
1849      ai->pointers[ai->num_pointers++] = alias_map;
1850    }
1851
1852  /* If the pointed-to type is volatile, so is the tag.  */
1853  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
1854
1855  /* Make sure that the type tag has the same alias set as the
1856     pointed-to type.  */
1857  gcc_assert (tag_set == get_alias_set (tag));
1858
1859  return tag;
1860}
1861
1862
1863/* Create GLOBAL_VAR, an artificial global variable to act as a
1864   representative of all the variables that may be clobbered by function
1865   calls.  */
1866
1867static void
1868create_global_var (void)
1869{
1870  global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
1871                           void_type_node);
1872  DECL_ARTIFICIAL (global_var) = 1;
1873  TREE_READONLY (global_var) = 0;
1874  DECL_EXTERNAL (global_var) = 1;
1875  TREE_STATIC (global_var) = 1;
1876  TREE_USED (global_var) = 1;
1877  DECL_CONTEXT (global_var) = NULL_TREE;
1878  TREE_THIS_VOLATILE (global_var) = 0;
1879  TREE_ADDRESSABLE (global_var) = 0;
1880
1881  add_referenced_tmp_var (global_var);
1882  mark_sym_for_renaming (global_var);
1883}
1884
1885
1886/* Dump alias statistics on FILE.  */
1887
1888static void
1889dump_alias_stats (FILE *file)
1890{
1891  const char *funcname
1892    = lang_hooks.decl_printable_name (current_function_decl, 2);
1893  fprintf (file, "\nAlias statistics for %s\n\n", funcname);
1894  fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
1895  fprintf (file, "Total alias mayalias results:\t%u\n",
1896	   alias_stats.alias_mayalias);
1897  fprintf (file, "Total alias noalias results:\t%u\n",
1898	   alias_stats.alias_noalias);
1899  fprintf (file, "Total simple queries:\t%u\n",
1900	   alias_stats.simple_queries);
1901  fprintf (file, "Total simple resolved:\t%u\n",
1902	   alias_stats.simple_resolved);
1903  fprintf (file, "Total TBAA queries:\t%u\n",
1904	   alias_stats.tbaa_queries);
1905  fprintf (file, "Total TBAA resolved:\t%u\n",
1906	   alias_stats.tbaa_resolved);
1907  fprintf (file, "Total non-addressable structure type queries:\t%u\n",
1908	   alias_stats.structnoaddress_queries);
1909  fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
1910	   alias_stats.structnoaddress_resolved);
1911}
1912
1913
1914/* Dump alias information on FILE.  */
1915
1916void
1917dump_alias_info (FILE *file)
1918{
1919  size_t i;
1920  const char *funcname
1921    = lang_hooks.decl_printable_name (current_function_decl, 2);
1922  referenced_var_iterator rvi;
1923  tree var;
1924
1925  fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
1926
1927  fprintf (file, "Aliased symbols\n\n");
1928
1929  FOR_EACH_REFERENCED_VAR (var, rvi)
1930    {
1931      if (may_be_aliased (var))
1932	dump_variable (file, var);
1933    }
1934
1935  fprintf (file, "\nDereferenced pointers\n\n");
1936
1937  FOR_EACH_REFERENCED_VAR (var, rvi)
1938    {
1939      var_ann_t ann = var_ann (var);
1940      if (ann->type_mem_tag)
1941	dump_variable (file, var);
1942    }
1943
1944  fprintf (file, "\nType memory tags\n\n");
1945
1946  FOR_EACH_REFERENCED_VAR (var, rvi)
1947    {
1948      var_ann_t ann = var_ann (var);
1949      if (ann->mem_tag_kind == TYPE_TAG)
1950	dump_variable (file, var);
1951    }
1952
1953  fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
1954
1955  fprintf (file, "SSA_NAME pointers\n\n");
1956  for (i = 1; i < num_ssa_names; i++)
1957    {
1958      tree ptr = ssa_name (i);
1959      struct ptr_info_def *pi;
1960
1961      if (ptr == NULL_TREE)
1962	continue;
1963
1964      pi = SSA_NAME_PTR_INFO (ptr);
1965      if (!SSA_NAME_IN_FREE_LIST (ptr)
1966	  && pi
1967	  && pi->name_mem_tag)
1968	dump_points_to_info_for (file, ptr);
1969    }
1970
1971  fprintf (file, "\nName memory tags\n\n");
1972
1973  FOR_EACH_REFERENCED_VAR (var, rvi)
1974    {
1975      var_ann_t ann = var_ann (var);
1976      if (ann->mem_tag_kind == NAME_TAG)
1977	dump_variable (file, var);
1978    }
1979
1980  fprintf (file, "\n");
1981}
1982
1983
1984/* Dump alias information on stderr.  */
1985
1986void
1987debug_alias_info (void)
1988{
1989  dump_alias_info (stderr);
1990}
1991
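/* Note that the debug_* entry points in this file are not called by the
   compiler itself; they are provided so that a developer can invoke them
   by hand from a debugger, e.g. "call debug_alias_info ()" from gdb
   while stopped inside a pass.  */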
1992
1993/* Return the alias information associated with pointer T.  It creates a
1994   new instance if none existed.  */
1995
1996struct ptr_info_def *
1997get_ptr_info (tree t)
1998{
1999  struct ptr_info_def *pi;
2000
2001  gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2002
2003  pi = SSA_NAME_PTR_INFO (t);
2004  if (pi == NULL)
2005    {
2006      pi = ggc_alloc (sizeof (*pi));
2007      memset ((void *)pi, 0, sizeof (*pi));
2008      SSA_NAME_PTR_INFO (t) = pi;
2009    }
2010
2011  return pi;
2012}
2013
2014
2015/* Dump points-to information for SSA_NAME PTR into FILE.  */
2016
2017void
2018dump_points_to_info_for (FILE *file, tree ptr)
2019{
2020  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2021
2022  print_generic_expr (file, ptr, dump_flags);
2023
2024  if (pi)
2025    {
2026      if (pi->name_mem_tag)
2027	{
2028	  fprintf (file, ", name memory tag: ");
2029	  print_generic_expr (file, pi->name_mem_tag, dump_flags);
2030	}
2031
2032      if (pi->is_dereferenced)
2033	fprintf (file, ", is dereferenced");
2034
2035      if (pi->value_escapes_p)
2036	fprintf (file, ", its value escapes");
2037
2038      if (pi->pt_anything)
2039	fprintf (file, ", points-to anything");
2040
2041      if (pi->pt_null)
2042	fprintf (file, ", points-to NULL");
2043
2044      if (pi->pt_vars)
2045	{
2046	  unsigned ix;
2047	  bitmap_iterator bi;
2048
2049	  fprintf (file, ", points-to vars: { ");
2050	  EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2051	    {
2052	      print_generic_expr (file, referenced_var (ix), dump_flags);
2053	      fprintf (file, " ");
2054	    }
2055	  fprintf (file, "}");
2056	}
2057    }
2058
2059  fprintf (file, "\n");
2060}
2061
2062
2063/* Dump points-to information for VAR into stderr.  */
2064
2065void
2066debug_points_to_info_for (tree var)
2067{
2068  dump_points_to_info_for (stderr, var);
2069}
2070
2071
2072/* Dump points-to information into FILE.  NOTE: This function is slow, as
2073   it needs to traverse the whole CFG looking for pointer SSA_NAMEs.  */
2074
2075void
2076dump_points_to_info (FILE *file)
2077{
2078  basic_block bb;
2079  block_stmt_iterator si;
2080  ssa_op_iter iter;
2081  const char *fname =
2082    lang_hooks.decl_printable_name (current_function_decl, 2);
2083  referenced_var_iterator rvi;
2084  tree var;
2085
2086  fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2087
2088  /* First dump points-to information for the default definitions of
2089     pointer variables.  This is necessary because default definitions are
2090     not part of the code.  */
2091  FOR_EACH_REFERENCED_VAR (var, rvi)
2092    {
2093      if (POINTER_TYPE_P (TREE_TYPE (var)))
2094	{
2095	  tree def = default_def (var);
2096	  if (def)
2097	    dump_points_to_info_for (file, def);
2098	}
2099    }
2100
2101  /* Dump points-to information for every pointer defined in the program.  */
2102  FOR_EACH_BB (bb)
2103    {
2104      tree phi;
2105
2106      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2107	{
2108	  tree ptr = PHI_RESULT (phi);
2109	  if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2110	    dump_points_to_info_for (file, ptr);
2111	}
2112
2113      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2114	{
2115	  tree stmt = bsi_stmt (si);
2116	  tree def;
2117	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2118	    if (POINTER_TYPE_P (TREE_TYPE (def)))
2119	      dump_points_to_info_for (file, def);
2120	}
2121    }
2122
2123  fprintf (file, "\n");
2124}
2125
2126
2127/* Dump points-to information into stderr.  */
2128
2129void
2130debug_points_to_info (void)
2131{
2132  dump_points_to_info (stderr);
2133}
2134
2135/* Dump to FILE the list of variables that may be aliasing VAR.  */
2136
2137void
2138dump_may_aliases_for (FILE *file, tree var)
2139{
2140  varray_type aliases;
2141
2142  if (TREE_CODE (var) == SSA_NAME)
2143    var = SSA_NAME_VAR (var);
2144
2145  aliases = var_ann (var)->may_aliases;
2146  if (aliases)
2147    {
2148      size_t i;
2149      fprintf (file, "{ ");
2150      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2151	{
2152	  print_generic_expr (file, VARRAY_TREE (aliases, i), dump_flags);
2153	  fprintf (file, " ");
2154	}
2155      fprintf (file, "}");
2156    }
2157}
2158
2159
2160/* Dump to stderr the list of variables that may be aliasing VAR.  */
2161
2162void
2163debug_may_aliases_for (tree var)
2164{
2165  dump_may_aliases_for (stderr, var);
2166}
2167
2168/* Return true if VAR may be aliased.  */
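/* For instance (illustrative): a block-scope 'int i' whose address is
   never taken cannot be aliased, while a file-scope 'static int s' may
   still be aliased when not compiling in unit-at-a-time mode, because a
   function seen later in the translation unit could take its address.  */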
2169
2170bool
2171may_be_aliased (tree var)
2172{
2173  /* Obviously.  */
2174  if (TREE_ADDRESSABLE (var))
2175    return true;
2176
2177  /* Globally visible variables can have their addresses taken by other
2178     translation units.  */
2179  if (DECL_EXTERNAL (var) || TREE_PUBLIC (var))
2180    return true;
2181
2182  /* Automatic variables can't have their addresses escape any other way.
2183     This must be after the check for global variables, as extern declarations
2184     do not have TREE_STATIC set.  */
2185  if (!TREE_STATIC (var))
2186    return false;
2187
2188  /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2189     of address-of operators, and so we can trust TREE_ADDRESSABLE.  Otherwise
2190     we can only be sure the variable isn't addressable if it's local to the
2191     current function.  */
2192  if (flag_unit_at_a_time)
2193    return false;
2194  if (decl_function_context (var) == current_function_decl)
2195    return false;
2196
2197  return true;
2198}
2199
2200
2201/* Given two symbols, return TRUE if one is in the alias set of the other.  */
2202bool
2203is_aliased_with (tree tag, tree sym)
2204{
2205  size_t i;
2206  varray_type aliases;
2207
2208  if (var_ann (sym)->is_alias_tag)
2209    {
2210      aliases = var_ann (tag)->may_aliases;
2211
2212      if (aliases == NULL)
2213	return false;
2214
2215      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2216	if (VARRAY_TREE (aliases, i) == sym)
2217	  return true;
2218    }
2219  else
2220    {
2221      aliases = var_ann (sym)->may_aliases;
2222
2223      if (aliases == NULL)
2224	return false;
2225
2226      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2227	if (VARRAY_TREE (aliases, i) == tag)
2228	  return true;
2229    }
2230
2231  return false;
2232}
2233
2234
2235/* Given two tags, return TRUE if their may-alias sets intersect.  */
2236
2237bool
2238may_aliases_intersect (tree tag1, tree tag2)
2239{
2240  struct pointer_set_t *set1 = pointer_set_create ();
2241  unsigned i;
2242  varray_type may_aliases1 = var_ann (tag1)->may_aliases;
2243  varray_type may_aliases2 = var_ann (tag2)->may_aliases;
2244
2245  if (may_aliases1 == NULL || may_aliases2 == NULL)
2246    return false;
2247
2248  /* Insert all the symbols from the first may-alias set into the
2249     pointer-set.  */
2250  for (i = 0; i < VARRAY_ACTIVE_SIZE (may_aliases1); i++)
2251    pointer_set_insert (set1, VARRAY_TREE (may_aliases1, i));
2252
2253  /* Go through the second may-alias set and check if it contains symbols that
2254     are common with the first set.  */
2255  for (i = 0; i < VARRAY_ACTIVE_SIZE (may_aliases2); i++)
2256    if (pointer_set_contains (set1, VARRAY_TREE (may_aliases2, i)))
2257      {
2258       pointer_set_destroy (set1);
2259       return true;
2260      }
2261
2262  pointer_set_destroy (set1);
2263  return false;
2264}
2265
2266
2267/* Add VAR to the list of may-aliases of PTR's type tag.  If PTR
2268   doesn't already have a type tag, create one.  */
2269
2270void
2271add_type_alias (tree ptr, tree var)
2272{
2273  varray_type aliases;
2274  tree tag;
2275  var_ann_t ann = var_ann (ptr);
2276  subvar_t svars;
2277  VEC (tree, heap) *varvec = NULL;
2278
2279  if (ann->type_mem_tag == NULL_TREE)
2280    {
2281      tree q = NULL_TREE;
2282      tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2283      HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2284      safe_referenced_var_iterator rvi;
2285
2286      /* PTR doesn't have a type tag, create a new one and add VAR to
2287	 the new tag's alias set.
2288
2289	 FIXME, This is slower than necessary.  We need to determine
2290	 whether there is another pointer Q with the same alias set as
2291	 PTR.  This could be sped up by having type tags associated
2292	 with types.  */
2293      FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
2294	{
2295	  if (POINTER_TYPE_P (TREE_TYPE (q))
2296	      && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
2297	    {
2298	      /* Found another pointer Q with the same alias set as
2299		 the PTR's pointed-to type.  If Q has a type tag, use
2300		 it.  Otherwise, create a new memory tag for PTR.  */
2301	      var_ann_t ann1 = var_ann (q);
2302	      if (ann1->type_mem_tag)
2303		ann->type_mem_tag = ann1->type_mem_tag;
2304	      else
2305		ann->type_mem_tag = create_memory_tag (tag_type, true);
2306	      goto found_tag;
2307	    }
2308	}
2309
2310      /* Couldn't find any other pointer with a type tag we could use.
2311	 Create a new memory tag for PTR.  */
2312      ann->type_mem_tag = create_memory_tag (tag_type, true);
2313    }
2314
2315found_tag:
2316  /* If VAR is not already PTR's type tag, add it to the may-alias set
2317     for PTR's type tag.  */
2318  gcc_assert (var_ann (var)->mem_tag_kind == NOT_A_TAG);
2319  tag = ann->type_mem_tag;
2320
2321  /* If VAR has subvars, add the subvars to the tag instead of the
2322     actual var.  */
2323  if (var_can_have_subvars (var)
2324      && (svars = get_subvars_for_var (var)))
2325    {
2326      subvar_t sv;
2327      for (sv = svars; sv; sv = sv->next)
2328	add_may_alias (tag, sv->var);
2329    }
2330  else
2331    add_may_alias (tag, var);
2332
2333  /* TAG and its set of aliases need to be marked for renaming.  */
2334  mark_sym_for_renaming (tag);
2335  if ((aliases = var_ann (tag)->may_aliases) != NULL)
2336    {
2337      size_t i;
2338      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2339	mark_sym_for_renaming (VARRAY_TREE (aliases, i));
2340    }
2341
2342  /* If we had grouped aliases, VAR may have aliases of its own.  Mark
2343     them for renaming as well.  Other statements referencing the
2344     aliases of VAR will need to be updated.  */
2345  if ((aliases = var_ann (var)->may_aliases) != NULL)
2346    {
2347      size_t i;
2348      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2349	mark_sym_for_renaming (VARRAY_TREE (aliases, i));
2350    }
2351  VEC_free (tree, heap, varvec);
2352}
2353
2354
2355/* Create a new type tag for PTR.  Construct the may-alias list of this type
2356   tag so that it has the aliasing of VAR.
2357
2358   Note that the set of aliases represented by the new type tag is not marked
2359   for renaming.  */
2360
2361void
2362new_type_alias (tree ptr, tree var)
2363{
2364  var_ann_t p_ann = var_ann (ptr);
2365  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2366  var_ann_t v_ann = var_ann (var);
2367  tree tag;
2368  subvar_t svars;
2369
2370  gcc_assert (p_ann->type_mem_tag == NULL_TREE);
2371  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
2372
2373  /* Add VAR to the may-alias set of PTR's new type tag.  If VAR has
2374     subvars, add the subvars to the tag instead of the actual var.  */
2375  if (var_can_have_subvars (var)
2376      && (svars = get_subvars_for_var (var)))
2377    {
2378      subvar_t sv;
2379
2380      tag = create_memory_tag (tag_type, true);
2381      p_ann->type_mem_tag = tag;
2382
2383      for (sv = svars; sv; sv = sv->next)
2384        add_may_alias (tag, sv->var);
2385    }
2386  else
2387    {
2388      /* The following is based on code in add_stmt_operand to ensure that the
2389	 same defs/uses/vdefs/vuses will be found after replacing a reference
2390	 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2391	 is the address of var.  */
2392      varray_type aliases = v_ann->may_aliases;
2393
2394      if ((aliases != NULL)
2395	  && (VARRAY_ACTIVE_SIZE (aliases) == 1))
2396	{
2397	  tree ali = VARRAY_TREE (aliases, 0);
2398
2399	  if (get_var_ann (ali)->mem_tag_kind == TYPE_TAG)
2400	    {
2401	      p_ann->type_mem_tag = ali;
2402	      return;
2403	    }
2404	}
2405
2406      tag = create_memory_tag (tag_type, true);
2407      p_ann->type_mem_tag = tag;
2408
2409      if (aliases == NULL)
2410	add_may_alias (tag, var);
2411      else
2412	{
2413	  size_t i;
2414
2415	  for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2416	    add_may_alias (tag, VARRAY_TREE (aliases, i));
2417	}
2418    }
2419}
2420
2421
2422
2423/* This represents the used range of a variable.  */
2424
2425typedef struct used_part
2426{
2427  HOST_WIDE_INT minused;
2428  HOST_WIDE_INT maxused;
2429  /* True if we have an explicit use/def of some portion of this variable,
2430     even if it is all of it, e.g. a.b = 5 or temp = a.b.  */
2431  bool explicit_uses;
2432  /* True if we have an implicit use/def of some portion of this
2433     variable.  Implicit uses occur when we can't tell what part we
2434     are referencing, and have to make conservative assumptions.  */
2435  bool implicit_uses;
2436} *used_part_t;
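
/* For example (purely illustrative, assuming a 32-bit 'int'): for

	struct S { int a; int b; int c; } s;
	...
	s.b = 5;

   the used_part recorded for 's' would cover bits [32, 64), i.e.
   minused = 32 and maxused = 64, with explicit_uses set, because the
   reference names a specific field with a known offset and size.  */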
2437
2438/* A hash table of used_part structures, indexed by variable uid.  */
2439
2440static htab_t used_portions;
2441
2442struct used_part_map
2443{
2444  unsigned int uid;
2445  used_part_t to;
2446};
2447
2448/* Return true if the uids in the two used part maps are equal.  */
2449
2450static int
2451used_part_map_eq (const void *va, const void *vb)
2452{
2453  const struct used_part_map  *a = va, *b = vb;
2454  return (a->uid == b->uid);
2455}
2456
2457/* Hash a used_part_map from its uid.  */
2458
2459static unsigned int
2460used_part_map_hash (const void *item)
2461{
2462  return ((const struct used_part_map *)item)->uid;
2463}
2464
2465/* Free a used part map element.  */
2466
2467static void
2468free_used_part_map (void *item)
2469{
2470  free (((struct used_part_map *)item)->to);
2471  free (item);
2472}
2473
2474/* Lookup a used_part structure for a UID.  */
2475
2476static used_part_t
2477up_lookup (unsigned int uid)
2478{
2479  struct used_part_map *h, in;
2480  in.uid = uid;
2481  h = htab_find_with_hash (used_portions, &in, uid);
2482  if (!h)
2483    return NULL;
2484  return h->to;
2485}
2486
2487/* Insert the pair UID, TO into the used part hashtable.  */
2488
2489static void
2490up_insert (unsigned int uid, used_part_t to)
2491{
2492  struct used_part_map *h;
2493  void **loc;
2494
2495  h = xmalloc (sizeof (struct used_part_map));
2496  h->uid = uid;
2497  h->to = to;
2498  loc = htab_find_slot_with_hash (used_portions, h,
2499				  uid, INSERT);
2500  if (*loc != NULL)
2501    free (*loc);
2502  *(struct used_part_map **)  loc = h;
2503}
2504
2505
2506/* Given a variable uid, UID, get or create the entry in the used portions
2507   table for the variable.  */
2508
2509static used_part_t
2510get_or_create_used_part_for (size_t uid)
2511{
2512  used_part_t up;
2513  if ((up = up_lookup (uid)) == NULL)
2514    {
2515      up = xcalloc (1, sizeof (struct used_part));
2516      up->minused = INT_MAX;
2517      up->maxused = 0;
2518      up->explicit_uses = false;
2519      up->implicit_uses = false;
2520    }
2521
2522  return up;
2523}
2524
2525
2526/* Create and return a structure sub-variable for field FIELD of
2527   variable VAR.  */
2528
2529static tree
2530create_sft (tree var, tree field)
2531{
2532  var_ann_t ann;
2533  tree subvar = create_tmp_var_raw (TREE_TYPE (field), "SFT");
2534
2535  /* We need to copy the various flags from VAR to SUBVAR, so that
2536     SUBVAR is considered is_global_var iff the original variable was.  */
2537  DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2538  DECL_EXTERNAL (subvar) = DECL_EXTERNAL (var);
2539  TREE_PUBLIC  (subvar) = TREE_PUBLIC (var);
2540  TREE_STATIC (subvar) = TREE_STATIC (var);
2541  TREE_READONLY (subvar) = TREE_READONLY (var);
2542  TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
2543
2544  /* Add the new variable to REFERENCED_VARS.  */
2545  ann = get_var_ann (subvar);
2546  ann->mem_tag_kind = STRUCT_FIELD;
2547  ann->type_mem_tag = NULL;
2548  add_referenced_tmp_var (subvar);
2549
2550  return subvar;
2551}
2552
2553
2554/* Given an aggregate VAR, create the subvariables that represent its
2555   fields.  */
2556
2557static void
2558create_overlap_variables_for (tree var)
2559{
2560  VEC(fieldoff_s,heap) *fieldstack = NULL;
2561  used_part_t up;
2562  size_t uid = DECL_UID (var);
2563
2564  up = up_lookup (uid);
2565  if (!up)
2566    return;
2567
2568  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2569  if (VEC_length (fieldoff_s, fieldstack) != 0)
2570    {
2571      subvar_t *subvars;
2572      fieldoff_s *fo;
2573      bool notokay = false;
2574      int fieldcount = 0;
2575      int i;
2576      HOST_WIDE_INT lastfooffset = -1;
2577      HOST_WIDE_INT lastfosize = -1;
2578      tree lastfotype = NULL_TREE;
2579
2580      /* Not all fields have DECL_SIZE set; for those that don't, we don't
2581	 know their size and thus cannot handle them.
2582	 The same is true of fields whose DECL_SIZE is not an integer
2583	 constant (such as variable-sized fields).
2584	 Fields whose offsets are not constant will have an offset < 0.
2585	 We *could* handle fields that are constant sized arrays, but
2586	 currently don't.  Doing so would require some extra changes to
2587	 tree-ssa-operands.c.  */
2588
2589      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2590	{
2591	  if (!DECL_SIZE (fo->field)
2592	      || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
2593	      || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
2594	      || fo->offset < 0)
2595	    {
2596	      notokay = true;
2597	      break;
2598	    }
2599          fieldcount++;
2600	}
2601
2602      /* The current heuristic we use is as follows:
2603	 If the variable has no used portions in this function, no
2604	 structure vars are created for it.
2605	 Otherwise:
2606	 If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
2607	 we always create structure vars for it.
2608	 If the variable has SALIAS_MAX_IMPLICIT_FIELDS or more fields and
2609	 some explicit uses, we create structure vars for it.
2610	 If the variable has SALIAS_MAX_IMPLICIT_FIELDS or more fields and
2611	 no explicit uses, we do not create structure vars for it.
2612      */
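
      /* As a hypothetical illustration (SALIAS_MAX_IMPLICIT_FIELDS is a
	 --param whose default may differ): with a threshold of 5, a
	 struct with 8 fields that is only ever copied or passed around
	 as a whole (implicit uses only) gets no subvariables, while the
	 same struct does get them as soon as one of its fields is read
	 or written directly.  */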
2613
2614      if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2615	  && !up->explicit_uses)
2616	{
2617	  if (dump_file && (dump_flags & TDF_DETAILS))
2618	    {
2619	      fprintf (dump_file, "Variable ");
2620	      print_generic_expr (dump_file, var, 0);
2621	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2622	    }
2623	  notokay = true;
2624	}
2625
2626      /* Bail out if we can't create overlap variables.  */
2627      if (notokay)
2628	{
2629	  VEC_free (fieldoff_s, heap, fieldstack);
2630	  return;
2631	}
2632
2633      /* Otherwise, create the variables.  */
2634      subvars = lookup_subvars_for_var (var);
2635
2636      sort_fieldstack (fieldstack);
2637
2638      for (i = VEC_length (fieldoff_s, fieldstack);
2639	   VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2640	{
2641	  subvar_t sv;
2642	  HOST_WIDE_INT fosize;
2643	  tree currfotype;
2644
2645	  fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field));
2646	  currfotype = TREE_TYPE (fo->field);
2647
2648	  /* If this field isn't in the used portion,
2649	     or it has the exact same offset and size as the last
2650	     field, skip it.  */
2651
2652	  if (((fo->offset <= up->minused
2653		&& fo->offset + fosize <= up->minused)
2654	       || fo->offset >= up->maxused)
2655	      || (fo->offset == lastfooffset
2656		  && fosize == lastfosize
2657		  && currfotype == lastfotype))
2658	    continue;
2659	  sv = ggc_alloc (sizeof (struct subvar));
2660	  sv->offset = fo->offset;
2661	  sv->size = fosize;
2662	  sv->next = *subvars;
2663	  sv->var = create_sft (var, fo->field);
2664
2665	  if (dump_file)
2666	    {
2667	      fprintf (dump_file, "structure field tag %s created for var %s",
2668		       get_name (sv->var), get_name (var));
2669	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
2670		       sv->offset);
2671	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
2672		       sv->size);
2673	      fprintf (dump_file, "\n");
2674	    }
2675
2676	  lastfotype = currfotype;
2677	  lastfooffset = fo->offset;
2678	  lastfosize = fosize;
2679	  *subvars = sv;
2680	}
2681
2682      /* Once we have created subvars, the original is no longer call
2683	 clobbered on its own.  Its call clobbered status depends
2684	 completely on the call clobbered status of the subvars.
2685
2686	 add_referenced_tmp_var (via create_sft in the above loop) will take care of
2687	 marking subvars of global variables as call clobbered for us
2688	 to start, since they are global as well.  */
2689      clear_call_clobbered (var);
2690    }
2691
2692  VEC_free (fieldoff_s, heap, fieldstack);
2693}
2694
2695
2696/* Find the conservative answer to the question of what portions of what
2697   structures are used by this statement.  We assume that if we have a
2698   component ref with a known size + offset, we only need that part
2699   of the structure.  For unknown cases, or cases where we do something
2700   to the whole structure, we assume we need to create fields for the
2701   entire structure.  */
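/* For example (illustrative): for a statement such as

	p = &s;

   we cannot tell which fields will later be touched through 'p', so
   the whole of 's' is recorded as used (an implicit use), whereas a
   direct field reference such as 's.b = 5' only widens the recorded
   range to cover field 'b' (an explicit use).  */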
2702
2703static tree
2704find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2705{
2706  switch (TREE_CODE (*tp))
2707    {
2708    case COMPONENT_REF:
2709      {
2710	HOST_WIDE_INT bitsize;
2711	HOST_WIDE_INT bitpos;
2712	tree offset;
2713	enum machine_mode mode;
2714	int unsignedp;
2715	int volatilep;
2716	tree ref;
2717	ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode,
2718				   &unsignedp, &volatilep, false);
2719	if (DECL_P (ref) && offset == NULL && bitsize != -1)
2720	  {
2721	    size_t uid = DECL_UID (ref);
2722	    used_part_t up;
2723
2724	    up = get_or_create_used_part_for (uid);
2725
2726	    if (bitpos <= up->minused)
2727	      up->minused = bitpos;
2728	    if ((bitpos + bitsize >= up->maxused))
2729	      up->maxused = bitpos + bitsize;
2730
2731	    up->explicit_uses = true;
2732	    up_insert (uid, up);
2733
2734	    *walk_subtrees = 0;
2735	    return NULL_TREE;
2736	  }
2737	else if (DECL_P (ref))
2738	  {
2739	    if (DECL_SIZE (ref)
2740		&& var_can_have_subvars (ref)
2741		&& TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
2742	      {
2743		used_part_t up;
2744		size_t uid = DECL_UID (ref);
2745
2746		up = get_or_create_used_part_for (uid);
2747
2748		up->minused = 0;
2749		up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref));
2750
2751		up->implicit_uses = true;
2752
2753		up_insert (uid, up);
2754
2755		*walk_subtrees = 0;
2756		return NULL_TREE;
2757	      }
2758	  }
2759      }
2760      break;
2761      /* This is here to make sure we mark the entire base variable as used
2762	 when you take its address.  Because our used portion analysis is
2763	 simple, we aren't looking at casts or pointer arithmetic to see what
2764	 happens when you take the address.  */
2765    case ADDR_EXPR:
2766      {
2767	tree var = get_base_address (TREE_OPERAND (*tp, 0));
2768
2769	if (var
2770	    && DECL_P (var)
2771	    && DECL_SIZE (var)
2772	    && var_can_have_subvars (var)
2773	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
2774	  {
2775	    used_part_t up;
2776	    size_t uid = DECL_UID (var);
2777
2778	    up = get_or_create_used_part_for (uid);
2779
2780	    up->minused = 0;
2781	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
2782	    up->implicit_uses = true;
2783
2784	    up_insert (uid, up);
2785	    *walk_subtrees = 0;
2786	    return NULL_TREE;
2787	  }
2788      }
2789      break;
2790    case VAR_DECL:
2791    case PARM_DECL:
2792    case RESULT_DECL:
2793      {
2794	tree var = *tp;
2795	if (DECL_SIZE (var)
2796	    && var_can_have_subvars (var)
2797	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
2798	  {
2799	    used_part_t up;
2800	    size_t uid = DECL_UID (var);
2801
2802	    up = get_or_create_used_part_for (uid);
2803
2804	    up->minused = 0;
2805	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
2806	    up->implicit_uses = true;
2807
2808	    up_insert (uid, up);
2809	    *walk_subtrees = 0;
2810	    return NULL_TREE;
2811	  }
2812      }
2813      break;
2814
2815    default:
2816      break;
2817
2818    }
2819  return NULL_TREE;
2820}
2821
2822/* Create structure field variables for structures used in this function.  */
2823
2824static void
2825create_structure_vars (void)
2826{
2827  basic_block bb;
2828  safe_referenced_var_iterator rvi;
2829  VEC (tree, heap) *varvec = NULL;
2830  tree var;
2831
2832  used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
2833                               free_used_part_map);
2834
2835  FOR_EACH_BB (bb)
2836    {
2837      block_stmt_iterator bsi;
2838      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2839	{
2840	  walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
2841					find_used_portions,
2842					NULL);
2843	}
2844    }
2845  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
2846    {
2847      /* The C++ FE creates vars without DECL_SIZE set, for some reason.  */
2848      if (var
2849	  && DECL_SIZE (var)
2850	  && var_can_have_subvars (var)
2851	  && var_ann (var)->mem_tag_kind == NOT_A_TAG
2852	  && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
2853	create_overlap_variables_for (var);
2854    }
2855  htab_delete (used_portions);
2856  VEC_free (tree, heap, varvec);
2857
2858}
2859
2860static bool
2861gate_structure_vars (void)
2862{
2863  return flag_tree_salias != 0;
2864}
2865
2866struct tree_opt_pass pass_create_structure_vars =
2867{
2868  "salias",		 /* name */
2869  gate_structure_vars,	 /* gate */
2870  create_structure_vars, /* execute */
2871  NULL,			 /* sub */
2872  NULL,			 /* next */
2873  0,			 /* static_pass_number */
2874  0,			 /* tv_id */
2875  PROP_cfg,		 /* properties_required */
2876  0,			 /* properties_provided */
2877  0,			 /* properties_destroyed */
2878  0,			 /* todo_flags_start */
2879  TODO_dump_func,	 /* todo_flags_finish */
2880  0			 /* letter */
2881};
2882